diff --git a/.gitattributes b/.gitattributes
index a6344aac8c09253b3b630fb776ae94478aa0275b..52373fe24473b1aa44333d318f578ae6bf04b49b 100644
--- a/.gitattributes
+++ b/.gitattributes
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
*.zip filter=lfs diff=lfs merge=lfs -text
*.zst filter=lfs diff=lfs merge=lfs -text
*tfevents* filter=lfs diff=lfs merge=lfs -text
+tokenizer.json filter=lfs diff=lfs merge=lfs -text
diff --git a/README.md b/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..6b057297bd700af674d66e78ea68598781a7f434
--- /dev/null
+++ b/README.md
@@ -0,0 +1,254 @@
+---
+license: llama3.1
+library_name: transformers
+base_model:
+- meta-llama/Llama-3.1-405B
+pipeline_tag: text-generation
+---
+
+
+
+
+
+
+# Cogito v2 preview - 405B
+
+[Blog Post](https://www.deepcogito.com/research/cogito-v2-preview)
+
+The Cogito v2 LLMs are instruction-tuned generative models. All models are released under an open license for commercial use.
+
+- Cogito v2 models are hybrid reasoning models. Each model can answer directly (standard LLM), or self-reflect before answering (like reasoning models).
+- The LLMs are trained using **Iterated Distillation and Amplification (IDA)** - a scalable and efficient alignment strategy for superintelligence using iterative self-improvement.
+- The models have been optimized for coding, STEM, instruction following and general helpfulness, and have significantly higher multilingual, coding and tool calling capabilities than size-equivalent counterparts.
+  - In both standard and reasoning modes, Cogito v2-preview models outperform their size-equivalent counterparts on common industry benchmarks.
+- Each model is trained in over 30 languages and supports a context length of 128k.
+
+# Evaluations
+For detailed evaluations, please refer to the [Blog Post](https://www.deepcogito.com/research/cogito-v2-preview).
+
+
+# Usage
+Here is a snippet for usage with Transformers:
+
+```python
+import transformers
+import torch
+
+model_id = "deepcogito/cogito-v2-preview-llama-405B"
+
+pipeline = transformers.pipeline(
+ "text-generation",
+ model=model_id,
+ model_kwargs={"torch_dtype": torch.bfloat16},
+ device_map="auto",
+)
+
+messages = [
+ {"role": "system", "content": "You are a pirate chatbot who always responds in pirate speak!"},
+ {"role": "user", "content": "Give me a short introduction to LLMs."},
+]
+
+outputs = pipeline(
+ messages,
+ max_new_tokens=512,
+)
+
+print(outputs[0]["generated_text"][-1])
+```
+
+
+
+## Implementing extended thinking
+- By default, the model will answer in the standard mode.
+- To enable thinking, you can use either of the following two methods:
+ - Set `enable_thinking=True` while applying the chat template.
+ - Add a specific system prompt, along with prefilling the response with "\\n".
+
+**NOTE: Unlike Cogito v1 models, we initiate the response with "\\n" at the beginning of every output when reasoning is enabled. This is because hybrid models can be brittle at times (<0.1% of the cases), and adding a "\\n" ensures that the model does indeed respect thinking.**
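+
+Since the chat template is included in this repository, you can inspect exactly what this "\\n" prefill looks like. Here is a minimal sketch (the user message is only illustrative) that renders the prompt in both modes:
+
+```python
+from transformers import AutoTokenizer
+
+tokenizer = AutoTokenizer.from_pretrained("deepcogito/cogito-v2-preview-llama-405B")
+messages = [{"role": "user", "content": "Give me a short introduction to LLMs."}]
+
+standard_prompt = tokenizer.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
+thinking_prompt = tokenizer.apply_chat_template(messages, tokenize=False, add_generation_prompt=True, enable_thinking=True)
+
+# The thinking prompt adds the "Enable deep thinking subroutine." system message
+# and ends with an extra "\n" after the assistant header.
+print(repr(standard_prompt))
+print(repr(thinking_prompt))
+```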
+
+### Method 1 - Set enable_thinking=True in the tokenizer
+If you are using Hugging Face tokenizers, you can simply add the argument `enable_thinking=True` when applying the chat template (this option is supported by the chat template).
+
+Here is an example -
+```python
+from transformers import AutoModelForCausalLM, AutoTokenizer
+
+model_name = "deepcogito/cogito-v2-preview-llama-405B"
+
+model = AutoModelForCausalLM.from_pretrained(
+ model_name,
+ torch_dtype="auto",
+ device_map="auto"
+)
+tokenizer = AutoTokenizer.from_pretrained(model_name)
+
+prompt = "Give me a short introduction to LLMs."
+messages = [
+ {"role": "system", "content": "You are a pirate chatbot who always responds in pirate speak!"},
+ {"role": "user", "content": prompt}
+]
+
+text = tokenizer.apply_chat_template(
+ messages,
+ tokenize=False,
+ add_generation_prompt=True,
+ enable_thinking=True
+)
+model_inputs = tokenizer([text], return_tensors="pt").to(model.device)
+
+generated_ids = model.generate(
+ **model_inputs,
+ max_new_tokens=512
+)
+generated_ids = [
+ output_ids[len(input_ids):] for input_ids, output_ids in zip(model_inputs.input_ids, generated_ids)
+]
+
+response = tokenizer.batch_decode(generated_ids, skip_special_tokens=True)[0]
+print(response)
+```
+
+### Method 2 - Add a specific system prompt, along with prefilling the response with "\\n".
+To enable thinking, you need to do two things -
+
+
+Step 1 - Use this as the system prompt: `system_instruction = 'Enable deep thinking subroutine.'`
+
+If you already have a system_instruction, then use `system_instruction = 'Enable deep thinking subroutine.' + '\n\n' + system_instruction`.
+
+Step 2 - Prefill the response with `"\n"`.
+
+Here is an example -
+
+```python
+from transformers import AutoModelForCausalLM, AutoTokenizer
+
+model_name = "deepcogito/cogito-v2-preview-llama-405B"
+
+model = AutoModelForCausalLM.from_pretrained(
+ model_name,
+ torch_dtype="auto",
+ device_map="auto"
+)
+tokenizer = AutoTokenizer.from_pretrained(model_name)
+
+# Step 1 - Add deep thinking instruction.
+DEEP_THINKING_INSTRUCTION = "Enable deep thinking subroutine."
+
+messages = [
+ {"role": "system", "content": DEEP_THINKING_INSTRUCTION},
+ {"role": "user", "content": "Write a bash script that takes a matrix represented as a string with format '[1,2],[3,4],[5,6]' and prints the transpose in the same format."},
+]
+
+text = tokenizer.apply_chat_template(
+ messages,
+ tokenize=False,
+ add_generation_prompt=True
+)
+
+# Step 2 - Prefill response with "\n".
+text += "\n"
+
+# Now, continue as usual.
+model_inputs = tokenizer([text], return_tensors="pt").to(model.device)
+
+generated_ids = model.generate(
+ **model_inputs,
+ max_new_tokens=512
+)
+generated_ids = [
+ output_ids[len(input_ids):] for input_ids, output_ids in zip(model_inputs.input_ids, generated_ids)
+]
+
+response = tokenizer.batch_decode(generated_ids, skip_special_tokens=True)[0]
+print(response)
+```
+
+
+Similarly, if you have a system prompt, you can append the `DEEP_THINKING_INSTRUCTION` to the beginning in this way -
+
+```python
+DEEP_THINKING_INSTRUCTION = "Enable deep thinking subroutine."
+
+system_prompt = "Reply to each prompt with only the actual code - no explanations."
+prompt = "Write a bash script that takes a matrix represented as a string with format '[1,2],[3,4],[5,6]' and prints the transpose in the same format."
+
+messages = [
+ {"role": "system", "content": DEEP_THINKING_INSTRUCTION + '\n\n' + system_prompt},
+ {"role": "user", "content": prompt}
+]
+```
+
+
+# Tool Calling
+Cogito models support tool calling (single, parallel, multiple and parallel_multiple), in both standard and extended thinking modes.
+
+Here is a snippet -
+
+```python
+# First, define a tool
+def get_current_temperature(location: str) -> float:
+ """
+ Get the current temperature at a location.
+
+ Args:
+ location: The location to get the temperature for, in the format "City, Country"
+ Returns:
+ The current temperature at the specified location in the specified units, as a float.
+ """
+ return 22. # A real function should probably actually get the temperature!
+
+# Next, create a chat and apply the chat template
+messages = [
+ {"role": "user", "content": "Hey, what's the temperature in Paris right now?"}
+]
+
+text = tokenizer.apply_chat_template(messages, tools=[get_current_temperature], add_generation_prompt=True, tokenize=False)
+inputs = tokenizer(text, return_tensors="pt", add_special_tokens=False).to(model.device)
+outputs = model.generate(**inputs, max_new_tokens=512)
+output_text = tokenizer.batch_decode(outputs)[0][len(text):]
+print(output_text)
+```
+
+This will result in the output -
+```
+
+{"name": "get_current_temperature", "arguments": {"location": "Paris, France"}}
+<|eot_id|>
+```
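+
+If you want to recover that tool call as a Python dictionary programmatically (it is written out by hand in the next snippet), here is a minimal sketch. `parse_tool_call` is a hypothetical helper, and it assumes the reply contains exactly one tool call in the JSON-then-`<|eot_id|>` form shown above:
+
+```python
+import json
+
+def parse_tool_call(raw_output: str) -> dict:
+    """Parse a single tool call emitted by the model into a dict."""
+    # Strip the end-of-turn marker and surrounding whitespace, then load the JSON payload.
+    payload = raw_output.replace("<|eot_id|>", "").strip()
+    return json.loads(payload)
+
+tool_call = parse_tool_call(output_text)
+# -> {"name": "get_current_temperature", "arguments": {"location": "Paris, France"}}
+```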
+
+When the model generates a tool call, you should add it to the chat like so:
+
+```python
+tool_call = {"name": "get_current_temperature", "arguments": {"location": "Paris, France"}}
+messages.append({"role": "assistant", "tool_calls": [{"type": "function", "function": tool_call}]})
+```
+
+and then call the tool and append the result, with the `tool` role, like so:
+
+```python
+messages.append({"role": "tool", "name": "get_current_temperature", "content": "22.0"})
+```
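+
+In a real application the `content` would come from actually invoking the Python function rather than being hardcoded. A minimal sketch, assuming a simple name-to-callable registry (`available_tools` and `run_tool` are illustrative names, not part of the original example):
+
+```python
+# Hypothetical registry mapping tool names to the callables defined earlier.
+available_tools = {"get_current_temperature": get_current_temperature}
+
+def run_tool(tool_call: dict) -> str:
+    """Call the requested tool with its arguments and return the result as a string."""
+    fn = available_tools[tool_call["name"]]
+    return str(fn(**tool_call["arguments"]))
+
+# e.g. run_tool({"name": "get_current_temperature", "arguments": {"location": "Paris, France"}}) -> "22.0"
+```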
+
+After that, you can `generate()` again to let the model use the tool result in the chat:
+
+```python
+text = tokenizer.apply_chat_template(messages, tools=[get_current_temperature], add_generation_prompt=True, tokenize=False)
+inputs = tokenizer(text, return_tensors="pt", add_special_tokens=False).to(model.device)
+outputs = model.generate(**inputs, max_new_tokens=512)
+output_text = tokenizer.batch_decode(outputs)[0][len(text):]
+```
+
+This should result in the string -
+```
+'The current temperature in Paris is 22.0 degrees.<|eot_id|>'
+```
+
+## License
+This repository and the model weights are licensed under the [Llama 3.1 Community License Agreement](https://github.com/meta-llama/llama-models/blob/main/models/llama3_1/LICENSE) (Llama models' default license agreement).
+
+## Contact
+If you would like to reach out to our team, send an email to [contact@deepcogito.com](mailto:contact@deepcogito.com).
diff --git a/chat_template.jinja b/chat_template.jinja
new file mode 100644
index 0000000000000000000000000000000000000000..55dbce59ab4b279c6ba3a91777712dbb9e295cc0
--- /dev/null
+++ b/chat_template.jinja
@@ -0,0 +1,123 @@
+{{- bos_token }}
+{%- if not tools is defined %}
+ {%- set tools = none %}
+{%- endif %}
+{%- if not enable_thinking is defined %}
+ {%- set enable_thinking = false %}
+{%- endif %}
+{#- This block extracts the system message, so we can slot it into the right place. #}
+{%- if messages[0]['role'] == 'system' %}
+ {%- set system_message = messages[0]['content']|trim %}
+ {%- set messages = messages[1:] %}
+{%- else %}
+ {%- set system_message = "" %}
+{%- endif %}
+{#- Set the system message. If enable_thinking is true, add the "Enable deep thinking subroutine." #}
+{%- if enable_thinking %}
+ {%- if system_message != "" %}
+ {%- set system_message = "Enable deep thinking subroutine.
+
+" ~ system_message %}
+ {%- else %}
+ {%- set system_message = "Enable deep thinking subroutine." %}
+ {%- endif %}
+{%- endif %}
+{#- Set the system message. In case there are tools present, add them to the system message. #}
+{%- if tools is not none or system_message != '' %}
+ {{- "<|start_header_id|>system<|end_header_id|>
+
+" }}
+ {{- system_message }}
+ {%- if tools is not none %}
+ {%- if system_message != "" %}
+ {{- "
+
+" }}
+ {%- endif %}
+ {{- "Available Tools:
+" }}
+ {%- for t in tools %}
+ {{- t | tojson(indent=4) }}
+ {{- "
+
+" }}
+ {%- endfor %}
+ {%- endif %}
+ {{- "<|eot_id|>" }}
+{%- endif %}
+
+{#- Rest of the messages #}
+{%- for message in messages %}
+ {#- The special cases are when the message is from a tool (via role ipython/tool/tool_results) or when the message is from the assistant, but has "tool_calls". If not, we add the message directly as usual. #}
+ {#- Case 1 - Usual, non tool related message. #}
+ {%- if not (message.role == "ipython" or message.role == "tool" or message.role == "tool_results" or (message.tool_calls is defined and message.tool_calls is not none)) %}
+ {{- '<|start_header_id|>' + message['role'] + '<|end_header_id|>
+
+' }}
+ {%- if message['content'] is string %}
+ {{- message['content'] | trim }}
+ {%- else %}
+ {%- for item in message['content'] %}
+ {%- if item.type == 'text' %}
+ {{- item.text | trim }}
+ {%- endif %}
+ {%- endfor %}
+ {%- endif %}
+ {{- '<|eot_id|>' }}
+
+ {#- Case 2 - the response is from the assistant, but has a tool call returned. The assistant may also have returned some content along with the tool call. #}
+ {%- elif message.tool_calls is defined and message.tool_calls is not none %}
+ {{- "<|start_header_id|>assistant<|end_header_id|>
+
+" }}
+ {%- if message['content'] is string %}
+ {{- message['content'] | trim }}
+ {%- else %}
+ {%- for item in message['content'] %}
+ {%- if item.type == 'text' %}
+ {{- item.text | trim }}
+ {%- if item.text | trim != "" %}
+ {{- "
+
+" }}
+ {%- endif %}
+ {%- endif %}
+ {%- endfor %}
+ {%- endif %}
+ {{- "[" }}
+ {%- for tool_call in message.tool_calls %}
+ {%- set out = tool_call.function|tojson %}
+ {%- if not tool_call.id is defined %}
+ {{- out }}
+ {%- else %}
+ {{- out[:-1] }}
+ {{- ', "id": "' + tool_call.id + '"}' }}
+ {%- endif %}
+ {%- if not loop.last %}
+ {{- ", " }}
+ {%- else %}
+ {{- "]<|eot_id|>" }}
+ {%- endif %}
+ {%- endfor %}
+
+ {#- Case 3 - the response is from a tool call. The tool call may have an id associated with it as well. If it does, we add it to the prompt. #}
+ {%- elif message.role == "ipython" or message["role"] == "tool_results" or message["role"] == "tool" %}
+ {{- "<|start_header_id|>ipython<|end_header_id|>
+
+" }}
+ {%- if message.tool_call_id is defined and message.tool_call_id != '' %}
+ {{- '{"content": ' + (message.content | tojson) + ', "call_id": "' + message.tool_call_id + '"}' }}
+ {%- else %}
+ {{- '{"content": ' + (message.content | tojson) + '}' }}
+ {%- endif %}
+ {{- "<|eot_id|>" }}
+ {%- endif %}
+{%- endfor %}
+{%- if add_generation_prompt %}
+ {{- '<|start_header_id|>assistant<|end_header_id|>
+
+' }}
+ {%- if enable_thinking %}
+ {{- '\n' }}
+ {%- endif %}
+{%- endif %}
\ No newline at end of file
diff --git a/config.json b/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..14427e42ddbf74021cfe713049cd03a90f6ded04
--- /dev/null
+++ b/config.json
@@ -0,0 +1,35 @@
+{
+ "architectures": [
+ "LlamaForCausalLM"
+ ],
+ "attention_bias": false,
+ "attention_dropout": 0.0,
+ "bos_token_id": 128000,
+ "eos_token_id": 128001,
+ "head_dim": 128,
+ "hidden_act": "silu",
+ "hidden_size": 16384,
+ "initializer_range": 0.02,
+ "intermediate_size": 53248,
+ "max_position_embeddings": 131072,
+ "mlp_bias": false,
+ "model_type": "llama",
+ "num_attention_heads": 128,
+ "num_hidden_layers": 126,
+ "num_key_value_heads": 8,
+ "pretraining_tp": 1,
+ "rms_norm_eps": 1e-05,
+ "rope_scaling": {
+ "factor": 8.0,
+ "high_freq_factor": 4.0,
+ "low_freq_factor": 1.0,
+ "original_max_position_embeddings": 8192,
+ "rope_type": "llama3"
+ },
+ "rope_theta": 500000.0,
+ "tie_word_embeddings": false,
+ "torch_dtype": "bfloat16",
+ "transformers_version": "4.53.0",
+ "use_cache": true,
+ "vocab_size": 128256
+}
diff --git a/model-00006-of-00191.safetensors b/model-00006-of-00191.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..39773b0cc9dc1a403f90b2f45262a3d835b22efb
--- /dev/null
+++ b/model-00006-of-00191.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ec7117f7539c3e627aa56c26436de62cafd82b75abd058e097bc7b1207c7e664
+size 3489661200
diff --git a/model-00012-of-00191.safetensors b/model-00012-of-00191.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..f33790d1e50a89de92b593283ba4666b845b3a32
--- /dev/null
+++ b/model-00012-of-00191.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:366926d3047c0bacd272ed15c55ce86be399cf99dba82f0eb302b588ce3f747f
+size 3489661200
diff --git a/model-00030-of-00191.safetensors b/model-00030-of-00191.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..a2d967b8d75f8c71206dabbacadfca8f147036fb
--- /dev/null
+++ b/model-00030-of-00191.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:672f353bbdabbac38030b599f352dd54f3bc5a60582d1d6849e5f41cba6651e5
+size 3489661200
diff --git a/model-00033-of-00191.safetensors b/model-00033-of-00191.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..5241a0a7ec0eb2678d70dee90f5c11fc7ed6f42d
--- /dev/null
+++ b/model-00033-of-00191.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ea0e4d1677e17bdb346d43621098849236f0bfd96461ca75a5b7fb419a6f01b1
+size 3489661200
diff --git a/model-00039-of-00191.safetensors b/model-00039-of-00191.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..2f0e51fbc5fc3c9465a54898bb1c883bb548c994
--- /dev/null
+++ b/model-00039-of-00191.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4c23206fa7c9c051ed121b4031b7c78b33d3f29c9828e783013ac24652ea547e
+size 3489661200
diff --git a/model-00042-of-00191.safetensors b/model-00042-of-00191.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..7df1b9d5f571dbd5ad7a9c608885bac6c3efe495
--- /dev/null
+++ b/model-00042-of-00191.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:088b827b5beb35b1d61c38156695db8ee2063250beeaaa390a139f48142dab4c
+size 3489661200
diff --git a/model-00048-of-00191.safetensors b/model-00048-of-00191.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..c2a272c19b84b63a042f5b1353cca23802a11f46
--- /dev/null
+++ b/model-00048-of-00191.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:46b9cd930abd74e1da8237b8762c362391dbb90dc00712c1b186d1a1e8fdecaf
+size 3489661200
diff --git a/model-00057-of-00191.safetensors b/model-00057-of-00191.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..80ce6ec33658cd1e0cd9ea4af658f3556d8f600b
--- /dev/null
+++ b/model-00057-of-00191.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c0cedacbedaf03c0bbfca01ad3076f7a20c555e404398787c127cba66471bf90
+size 3489661200
diff --git a/model-00063-of-00191.safetensors b/model-00063-of-00191.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..cfeb1d57919e0eb5737ed78c7f15c6f73965184c
--- /dev/null
+++ b/model-00063-of-00191.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:3c72cd81f417104b40b3c098b4c58e9fb82cffc52b3f3a9a8b9a1cd1f8af7fa3
+size 3489661200
diff --git a/model-00069-of-00191.safetensors b/model-00069-of-00191.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..50dd88fe2612a05814d5323468506c885f13317f
--- /dev/null
+++ b/model-00069-of-00191.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9cee12d9e2223e2b3f8db76e9ccb19e9d7cadfaf09b1a0fbfae0216f0de771d5
+size 3489661200
diff --git a/model-00078-of-00191.safetensors b/model-00078-of-00191.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..403909ba1664f8599dd3c12cf18470e42e60d339
--- /dev/null
+++ b/model-00078-of-00191.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d3435994ba0c2586db36bb2085c0a3b30c9e4da694dc0e628ef7bd190f09a617
+size 3489661200
diff --git a/model-00081-of-00191.safetensors b/model-00081-of-00191.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..b09880a640a2c7c671b6dfc7256c2dd51d751f93
--- /dev/null
+++ b/model-00081-of-00191.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:eb275a89b0f511f20b00842c336724847466128269d46ea6d556f992e5751128
+size 3489661200
diff --git a/model-00087-of-00191.safetensors b/model-00087-of-00191.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..673cd36754104b391b7127a8ea4c12f0f4498571
--- /dev/null
+++ b/model-00087-of-00191.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:5c4ea6d4e911f073449503ab7fef80b5ac1e113bddbb751642116e43a65b7a32
+size 3489661200
diff --git a/model-00090-of-00191.safetensors b/model-00090-of-00191.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..ecf678e12a94fba43068df2904fc674b4e1c1bd4
--- /dev/null
+++ b/model-00090-of-00191.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b1b52d143846566e17811061e183859c87390b1c8821ec8f2d5f370faf9337c5
+size 3489661200
diff --git a/model-00096-of-00191.safetensors b/model-00096-of-00191.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..39087b6a84d3da9722390aa0614f6a991062fa50
--- /dev/null
+++ b/model-00096-of-00191.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:5bf962497af1c2b721be44e91b8784d4eea01c64ee59424cd17272ab7623583f
+size 3489661200
diff --git a/model-00099-of-00191.safetensors b/model-00099-of-00191.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..9896c569bace6277d3e1f3648d882a17c2694688
--- /dev/null
+++ b/model-00099-of-00191.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:98449d4c280c56da4f40cb2ec29ae38c8f631e0bf814aff5e5a09028a0f909d8
+size 3489661200
diff --git a/model-00102-of-00191.safetensors b/model-00102-of-00191.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..8d9336a8a38af146efc5a3e931df6c68515aef75
--- /dev/null
+++ b/model-00102-of-00191.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:30aaaf067d24c655846938a1b54e317df54e36fe0ee8ad7c69bfd8d0fd3dede9
+size 3489661200
diff --git a/model-00105-of-00191.safetensors b/model-00105-of-00191.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..b5dc2f02c1a832b96cadb7d359e70e5f1edbb3fa
--- /dev/null
+++ b/model-00105-of-00191.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:bcd0f94ae6eabb560103a4dc4d1f6013a0611be27d041c36ef3ba9efcbfa7fb2
+size 3489661200
diff --git a/model-00108-of-00191.safetensors b/model-00108-of-00191.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..51be17f5a64ec4b2b74a551fe13a9cac3db6d0bb
--- /dev/null
+++ b/model-00108-of-00191.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:7abc40730a2ed0acc7f1cfc7a6a06f22c5999713f1ea77f2795ed15db0c2bfc3
+size 3489661200
diff --git a/model-00111-of-00191.safetensors b/model-00111-of-00191.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..b0a9a70444dd413838e8bbbfe0744c92aabe34a0
--- /dev/null
+++ b/model-00111-of-00191.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f72a0159bf0c0155a133d18a6069a6a66b44e5296e7000d9d8f54a1cfa2dd35d
+size 3489661200
diff --git a/model-00117-of-00191.safetensors b/model-00117-of-00191.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..bdc60eda27ac4922bed198f7aa0415c25c6d9b7b
--- /dev/null
+++ b/model-00117-of-00191.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9751d1d3e7217dd30b45aa533c3776c3c0644e89e13c9a6c51a80bed75f30e94
+size 3489661200
diff --git a/model-00120-of-00191.safetensors b/model-00120-of-00191.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..ace639dc6b87ee2b024570d551433a8b73f97b35
--- /dev/null
+++ b/model-00120-of-00191.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c95330f5dd9976868dd40599d5a79a4f892fccb442612303eacee71e02ab3d9c
+size 3489661200
diff --git a/model-00123-of-00191.safetensors b/model-00123-of-00191.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..e080a8911c5fe76c376d0896a752f1e287c0b0f8
--- /dev/null
+++ b/model-00123-of-00191.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:640f7bb25eb6b1d3cc2b5bbd06b040014c4a6a96dd72c201e671fe768450e5c7
+size 3489661200
diff --git a/model-00126-of-00191.safetensors b/model-00126-of-00191.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..52d175fb76974f26e14cf9706fc66b7270cd3bb5
--- /dev/null
+++ b/model-00126-of-00191.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a1b35c41fa07fe5ad203e5d3b81cbf38b1ebe359ce62acad16d8a3ed0f67e43f
+size 3489661192
diff --git a/model-00129-of-00191.safetensors b/model-00129-of-00191.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..a9339bd39a04156d452b0f74a2295f9e9debb79a
--- /dev/null
+++ b/model-00129-of-00191.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:2bf6a816a53a0f1372d526d6fef5966ef09662993b04bdd81b58ce01e4f08d0e
+size 3489661200
diff --git a/model-00132-of-00191.safetensors b/model-00132-of-00191.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..c3a65809128894a348e52781e0c862607ac542a5
--- /dev/null
+++ b/model-00132-of-00191.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:458d060d7d2160bd50baa0575747ff78f9d15640d7f1ef0c67fc98e464b4e5b1
+size 3489661200
diff --git a/model-00138-of-00191.safetensors b/model-00138-of-00191.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..1c5d2fc33fcd84d350519be15be44ecbe0bcb0fd
--- /dev/null
+++ b/model-00138-of-00191.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:95b9c644d7e4f8ac6d0806555d2678c2df7503efcd7c82e3a403084d1e185395
+size 3489661200
diff --git a/model-00141-of-00191.safetensors b/model-00141-of-00191.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..bde40e60319dacf512d21411448540321b045386
--- /dev/null
+++ b/model-00141-of-00191.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:24b6c462d7dfbc7035ce31f10bdf9c675307836f3cb7238bcbf78d619f34ca2d
+size 3489661200
diff --git a/model-00144-of-00191.safetensors b/model-00144-of-00191.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..fe934a3985c6f44c4444880f4d58d25002391548
--- /dev/null
+++ b/model-00144-of-00191.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:82a2c0b7cc1c9bb5335578f599ed260dd3f4bf0d364deceecbd602eb08b74282
+size 3489661200
diff --git a/model-00147-of-00191.safetensors b/model-00147-of-00191.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..46010c7b4ef8920e8908a15b7e4ece8b3f16fbb5
--- /dev/null
+++ b/model-00147-of-00191.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ca2f70b9f1aea9551ada5446df65c5668fa7d2949d6e772e6cc7000a299a8993
+size 3489661200
diff --git a/model-00150-of-00191.safetensors b/model-00150-of-00191.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..b7be9a1ba62b44f0ac11a33d7390466e87d5cbcd
--- /dev/null
+++ b/model-00150-of-00191.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:67a43b92cce9890fd1e40849e279faab00e2f9f28f7f0ba140a8808ba16db473
+size 3489661200
diff --git a/model-00153-of-00191.safetensors b/model-00153-of-00191.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..8d6101b4124073923ccf779efdc987a757c73cdb
--- /dev/null
+++ b/model-00153-of-00191.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:055a988171dfde7121206f3e65419a577edc167452a17e4c75af2f34d2c8131a
+size 3489661200
diff --git a/model-00156-of-00191.safetensors b/model-00156-of-00191.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..a834c4d291fe71bf467d352c25bd649599f44caf
--- /dev/null
+++ b/model-00156-of-00191.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e80ea7e7bcac3b57828ca433a411edfabbd0a48b3d7c8383638b8b6683fb1bdb
+size 3489661200
diff --git a/model-00159-of-00191.safetensors b/model-00159-of-00191.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..e3e97a8055c8ea42c85cfc9149ba3cbb1cdd3ef5
--- /dev/null
+++ b/model-00159-of-00191.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:37965d49f4206b790f491f63b74eca59daaeef42b12bf24cf8179e0e5a42ddd8
+size 3489661192
diff --git a/model-00162-of-00191.safetensors b/model-00162-of-00191.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..d44f301d5a3b08b9200fe55ac394619df4d6d764
--- /dev/null
+++ b/model-00162-of-00191.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0d6d6c1ec27f3bc8fddb918ce7126a6514f2f52326d10cacd8a6938b3ca3f1a5
+size 3489661200
diff --git a/model-00165-of-00191.safetensors b/model-00165-of-00191.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..aa2c32fde4c7265aec74ce83831b0c81b149b800
--- /dev/null
+++ b/model-00165-of-00191.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:3628daf1f504e7c2ba3872193f722b866245aef998db5dc83a4d4a1345efaf4a
+size 3489661200
diff --git a/model-00168-of-00191.safetensors b/model-00168-of-00191.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..30f99954f862a4b33100bcb69087cd6a961bb9e3
--- /dev/null
+++ b/model-00168-of-00191.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:311eed7129fe27a252ca6e0f5fe36df8f7e7c4fc001e7f6ea737c30247cf8130
+size 3489661200
diff --git a/model-00174-of-00191.safetensors b/model-00174-of-00191.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..61786ca479df5468359bcaab920ae11aa9fcd758
--- /dev/null
+++ b/model-00174-of-00191.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:7963a6897f738f815434a818445c0d84770a62215d5c41ac883d9f4e5c9e2307
+size 3489661200
diff --git a/model-00177-of-00191.safetensors b/model-00177-of-00191.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..7b25b711791fb179477785d4099be1d86a42691c
--- /dev/null
+++ b/model-00177-of-00191.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:eea750cc111f6ea1ef6091aca7e20fd6297266a2d5a8bbc389cb9f298d1962db
+size 3489661200
diff --git a/model-00180-of-00191.safetensors b/model-00180-of-00191.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..a6d4cc92232350ed99b2420dd3761d06e4f8cfa3
--- /dev/null
+++ b/model-00180-of-00191.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:272929b0b3d7bc3b3ff0ee7a34b3f9fb142d72ace921a3f2f1d581eac56622e1
+size 3489661200
diff --git a/model-00183-of-00191.safetensors b/model-00183-of-00191.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..0e8a619c8c701b61db01b280a01f334415f1eb9a
--- /dev/null
+++ b/model-00183-of-00191.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:aa7ea3fc49b7d231fb5d4f6614fb592d79af05f29da05a843d9c02e93b708a57
+size 3489661200
diff --git a/model-00186-of-00191.safetensors b/model-00186-of-00191.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..274e082b46eab9592e429c1abfd2e00d154551a1
--- /dev/null
+++ b/model-00186-of-00191.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4fddb9f771a82e55b50609d3f89cd30349d98c662b58c3b0fe0433db3142ef46
+size 3489661200
diff --git a/model-00189-of-00191.safetensors b/model-00189-of-00191.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..0c05b70e415310756ad2f752ef0e78779975e7af
--- /dev/null
+++ b/model-00189-of-00191.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:53ad6a84f4eb63d8120efc7859208c5226575c81854795dc97e6a9db072fa704
+size 3489661200
diff --git a/model.safetensors.index.json b/model.safetensors.index.json
new file mode 100644
index 0000000000000000000000000000000000000000..c7787a22572ed115dae4471403e6571f97350737
--- /dev/null
+++ b/model.safetensors.index.json
@@ -0,0 +1 @@
+{"weight_map": {"lm_head.weight": "model-00001-of-00191.safetensors", "model.embed_tokens.weight": "model-00002-of-00191.safetensors", "model.layers.0.input_layernorm.weight": "model-00002-of-00191.safetensors", "model.layers.0.mlp.down_proj.weight": "model-00003-of-00191.safetensors", "model.layers.0.mlp.gate_proj.weight": "model-00003-of-00191.safetensors", "model.layers.0.mlp.up_proj.weight": "model-00004-of-00191.safetensors", "model.layers.0.post_attention_layernorm.weight": "model-00004-of-00191.safetensors", "model.layers.0.self_attn.k_proj.weight": "model-00004-of-00191.safetensors", "model.layers.0.self_attn.o_proj.weight": "model-00004-of-00191.safetensors", "model.layers.0.self_attn.q_proj.weight": "model-00004-of-00191.safetensors", "model.layers.0.self_attn.v_proj.weight": "model-00004-of-00191.safetensors", "model.layers.1.input_layernorm.weight": "model-00004-of-00191.safetensors", "model.layers.1.mlp.down_proj.weight": "model-00004-of-00191.safetensors", "model.layers.1.mlp.gate_proj.weight": "model-00005-of-00191.safetensors", "model.layers.1.mlp.up_proj.weight": "model-00005-of-00191.safetensors", "model.layers.1.post_attention_layernorm.weight": "model-00005-of-00191.safetensors", "model.layers.1.self_attn.k_proj.weight": "model-00005-of-00191.safetensors", "model.layers.1.self_attn.o_proj.weight": "model-00005-of-00191.safetensors", "model.layers.1.self_attn.q_proj.weight": "model-00005-of-00191.safetensors", "model.layers.1.self_attn.v_proj.weight": "model-00005-of-00191.safetensors", "model.layers.10.input_layernorm.weight": "model-00005-of-00191.safetensors", "model.layers.10.mlp.down_proj.weight": "model-00006-of-00191.safetensors", "model.layers.10.mlp.gate_proj.weight": "model-00006-of-00191.safetensors", "model.layers.10.mlp.up_proj.weight": "model-00007-of-00191.safetensors", "model.layers.10.post_attention_layernorm.weight": "model-00007-of-00191.safetensors", "model.layers.10.self_attn.k_proj.weight": "model-00007-of-00191.safetensors", "model.layers.10.self_attn.o_proj.weight": "model-00007-of-00191.safetensors", "model.layers.10.self_attn.q_proj.weight": "model-00007-of-00191.safetensors", "model.layers.10.self_attn.v_proj.weight": "model-00007-of-00191.safetensors", "model.layers.100.input_layernorm.weight": "model-00007-of-00191.safetensors", "model.layers.100.mlp.down_proj.weight": "model-00007-of-00191.safetensors", "model.layers.100.mlp.gate_proj.weight": "model-00008-of-00191.safetensors", "model.layers.100.mlp.up_proj.weight": "model-00008-of-00191.safetensors", "model.layers.100.post_attention_layernorm.weight": "model-00008-of-00191.safetensors", "model.layers.100.self_attn.k_proj.weight": "model-00008-of-00191.safetensors", "model.layers.100.self_attn.o_proj.weight": "model-00008-of-00191.safetensors", "model.layers.100.self_attn.q_proj.weight": "model-00008-of-00191.safetensors", "model.layers.100.self_attn.v_proj.weight": "model-00008-of-00191.safetensors", "model.layers.101.input_layernorm.weight": "model-00008-of-00191.safetensors", "model.layers.101.mlp.down_proj.weight": "model-00009-of-00191.safetensors", "model.layers.101.mlp.gate_proj.weight": "model-00009-of-00191.safetensors", "model.layers.101.mlp.up_proj.weight": "model-00010-of-00191.safetensors", "model.layers.101.post_attention_layernorm.weight": "model-00010-of-00191.safetensors", "model.layers.101.self_attn.k_proj.weight": "model-00010-of-00191.safetensors", "model.layers.101.self_attn.o_proj.weight": "model-00010-of-00191.safetensors", "model.layers.101.self_attn.q_proj.weight": 
"model-00010-of-00191.safetensors", "model.layers.101.self_attn.v_proj.weight": "model-00010-of-00191.safetensors", "model.layers.102.input_layernorm.weight": "model-00010-of-00191.safetensors", "model.layers.102.mlp.down_proj.weight": "model-00010-of-00191.safetensors", "model.layers.102.mlp.gate_proj.weight": "model-00011-of-00191.safetensors", "model.layers.102.mlp.up_proj.weight": "model-00011-of-00191.safetensors", "model.layers.102.post_attention_layernorm.weight": "model-00011-of-00191.safetensors", "model.layers.102.self_attn.k_proj.weight": "model-00011-of-00191.safetensors", "model.layers.102.self_attn.o_proj.weight": "model-00011-of-00191.safetensors", "model.layers.102.self_attn.q_proj.weight": "model-00011-of-00191.safetensors", "model.layers.102.self_attn.v_proj.weight": "model-00011-of-00191.safetensors", "model.layers.103.input_layernorm.weight": "model-00011-of-00191.safetensors", "model.layers.103.mlp.down_proj.weight": "model-00012-of-00191.safetensors", "model.layers.103.mlp.gate_proj.weight": "model-00012-of-00191.safetensors", "model.layers.103.mlp.up_proj.weight": "model-00013-of-00191.safetensors", "model.layers.103.post_attention_layernorm.weight": "model-00013-of-00191.safetensors", "model.layers.103.self_attn.k_proj.weight": "model-00013-of-00191.safetensors", "model.layers.103.self_attn.o_proj.weight": "model-00013-of-00191.safetensors", "model.layers.103.self_attn.q_proj.weight": "model-00013-of-00191.safetensors", "model.layers.103.self_attn.v_proj.weight": "model-00013-of-00191.safetensors", "model.layers.104.input_layernorm.weight": "model-00013-of-00191.safetensors", "model.layers.104.mlp.down_proj.weight": "model-00013-of-00191.safetensors", "model.layers.104.mlp.gate_proj.weight": "model-00014-of-00191.safetensors", "model.layers.104.mlp.up_proj.weight": "model-00014-of-00191.safetensors", "model.layers.104.post_attention_layernorm.weight": "model-00014-of-00191.safetensors", "model.layers.104.self_attn.k_proj.weight": "model-00014-of-00191.safetensors", "model.layers.104.self_attn.o_proj.weight": "model-00014-of-00191.safetensors", "model.layers.104.self_attn.q_proj.weight": "model-00014-of-00191.safetensors", "model.layers.104.self_attn.v_proj.weight": "model-00014-of-00191.safetensors", "model.layers.105.input_layernorm.weight": "model-00014-of-00191.safetensors", "model.layers.105.mlp.down_proj.weight": "model-00015-of-00191.safetensors", "model.layers.105.mlp.gate_proj.weight": "model-00015-of-00191.safetensors", "model.layers.105.mlp.up_proj.weight": "model-00016-of-00191.safetensors", "model.layers.105.post_attention_layernorm.weight": "model-00016-of-00191.safetensors", "model.layers.105.self_attn.k_proj.weight": "model-00016-of-00191.safetensors", "model.layers.105.self_attn.o_proj.weight": "model-00016-of-00191.safetensors", "model.layers.105.self_attn.q_proj.weight": "model-00016-of-00191.safetensors", "model.layers.105.self_attn.v_proj.weight": "model-00016-of-00191.safetensors", "model.layers.106.input_layernorm.weight": "model-00016-of-00191.safetensors", "model.layers.106.mlp.down_proj.weight": "model-00016-of-00191.safetensors", "model.layers.106.mlp.gate_proj.weight": "model-00017-of-00191.safetensors", "model.layers.106.mlp.up_proj.weight": "model-00017-of-00191.safetensors", "model.layers.106.post_attention_layernorm.weight": "model-00017-of-00191.safetensors", "model.layers.106.self_attn.k_proj.weight": "model-00017-of-00191.safetensors", "model.layers.106.self_attn.o_proj.weight": "model-00017-of-00191.safetensors", 
"model.layers.106.self_attn.q_proj.weight": "model-00017-of-00191.safetensors", "model.layers.106.self_attn.v_proj.weight": "model-00017-of-00191.safetensors", "model.layers.107.input_layernorm.weight": "model-00017-of-00191.safetensors", "model.layers.107.mlp.down_proj.weight": "model-00018-of-00191.safetensors", "model.layers.107.mlp.gate_proj.weight": "model-00018-of-00191.safetensors", "model.layers.107.mlp.up_proj.weight": "model-00019-of-00191.safetensors", "model.layers.107.post_attention_layernorm.weight": "model-00019-of-00191.safetensors", "model.layers.107.self_attn.k_proj.weight": "model-00019-of-00191.safetensors", "model.layers.107.self_attn.o_proj.weight": "model-00019-of-00191.safetensors", "model.layers.107.self_attn.q_proj.weight": "model-00019-of-00191.safetensors", "model.layers.107.self_attn.v_proj.weight": "model-00019-of-00191.safetensors", "model.layers.108.input_layernorm.weight": "model-00019-of-00191.safetensors", "model.layers.108.mlp.down_proj.weight": "model-00019-of-00191.safetensors", "model.layers.108.mlp.gate_proj.weight": "model-00020-of-00191.safetensors", "model.layers.108.mlp.up_proj.weight": "model-00020-of-00191.safetensors", "model.layers.108.post_attention_layernorm.weight": "model-00020-of-00191.safetensors", "model.layers.108.self_attn.k_proj.weight": "model-00020-of-00191.safetensors", "model.layers.108.self_attn.o_proj.weight": "model-00020-of-00191.safetensors", "model.layers.108.self_attn.q_proj.weight": "model-00020-of-00191.safetensors", "model.layers.108.self_attn.v_proj.weight": "model-00020-of-00191.safetensors", "model.layers.109.input_layernorm.weight": "model-00020-of-00191.safetensors", "model.layers.109.mlp.down_proj.weight": "model-00021-of-00191.safetensors", "model.layers.109.mlp.gate_proj.weight": "model-00021-of-00191.safetensors", "model.layers.109.mlp.up_proj.weight": "model-00022-of-00191.safetensors", "model.layers.109.post_attention_layernorm.weight": "model-00022-of-00191.safetensors", "model.layers.109.self_attn.k_proj.weight": "model-00022-of-00191.safetensors", "model.layers.109.self_attn.o_proj.weight": "model-00022-of-00191.safetensors", "model.layers.109.self_attn.q_proj.weight": "model-00022-of-00191.safetensors", "model.layers.109.self_attn.v_proj.weight": "model-00022-of-00191.safetensors", "model.layers.11.input_layernorm.weight": "model-00022-of-00191.safetensors", "model.layers.11.mlp.down_proj.weight": "model-00022-of-00191.safetensors", "model.layers.11.mlp.gate_proj.weight": "model-00023-of-00191.safetensors", "model.layers.11.mlp.up_proj.weight": "model-00023-of-00191.safetensors", "model.layers.11.post_attention_layernorm.weight": "model-00023-of-00191.safetensors", "model.layers.11.self_attn.k_proj.weight": "model-00023-of-00191.safetensors", "model.layers.11.self_attn.o_proj.weight": "model-00023-of-00191.safetensors", "model.layers.11.self_attn.q_proj.weight": "model-00023-of-00191.safetensors", "model.layers.11.self_attn.v_proj.weight": "model-00023-of-00191.safetensors", "model.layers.110.input_layernorm.weight": "model-00023-of-00191.safetensors", "model.layers.110.mlp.down_proj.weight": "model-00024-of-00191.safetensors", "model.layers.110.mlp.gate_proj.weight": "model-00024-of-00191.safetensors", "model.layers.110.mlp.up_proj.weight": "model-00025-of-00191.safetensors", "model.layers.110.post_attention_layernorm.weight": "model-00025-of-00191.safetensors", "model.layers.110.self_attn.k_proj.weight": "model-00025-of-00191.safetensors", "model.layers.110.self_attn.o_proj.weight": 
"model-00025-of-00191.safetensors", "model.layers.110.self_attn.q_proj.weight": "model-00025-of-00191.safetensors", "model.layers.110.self_attn.v_proj.weight": "model-00025-of-00191.safetensors", "model.layers.111.input_layernorm.weight": "model-00025-of-00191.safetensors", "model.layers.111.mlp.down_proj.weight": "model-00025-of-00191.safetensors", "model.layers.111.mlp.gate_proj.weight": "model-00026-of-00191.safetensors", "model.layers.111.mlp.up_proj.weight": "model-00026-of-00191.safetensors", "model.layers.111.post_attention_layernorm.weight": "model-00026-of-00191.safetensors", "model.layers.111.self_attn.k_proj.weight": "model-00026-of-00191.safetensors", "model.layers.111.self_attn.o_proj.weight": "model-00026-of-00191.safetensors", "model.layers.111.self_attn.q_proj.weight": "model-00026-of-00191.safetensors", "model.layers.111.self_attn.v_proj.weight": "model-00026-of-00191.safetensors", "model.layers.112.input_layernorm.weight": "model-00026-of-00191.safetensors", "model.layers.112.mlp.down_proj.weight": "model-00027-of-00191.safetensors", "model.layers.112.mlp.gate_proj.weight": "model-00027-of-00191.safetensors", "model.layers.112.mlp.up_proj.weight": "model-00028-of-00191.safetensors", "model.layers.112.post_attention_layernorm.weight": "model-00028-of-00191.safetensors", "model.layers.112.self_attn.k_proj.weight": "model-00028-of-00191.safetensors", "model.layers.112.self_attn.o_proj.weight": "model-00028-of-00191.safetensors", "model.layers.112.self_attn.q_proj.weight": "model-00028-of-00191.safetensors", "model.layers.112.self_attn.v_proj.weight": "model-00028-of-00191.safetensors", "model.layers.113.input_layernorm.weight": "model-00028-of-00191.safetensors", "model.layers.113.mlp.down_proj.weight": "model-00028-of-00191.safetensors", "model.layers.113.mlp.gate_proj.weight": "model-00029-of-00191.safetensors", "model.layers.113.mlp.up_proj.weight": "model-00029-of-00191.safetensors", "model.layers.113.post_attention_layernorm.weight": "model-00029-of-00191.safetensors", "model.layers.113.self_attn.k_proj.weight": "model-00029-of-00191.safetensors", "model.layers.113.self_attn.o_proj.weight": "model-00029-of-00191.safetensors", "model.layers.113.self_attn.q_proj.weight": "model-00029-of-00191.safetensors", "model.layers.113.self_attn.v_proj.weight": "model-00029-of-00191.safetensors", "model.layers.114.input_layernorm.weight": "model-00029-of-00191.safetensors", "model.layers.114.mlp.down_proj.weight": "model-00030-of-00191.safetensors", "model.layers.114.mlp.gate_proj.weight": "model-00030-of-00191.safetensors", "model.layers.114.mlp.up_proj.weight": "model-00031-of-00191.safetensors", "model.layers.114.post_attention_layernorm.weight": "model-00031-of-00191.safetensors", "model.layers.114.self_attn.k_proj.weight": "model-00031-of-00191.safetensors", "model.layers.114.self_attn.o_proj.weight": "model-00031-of-00191.safetensors", "model.layers.114.self_attn.q_proj.weight": "model-00031-of-00191.safetensors", "model.layers.114.self_attn.v_proj.weight": "model-00031-of-00191.safetensors", "model.layers.115.input_layernorm.weight": "model-00031-of-00191.safetensors", "model.layers.115.mlp.down_proj.weight": "model-00031-of-00191.safetensors", "model.layers.115.mlp.gate_proj.weight": "model-00032-of-00191.safetensors", "model.layers.115.mlp.up_proj.weight": "model-00032-of-00191.safetensors", "model.layers.115.post_attention_layernorm.weight": "model-00032-of-00191.safetensors", "model.layers.115.self_attn.k_proj.weight": "model-00032-of-00191.safetensors", 
"model.layers.115.self_attn.o_proj.weight": "model-00032-of-00191.safetensors", "model.layers.115.self_attn.q_proj.weight": "model-00032-of-00191.safetensors", "model.layers.115.self_attn.v_proj.weight": "model-00032-of-00191.safetensors", "model.layers.116.input_layernorm.weight": "model-00032-of-00191.safetensors", "model.layers.116.mlp.down_proj.weight": "model-00033-of-00191.safetensors", "model.layers.116.mlp.gate_proj.weight": "model-00033-of-00191.safetensors", "model.layers.116.mlp.up_proj.weight": "model-00034-of-00191.safetensors", "model.layers.116.post_attention_layernorm.weight": "model-00034-of-00191.safetensors", "model.layers.116.self_attn.k_proj.weight": "model-00034-of-00191.safetensors", "model.layers.116.self_attn.o_proj.weight": "model-00034-of-00191.safetensors", "model.layers.116.self_attn.q_proj.weight": "model-00034-of-00191.safetensors", "model.layers.116.self_attn.v_proj.weight": "model-00034-of-00191.safetensors", "model.layers.117.input_layernorm.weight": "model-00034-of-00191.safetensors", "model.layers.117.mlp.down_proj.weight": "model-00034-of-00191.safetensors", "model.layers.117.mlp.gate_proj.weight": "model-00035-of-00191.safetensors", "model.layers.117.mlp.up_proj.weight": "model-00035-of-00191.safetensors", "model.layers.117.post_attention_layernorm.weight": "model-00035-of-00191.safetensors", "model.layers.117.self_attn.k_proj.weight": "model-00035-of-00191.safetensors", "model.layers.117.self_attn.o_proj.weight": "model-00035-of-00191.safetensors", "model.layers.117.self_attn.q_proj.weight": "model-00035-of-00191.safetensors", "model.layers.117.self_attn.v_proj.weight": "model-00035-of-00191.safetensors", "model.layers.118.input_layernorm.weight": "model-00035-of-00191.safetensors", "model.layers.118.mlp.down_proj.weight": "model-00036-of-00191.safetensors", "model.layers.118.mlp.gate_proj.weight": "model-00036-of-00191.safetensors", "model.layers.118.mlp.up_proj.weight": "model-00037-of-00191.safetensors", "model.layers.118.post_attention_layernorm.weight": "model-00037-of-00191.safetensors", "model.layers.118.self_attn.k_proj.weight": "model-00037-of-00191.safetensors", "model.layers.118.self_attn.o_proj.weight": "model-00037-of-00191.safetensors", "model.layers.118.self_attn.q_proj.weight": "model-00037-of-00191.safetensors", "model.layers.118.self_attn.v_proj.weight": "model-00037-of-00191.safetensors", "model.layers.119.input_layernorm.weight": "model-00037-of-00191.safetensors", "model.layers.119.mlp.down_proj.weight": "model-00037-of-00191.safetensors", "model.layers.119.mlp.gate_proj.weight": "model-00038-of-00191.safetensors", "model.layers.119.mlp.up_proj.weight": "model-00038-of-00191.safetensors", "model.layers.119.post_attention_layernorm.weight": "model-00038-of-00191.safetensors", "model.layers.119.self_attn.k_proj.weight": "model-00038-of-00191.safetensors", "model.layers.119.self_attn.o_proj.weight": "model-00038-of-00191.safetensors", "model.layers.119.self_attn.q_proj.weight": "model-00038-of-00191.safetensors", "model.layers.119.self_attn.v_proj.weight": "model-00038-of-00191.safetensors", "model.layers.12.input_layernorm.weight": "model-00038-of-00191.safetensors", "model.layers.12.mlp.down_proj.weight": "model-00039-of-00191.safetensors", "model.layers.12.mlp.gate_proj.weight": "model-00039-of-00191.safetensors", "model.layers.12.mlp.up_proj.weight": "model-00040-of-00191.safetensors", "model.layers.12.post_attention_layernorm.weight": "model-00040-of-00191.safetensors", "model.layers.12.self_attn.k_proj.weight": 
"model-00040-of-00191.safetensors", "model.layers.12.self_attn.o_proj.weight": "model-00040-of-00191.safetensors", "model.layers.12.self_attn.q_proj.weight": "model-00040-of-00191.safetensors", "model.layers.12.self_attn.v_proj.weight": "model-00040-of-00191.safetensors", "model.layers.120.input_layernorm.weight": "model-00040-of-00191.safetensors", "model.layers.120.mlp.down_proj.weight": "model-00040-of-00191.safetensors", "model.layers.120.mlp.gate_proj.weight": "model-00041-of-00191.safetensors", "model.layers.120.mlp.up_proj.weight": "model-00041-of-00191.safetensors", "model.layers.120.post_attention_layernorm.weight": "model-00041-of-00191.safetensors", "model.layers.120.self_attn.k_proj.weight": "model-00041-of-00191.safetensors", "model.layers.120.self_attn.o_proj.weight": "model-00041-of-00191.safetensors", "model.layers.120.self_attn.q_proj.weight": "model-00041-of-00191.safetensors", "model.layers.120.self_attn.v_proj.weight": "model-00041-of-00191.safetensors", "model.layers.121.input_layernorm.weight": "model-00041-of-00191.safetensors", "model.layers.121.mlp.down_proj.weight": "model-00042-of-00191.safetensors", "model.layers.121.mlp.gate_proj.weight": "model-00042-of-00191.safetensors", "model.layers.121.mlp.up_proj.weight": "model-00043-of-00191.safetensors", "model.layers.121.post_attention_layernorm.weight": "model-00043-of-00191.safetensors", "model.layers.121.self_attn.k_proj.weight": "model-00043-of-00191.safetensors", "model.layers.121.self_attn.o_proj.weight": "model-00043-of-00191.safetensors", "model.layers.121.self_attn.q_proj.weight": "model-00043-of-00191.safetensors", "model.layers.121.self_attn.v_proj.weight": "model-00043-of-00191.safetensors", "model.layers.122.input_layernorm.weight": "model-00043-of-00191.safetensors", "model.layers.122.mlp.down_proj.weight": "model-00043-of-00191.safetensors", "model.layers.122.mlp.gate_proj.weight": "model-00044-of-00191.safetensors", "model.layers.122.mlp.up_proj.weight": "model-00044-of-00191.safetensors", "model.layers.122.post_attention_layernorm.weight": "model-00044-of-00191.safetensors", "model.layers.122.self_attn.k_proj.weight": "model-00044-of-00191.safetensors", "model.layers.122.self_attn.o_proj.weight": "model-00044-of-00191.safetensors", "model.layers.122.self_attn.q_proj.weight": "model-00044-of-00191.safetensors", "model.layers.122.self_attn.v_proj.weight": "model-00044-of-00191.safetensors", "model.layers.123.input_layernorm.weight": "model-00044-of-00191.safetensors", "model.layers.123.mlp.down_proj.weight": "model-00045-of-00191.safetensors", "model.layers.123.mlp.gate_proj.weight": "model-00045-of-00191.safetensors", "model.layers.123.mlp.up_proj.weight": "model-00046-of-00191.safetensors", "model.layers.123.post_attention_layernorm.weight": "model-00046-of-00191.safetensors", "model.layers.123.self_attn.k_proj.weight": "model-00046-of-00191.safetensors", "model.layers.123.self_attn.o_proj.weight": "model-00046-of-00191.safetensors", "model.layers.123.self_attn.q_proj.weight": "model-00046-of-00191.safetensors", "model.layers.123.self_attn.v_proj.weight": "model-00046-of-00191.safetensors", "model.layers.124.input_layernorm.weight": "model-00046-of-00191.safetensors", "model.layers.124.mlp.down_proj.weight": "model-00046-of-00191.safetensors", "model.layers.124.mlp.gate_proj.weight": "model-00047-of-00191.safetensors", "model.layers.124.mlp.up_proj.weight": "model-00047-of-00191.safetensors", "model.layers.124.post_attention_layernorm.weight": "model-00047-of-00191.safetensors", 
"model.layers.124.self_attn.k_proj.weight": "model-00047-of-00191.safetensors", "model.layers.124.self_attn.o_proj.weight": "model-00047-of-00191.safetensors", "model.layers.124.self_attn.q_proj.weight": "model-00047-of-00191.safetensors", "model.layers.124.self_attn.v_proj.weight": "model-00047-of-00191.safetensors", "model.layers.125.input_layernorm.weight": "model-00047-of-00191.safetensors", "model.layers.125.mlp.down_proj.weight": "model-00048-of-00191.safetensors", "model.layers.125.mlp.gate_proj.weight": "model-00048-of-00191.safetensors", "model.layers.125.mlp.up_proj.weight": "model-00049-of-00191.safetensors", "model.layers.125.post_attention_layernorm.weight": "model-00049-of-00191.safetensors", "model.layers.125.self_attn.k_proj.weight": "model-00049-of-00191.safetensors", "model.layers.125.self_attn.o_proj.weight": "model-00049-of-00191.safetensors", "model.layers.125.self_attn.q_proj.weight": "model-00049-of-00191.safetensors", "model.layers.125.self_attn.v_proj.weight": "model-00049-of-00191.safetensors", "model.layers.13.input_layernorm.weight": "model-00049-of-00191.safetensors", "model.layers.13.mlp.down_proj.weight": "model-00049-of-00191.safetensors", "model.layers.13.mlp.gate_proj.weight": "model-00050-of-00191.safetensors", "model.layers.13.mlp.up_proj.weight": "model-00050-of-00191.safetensors", "model.layers.13.post_attention_layernorm.weight": "model-00050-of-00191.safetensors", "model.layers.13.self_attn.k_proj.weight": "model-00050-of-00191.safetensors", "model.layers.13.self_attn.o_proj.weight": "model-00050-of-00191.safetensors", "model.layers.13.self_attn.q_proj.weight": "model-00050-of-00191.safetensors", "model.layers.13.self_attn.v_proj.weight": "model-00050-of-00191.safetensors", "model.layers.14.input_layernorm.weight": "model-00050-of-00191.safetensors", "model.layers.14.mlp.down_proj.weight": "model-00051-of-00191.safetensors", "model.layers.14.mlp.gate_proj.weight": "model-00051-of-00191.safetensors", "model.layers.14.mlp.up_proj.weight": "model-00052-of-00191.safetensors", "model.layers.14.post_attention_layernorm.weight": "model-00052-of-00191.safetensors", "model.layers.14.self_attn.k_proj.weight": "model-00052-of-00191.safetensors", "model.layers.14.self_attn.o_proj.weight": "model-00052-of-00191.safetensors", "model.layers.14.self_attn.q_proj.weight": "model-00052-of-00191.safetensors", "model.layers.14.self_attn.v_proj.weight": "model-00052-of-00191.safetensors", "model.layers.15.input_layernorm.weight": "model-00052-of-00191.safetensors", "model.layers.15.mlp.down_proj.weight": "model-00052-of-00191.safetensors", "model.layers.15.mlp.gate_proj.weight": "model-00053-of-00191.safetensors", "model.layers.15.mlp.up_proj.weight": "model-00053-of-00191.safetensors", "model.layers.15.post_attention_layernorm.weight": "model-00053-of-00191.safetensors", "model.layers.15.self_attn.k_proj.weight": "model-00053-of-00191.safetensors", "model.layers.15.self_attn.o_proj.weight": "model-00053-of-00191.safetensors", "model.layers.15.self_attn.q_proj.weight": "model-00053-of-00191.safetensors", "model.layers.15.self_attn.v_proj.weight": "model-00053-of-00191.safetensors", "model.layers.16.input_layernorm.weight": "model-00053-of-00191.safetensors", "model.layers.16.mlp.down_proj.weight": "model-00054-of-00191.safetensors", "model.layers.16.mlp.gate_proj.weight": "model-00054-of-00191.safetensors", "model.layers.16.mlp.up_proj.weight": "model-00055-of-00191.safetensors", "model.layers.16.post_attention_layernorm.weight": "model-00055-of-00191.safetensors", 
"model.layers.16.self_attn.k_proj.weight": "model-00055-of-00191.safetensors", "model.layers.16.self_attn.o_proj.weight": "model-00055-of-00191.safetensors", "model.layers.16.self_attn.q_proj.weight": "model-00055-of-00191.safetensors", "model.layers.16.self_attn.v_proj.weight": "model-00055-of-00191.safetensors", "model.layers.17.input_layernorm.weight": "model-00055-of-00191.safetensors", "model.layers.17.mlp.down_proj.weight": "model-00055-of-00191.safetensors", "model.layers.17.mlp.gate_proj.weight": "model-00056-of-00191.safetensors", "model.layers.17.mlp.up_proj.weight": "model-00056-of-00191.safetensors", "model.layers.17.post_attention_layernorm.weight": "model-00056-of-00191.safetensors", "model.layers.17.self_attn.k_proj.weight": "model-00056-of-00191.safetensors", "model.layers.17.self_attn.o_proj.weight": "model-00056-of-00191.safetensors", "model.layers.17.self_attn.q_proj.weight": "model-00056-of-00191.safetensors", "model.layers.17.self_attn.v_proj.weight": "model-00056-of-00191.safetensors", "model.layers.18.input_layernorm.weight": "model-00056-of-00191.safetensors", "model.layers.18.mlp.down_proj.weight": "model-00057-of-00191.safetensors", "model.layers.18.mlp.gate_proj.weight": "model-00057-of-00191.safetensors", "model.layers.18.mlp.up_proj.weight": "model-00058-of-00191.safetensors", "model.layers.18.post_attention_layernorm.weight": "model-00058-of-00191.safetensors", "model.layers.18.self_attn.k_proj.weight": "model-00058-of-00191.safetensors", "model.layers.18.self_attn.o_proj.weight": "model-00058-of-00191.safetensors", "model.layers.18.self_attn.q_proj.weight": "model-00058-of-00191.safetensors", "model.layers.18.self_attn.v_proj.weight": "model-00058-of-00191.safetensors", "model.layers.19.input_layernorm.weight": "model-00058-of-00191.safetensors", "model.layers.19.mlp.down_proj.weight": "model-00058-of-00191.safetensors", "model.layers.19.mlp.gate_proj.weight": "model-00059-of-00191.safetensors", "model.layers.19.mlp.up_proj.weight": "model-00059-of-00191.safetensors", "model.layers.19.post_attention_layernorm.weight": "model-00059-of-00191.safetensors", "model.layers.19.self_attn.k_proj.weight": "model-00059-of-00191.safetensors", "model.layers.19.self_attn.o_proj.weight": "model-00059-of-00191.safetensors", "model.layers.19.self_attn.q_proj.weight": "model-00059-of-00191.safetensors", "model.layers.19.self_attn.v_proj.weight": "model-00059-of-00191.safetensors", "model.layers.2.input_layernorm.weight": "model-00059-of-00191.safetensors", "model.layers.2.mlp.down_proj.weight": "model-00060-of-00191.safetensors", "model.layers.2.mlp.gate_proj.weight": "model-00060-of-00191.safetensors", "model.layers.2.mlp.up_proj.weight": "model-00061-of-00191.safetensors", "model.layers.2.post_attention_layernorm.weight": "model-00061-of-00191.safetensors", "model.layers.2.self_attn.k_proj.weight": "model-00061-of-00191.safetensors", "model.layers.2.self_attn.o_proj.weight": "model-00061-of-00191.safetensors", "model.layers.2.self_attn.q_proj.weight": "model-00061-of-00191.safetensors", "model.layers.2.self_attn.v_proj.weight": "model-00061-of-00191.safetensors", "model.layers.20.input_layernorm.weight": "model-00061-of-00191.safetensors", "model.layers.20.mlp.down_proj.weight": "model-00061-of-00191.safetensors", "model.layers.20.mlp.gate_proj.weight": "model-00062-of-00191.safetensors", "model.layers.20.mlp.up_proj.weight": "model-00062-of-00191.safetensors", "model.layers.20.post_attention_layernorm.weight": "model-00062-of-00191.safetensors", 
"model.layers.20.self_attn.k_proj.weight": "model-00062-of-00191.safetensors", "model.layers.20.self_attn.o_proj.weight": "model-00062-of-00191.safetensors", "model.layers.20.self_attn.q_proj.weight": "model-00062-of-00191.safetensors", "model.layers.20.self_attn.v_proj.weight": "model-00062-of-00191.safetensors", "model.layers.21.input_layernorm.weight": "model-00062-of-00191.safetensors", "model.layers.21.mlp.down_proj.weight": "model-00063-of-00191.safetensors", "model.layers.21.mlp.gate_proj.weight": "model-00063-of-00191.safetensors", "model.layers.21.mlp.up_proj.weight": "model-00064-of-00191.safetensors", "model.layers.21.post_attention_layernorm.weight": "model-00064-of-00191.safetensors", "model.layers.21.self_attn.k_proj.weight": "model-00064-of-00191.safetensors", "model.layers.21.self_attn.o_proj.weight": "model-00064-of-00191.safetensors", "model.layers.21.self_attn.q_proj.weight": "model-00064-of-00191.safetensors", "model.layers.21.self_attn.v_proj.weight": "model-00064-of-00191.safetensors", "model.layers.22.input_layernorm.weight": "model-00064-of-00191.safetensors", "model.layers.22.mlp.down_proj.weight": "model-00064-of-00191.safetensors", "model.layers.22.mlp.gate_proj.weight": "model-00065-of-00191.safetensors", "model.layers.22.mlp.up_proj.weight": "model-00065-of-00191.safetensors", "model.layers.22.post_attention_layernorm.weight": "model-00065-of-00191.safetensors", "model.layers.22.self_attn.k_proj.weight": "model-00065-of-00191.safetensors", "model.layers.22.self_attn.o_proj.weight": "model-00065-of-00191.safetensors", "model.layers.22.self_attn.q_proj.weight": "model-00065-of-00191.safetensors", "model.layers.22.self_attn.v_proj.weight": "model-00065-of-00191.safetensors", "model.layers.23.input_layernorm.weight": "model-00065-of-00191.safetensors", "model.layers.23.mlp.down_proj.weight": "model-00066-of-00191.safetensors", "model.layers.23.mlp.gate_proj.weight": "model-00066-of-00191.safetensors", "model.layers.23.mlp.up_proj.weight": "model-00067-of-00191.safetensors", "model.layers.23.post_attention_layernorm.weight": "model-00067-of-00191.safetensors", "model.layers.23.self_attn.k_proj.weight": "model-00067-of-00191.safetensors", "model.layers.23.self_attn.o_proj.weight": "model-00067-of-00191.safetensors", "model.layers.23.self_attn.q_proj.weight": "model-00067-of-00191.safetensors", "model.layers.23.self_attn.v_proj.weight": "model-00067-of-00191.safetensors", "model.layers.24.input_layernorm.weight": "model-00067-of-00191.safetensors", "model.layers.24.mlp.down_proj.weight": "model-00067-of-00191.safetensors", "model.layers.24.mlp.gate_proj.weight": "model-00068-of-00191.safetensors", "model.layers.24.mlp.up_proj.weight": "model-00068-of-00191.safetensors", "model.layers.24.post_attention_layernorm.weight": "model-00068-of-00191.safetensors", "model.layers.24.self_attn.k_proj.weight": "model-00068-of-00191.safetensors", "model.layers.24.self_attn.o_proj.weight": "model-00068-of-00191.safetensors", "model.layers.24.self_attn.q_proj.weight": "model-00068-of-00191.safetensors", "model.layers.24.self_attn.v_proj.weight": "model-00068-of-00191.safetensors", "model.layers.25.input_layernorm.weight": "model-00068-of-00191.safetensors", "model.layers.25.mlp.down_proj.weight": "model-00069-of-00191.safetensors", "model.layers.25.mlp.gate_proj.weight": "model-00069-of-00191.safetensors", "model.layers.25.mlp.up_proj.weight": "model-00070-of-00191.safetensors", "model.layers.25.post_attention_layernorm.weight": "model-00070-of-00191.safetensors", 
"model.layers.25.self_attn.k_proj.weight": "model-00070-of-00191.safetensors", "model.layers.25.self_attn.o_proj.weight": "model-00070-of-00191.safetensors", "model.layers.25.self_attn.q_proj.weight": "model-00070-of-00191.safetensors", "model.layers.25.self_attn.v_proj.weight": "model-00070-of-00191.safetensors", "model.layers.26.input_layernorm.weight": "model-00070-of-00191.safetensors", "model.layers.26.mlp.down_proj.weight": "model-00070-of-00191.safetensors", "model.layers.26.mlp.gate_proj.weight": "model-00071-of-00191.safetensors", "model.layers.26.mlp.up_proj.weight": "model-00071-of-00191.safetensors", "model.layers.26.post_attention_layernorm.weight": "model-00071-of-00191.safetensors", "model.layers.26.self_attn.k_proj.weight": "model-00071-of-00191.safetensors", "model.layers.26.self_attn.o_proj.weight": "model-00071-of-00191.safetensors", "model.layers.26.self_attn.q_proj.weight": "model-00071-of-00191.safetensors", "model.layers.26.self_attn.v_proj.weight": "model-00071-of-00191.safetensors", "model.layers.27.input_layernorm.weight": "model-00071-of-00191.safetensors", "model.layers.27.mlp.down_proj.weight": "model-00072-of-00191.safetensors", "model.layers.27.mlp.gate_proj.weight": "model-00072-of-00191.safetensors", "model.layers.27.mlp.up_proj.weight": "model-00073-of-00191.safetensors", "model.layers.27.post_attention_layernorm.weight": "model-00073-of-00191.safetensors", "model.layers.27.self_attn.k_proj.weight": "model-00073-of-00191.safetensors", "model.layers.27.self_attn.o_proj.weight": "model-00073-of-00191.safetensors", "model.layers.27.self_attn.q_proj.weight": "model-00073-of-00191.safetensors", "model.layers.27.self_attn.v_proj.weight": "model-00073-of-00191.safetensors", "model.layers.28.input_layernorm.weight": "model-00073-of-00191.safetensors", "model.layers.28.mlp.down_proj.weight": "model-00073-of-00191.safetensors", "model.layers.28.mlp.gate_proj.weight": "model-00074-of-00191.safetensors", "model.layers.28.mlp.up_proj.weight": "model-00074-of-00191.safetensors", "model.layers.28.post_attention_layernorm.weight": "model-00074-of-00191.safetensors", "model.layers.28.self_attn.k_proj.weight": "model-00074-of-00191.safetensors", "model.layers.28.self_attn.o_proj.weight": "model-00074-of-00191.safetensors", "model.layers.28.self_attn.q_proj.weight": "model-00074-of-00191.safetensors", "model.layers.28.self_attn.v_proj.weight": "model-00074-of-00191.safetensors", "model.layers.29.input_layernorm.weight": "model-00074-of-00191.safetensors", "model.layers.29.mlp.down_proj.weight": "model-00075-of-00191.safetensors", "model.layers.29.mlp.gate_proj.weight": "model-00075-of-00191.safetensors", "model.layers.29.mlp.up_proj.weight": "model-00076-of-00191.safetensors", "model.layers.29.post_attention_layernorm.weight": "model-00076-of-00191.safetensors", "model.layers.29.self_attn.k_proj.weight": "model-00076-of-00191.safetensors", "model.layers.29.self_attn.o_proj.weight": "model-00076-of-00191.safetensors", "model.layers.29.self_attn.q_proj.weight": "model-00076-of-00191.safetensors", "model.layers.29.self_attn.v_proj.weight": "model-00076-of-00191.safetensors", "model.layers.3.input_layernorm.weight": "model-00076-of-00191.safetensors", "model.layers.3.mlp.down_proj.weight": "model-00076-of-00191.safetensors", "model.layers.3.mlp.gate_proj.weight": "model-00077-of-00191.safetensors", "model.layers.3.mlp.up_proj.weight": "model-00077-of-00191.safetensors", "model.layers.3.post_attention_layernorm.weight": "model-00077-of-00191.safetensors", 
"model.layers.3.self_attn.k_proj.weight": "model-00077-of-00191.safetensors", "model.layers.3.self_attn.o_proj.weight": "model-00077-of-00191.safetensors", "model.layers.3.self_attn.q_proj.weight": "model-00077-of-00191.safetensors", "model.layers.3.self_attn.v_proj.weight": "model-00077-of-00191.safetensors", "model.layers.30.input_layernorm.weight": "model-00077-of-00191.safetensors", "model.layers.30.mlp.down_proj.weight": "model-00078-of-00191.safetensors", "model.layers.30.mlp.gate_proj.weight": "model-00078-of-00191.safetensors", "model.layers.30.mlp.up_proj.weight": "model-00079-of-00191.safetensors", "model.layers.30.post_attention_layernorm.weight": "model-00079-of-00191.safetensors", "model.layers.30.self_attn.k_proj.weight": "model-00079-of-00191.safetensors", "model.layers.30.self_attn.o_proj.weight": "model-00079-of-00191.safetensors", "model.layers.30.self_attn.q_proj.weight": "model-00079-of-00191.safetensors", "model.layers.30.self_attn.v_proj.weight": "model-00079-of-00191.safetensors", "model.layers.31.input_layernorm.weight": "model-00079-of-00191.safetensors", "model.layers.31.mlp.down_proj.weight": "model-00079-of-00191.safetensors", "model.layers.31.mlp.gate_proj.weight": "model-00080-of-00191.safetensors", "model.layers.31.mlp.up_proj.weight": "model-00080-of-00191.safetensors", "model.layers.31.post_attention_layernorm.weight": "model-00080-of-00191.safetensors", "model.layers.31.self_attn.k_proj.weight": "model-00080-of-00191.safetensors", "model.layers.31.self_attn.o_proj.weight": "model-00080-of-00191.safetensors", "model.layers.31.self_attn.q_proj.weight": "model-00080-of-00191.safetensors", "model.layers.31.self_attn.v_proj.weight": "model-00080-of-00191.safetensors", "model.layers.32.input_layernorm.weight": "model-00080-of-00191.safetensors", "model.layers.32.mlp.down_proj.weight": "model-00081-of-00191.safetensors", "model.layers.32.mlp.gate_proj.weight": "model-00081-of-00191.safetensors", "model.layers.32.mlp.up_proj.weight": "model-00082-of-00191.safetensors", "model.layers.32.post_attention_layernorm.weight": "model-00082-of-00191.safetensors", "model.layers.32.self_attn.k_proj.weight": "model-00082-of-00191.safetensors", "model.layers.32.self_attn.o_proj.weight": "model-00082-of-00191.safetensors", "model.layers.32.self_attn.q_proj.weight": "model-00082-of-00191.safetensors", "model.layers.32.self_attn.v_proj.weight": "model-00082-of-00191.safetensors", "model.layers.33.input_layernorm.weight": "model-00082-of-00191.safetensors", "model.layers.33.mlp.down_proj.weight": "model-00082-of-00191.safetensors", "model.layers.33.mlp.gate_proj.weight": "model-00083-of-00191.safetensors", "model.layers.33.mlp.up_proj.weight": "model-00083-of-00191.safetensors", "model.layers.33.post_attention_layernorm.weight": "model-00083-of-00191.safetensors", "model.layers.33.self_attn.k_proj.weight": "model-00083-of-00191.safetensors", "model.layers.33.self_attn.o_proj.weight": "model-00083-of-00191.safetensors", "model.layers.33.self_attn.q_proj.weight": "model-00083-of-00191.safetensors", "model.layers.33.self_attn.v_proj.weight": "model-00083-of-00191.safetensors", "model.layers.34.input_layernorm.weight": "model-00083-of-00191.safetensors", "model.layers.34.mlp.down_proj.weight": "model-00084-of-00191.safetensors", "model.layers.34.mlp.gate_proj.weight": "model-00084-of-00191.safetensors", "model.layers.34.mlp.up_proj.weight": "model-00085-of-00191.safetensors", "model.layers.34.post_attention_layernorm.weight": "model-00085-of-00191.safetensors", 
"model.layers.34.self_attn.k_proj.weight": "model-00085-of-00191.safetensors", "model.layers.34.self_attn.o_proj.weight": "model-00085-of-00191.safetensors", "model.layers.34.self_attn.q_proj.weight": "model-00085-of-00191.safetensors", "model.layers.34.self_attn.v_proj.weight": "model-00085-of-00191.safetensors", "model.layers.35.input_layernorm.weight": "model-00085-of-00191.safetensors", "model.layers.35.mlp.down_proj.weight": "model-00085-of-00191.safetensors", "model.layers.35.mlp.gate_proj.weight": "model-00086-of-00191.safetensors", "model.layers.35.mlp.up_proj.weight": "model-00086-of-00191.safetensors", "model.layers.35.post_attention_layernorm.weight": "model-00086-of-00191.safetensors", "model.layers.35.self_attn.k_proj.weight": "model-00086-of-00191.safetensors", "model.layers.35.self_attn.o_proj.weight": "model-00086-of-00191.safetensors", "model.layers.35.self_attn.q_proj.weight": "model-00086-of-00191.safetensors", "model.layers.35.self_attn.v_proj.weight": "model-00086-of-00191.safetensors", "model.layers.36.input_layernorm.weight": "model-00086-of-00191.safetensors", "model.layers.36.mlp.down_proj.weight": "model-00087-of-00191.safetensors", "model.layers.36.mlp.gate_proj.weight": "model-00087-of-00191.safetensors", "model.layers.36.mlp.up_proj.weight": "model-00088-of-00191.safetensors", "model.layers.36.post_attention_layernorm.weight": "model-00088-of-00191.safetensors", "model.layers.36.self_attn.k_proj.weight": "model-00088-of-00191.safetensors", "model.layers.36.self_attn.o_proj.weight": "model-00088-of-00191.safetensors", "model.layers.36.self_attn.q_proj.weight": "model-00088-of-00191.safetensors", "model.layers.36.self_attn.v_proj.weight": "model-00088-of-00191.safetensors", "model.layers.37.input_layernorm.weight": "model-00088-of-00191.safetensors", "model.layers.37.mlp.down_proj.weight": "model-00088-of-00191.safetensors", "model.layers.37.mlp.gate_proj.weight": "model-00089-of-00191.safetensors", "model.layers.37.mlp.up_proj.weight": "model-00089-of-00191.safetensors", "model.layers.37.post_attention_layernorm.weight": "model-00089-of-00191.safetensors", "model.layers.37.self_attn.k_proj.weight": "model-00089-of-00191.safetensors", "model.layers.37.self_attn.o_proj.weight": "model-00089-of-00191.safetensors", "model.layers.37.self_attn.q_proj.weight": "model-00089-of-00191.safetensors", "model.layers.37.self_attn.v_proj.weight": "model-00089-of-00191.safetensors", "model.layers.38.input_layernorm.weight": "model-00089-of-00191.safetensors", "model.layers.38.mlp.down_proj.weight": "model-00090-of-00191.safetensors", "model.layers.38.mlp.gate_proj.weight": "model-00090-of-00191.safetensors", "model.layers.38.mlp.up_proj.weight": "model-00091-of-00191.safetensors", "model.layers.38.post_attention_layernorm.weight": "model-00091-of-00191.safetensors", "model.layers.38.self_attn.k_proj.weight": "model-00091-of-00191.safetensors", "model.layers.38.self_attn.o_proj.weight": "model-00091-of-00191.safetensors", "model.layers.38.self_attn.q_proj.weight": "model-00091-of-00191.safetensors", "model.layers.38.self_attn.v_proj.weight": "model-00091-of-00191.safetensors", "model.layers.39.input_layernorm.weight": "model-00091-of-00191.safetensors", "model.layers.39.mlp.down_proj.weight": "model-00091-of-00191.safetensors", "model.layers.39.mlp.gate_proj.weight": "model-00092-of-00191.safetensors", "model.layers.39.mlp.up_proj.weight": "model-00092-of-00191.safetensors", "model.layers.39.post_attention_layernorm.weight": "model-00092-of-00191.safetensors", 
"model.layers.39.self_attn.k_proj.weight": "model-00092-of-00191.safetensors", "model.layers.39.self_attn.o_proj.weight": "model-00092-of-00191.safetensors", "model.layers.39.self_attn.q_proj.weight": "model-00092-of-00191.safetensors", "model.layers.39.self_attn.v_proj.weight": "model-00092-of-00191.safetensors", "model.layers.4.input_layernorm.weight": "model-00092-of-00191.safetensors", "model.layers.4.mlp.down_proj.weight": "model-00093-of-00191.safetensors", "model.layers.4.mlp.gate_proj.weight": "model-00093-of-00191.safetensors", "model.layers.4.mlp.up_proj.weight": "model-00094-of-00191.safetensors", "model.layers.4.post_attention_layernorm.weight": "model-00094-of-00191.safetensors", "model.layers.4.self_attn.k_proj.weight": "model-00094-of-00191.safetensors", "model.layers.4.self_attn.o_proj.weight": "model-00094-of-00191.safetensors", "model.layers.4.self_attn.q_proj.weight": "model-00094-of-00191.safetensors", "model.layers.4.self_attn.v_proj.weight": "model-00094-of-00191.safetensors", "model.layers.40.input_layernorm.weight": "model-00094-of-00191.safetensors", "model.layers.40.mlp.down_proj.weight": "model-00094-of-00191.safetensors", "model.layers.40.mlp.gate_proj.weight": "model-00095-of-00191.safetensors", "model.layers.40.mlp.up_proj.weight": "model-00095-of-00191.safetensors", "model.layers.40.post_attention_layernorm.weight": "model-00095-of-00191.safetensors", "model.layers.40.self_attn.k_proj.weight": "model-00095-of-00191.safetensors", "model.layers.40.self_attn.o_proj.weight": "model-00095-of-00191.safetensors", "model.layers.40.self_attn.q_proj.weight": "model-00095-of-00191.safetensors", "model.layers.40.self_attn.v_proj.weight": "model-00095-of-00191.safetensors", "model.layers.41.input_layernorm.weight": "model-00095-of-00191.safetensors", "model.layers.41.mlp.down_proj.weight": "model-00096-of-00191.safetensors", "model.layers.41.mlp.gate_proj.weight": "model-00096-of-00191.safetensors", "model.layers.41.mlp.up_proj.weight": "model-00097-of-00191.safetensors", "model.layers.41.post_attention_layernorm.weight": "model-00097-of-00191.safetensors", "model.layers.41.self_attn.k_proj.weight": "model-00097-of-00191.safetensors", "model.layers.41.self_attn.o_proj.weight": "model-00097-of-00191.safetensors", "model.layers.41.self_attn.q_proj.weight": "model-00097-of-00191.safetensors", "model.layers.41.self_attn.v_proj.weight": "model-00097-of-00191.safetensors", "model.layers.42.input_layernorm.weight": "model-00097-of-00191.safetensors", "model.layers.42.mlp.down_proj.weight": "model-00097-of-00191.safetensors", "model.layers.42.mlp.gate_proj.weight": "model-00098-of-00191.safetensors", "model.layers.42.mlp.up_proj.weight": "model-00098-of-00191.safetensors", "model.layers.42.post_attention_layernorm.weight": "model-00098-of-00191.safetensors", "model.layers.42.self_attn.k_proj.weight": "model-00098-of-00191.safetensors", "model.layers.42.self_attn.o_proj.weight": "model-00098-of-00191.safetensors", "model.layers.42.self_attn.q_proj.weight": "model-00098-of-00191.safetensors", "model.layers.42.self_attn.v_proj.weight": "model-00098-of-00191.safetensors", "model.layers.43.input_layernorm.weight": "model-00098-of-00191.safetensors", "model.layers.43.mlp.down_proj.weight": "model-00099-of-00191.safetensors", "model.layers.43.mlp.gate_proj.weight": "model-00099-of-00191.safetensors", "model.layers.43.mlp.up_proj.weight": "model-00100-of-00191.safetensors", "model.layers.43.post_attention_layernorm.weight": "model-00100-of-00191.safetensors", 
"model.layers.43.self_attn.k_proj.weight": "model-00100-of-00191.safetensors", "model.layers.43.self_attn.o_proj.weight": "model-00100-of-00191.safetensors", "model.layers.43.self_attn.q_proj.weight": "model-00100-of-00191.safetensors", "model.layers.43.self_attn.v_proj.weight": "model-00100-of-00191.safetensors", "model.layers.44.input_layernorm.weight": "model-00100-of-00191.safetensors", "model.layers.44.mlp.down_proj.weight": "model-00100-of-00191.safetensors", "model.layers.44.mlp.gate_proj.weight": "model-00101-of-00191.safetensors", "model.layers.44.mlp.up_proj.weight": "model-00101-of-00191.safetensors", "model.layers.44.post_attention_layernorm.weight": "model-00101-of-00191.safetensors", "model.layers.44.self_attn.k_proj.weight": "model-00101-of-00191.safetensors", "model.layers.44.self_attn.o_proj.weight": "model-00101-of-00191.safetensors", "model.layers.44.self_attn.q_proj.weight": "model-00101-of-00191.safetensors", "model.layers.44.self_attn.v_proj.weight": "model-00101-of-00191.safetensors", "model.layers.45.input_layernorm.weight": "model-00101-of-00191.safetensors", "model.layers.45.mlp.down_proj.weight": "model-00102-of-00191.safetensors", "model.layers.45.mlp.gate_proj.weight": "model-00102-of-00191.safetensors", "model.layers.45.mlp.up_proj.weight": "model-00103-of-00191.safetensors", "model.layers.45.post_attention_layernorm.weight": "model-00103-of-00191.safetensors", "model.layers.45.self_attn.k_proj.weight": "model-00103-of-00191.safetensors", "model.layers.45.self_attn.o_proj.weight": "model-00103-of-00191.safetensors", "model.layers.45.self_attn.q_proj.weight": "model-00103-of-00191.safetensors", "model.layers.45.self_attn.v_proj.weight": "model-00103-of-00191.safetensors", "model.layers.46.input_layernorm.weight": "model-00103-of-00191.safetensors", "model.layers.46.mlp.down_proj.weight": "model-00103-of-00191.safetensors", "model.layers.46.mlp.gate_proj.weight": "model-00104-of-00191.safetensors", "model.layers.46.mlp.up_proj.weight": "model-00104-of-00191.safetensors", "model.layers.46.post_attention_layernorm.weight": "model-00104-of-00191.safetensors", "model.layers.46.self_attn.k_proj.weight": "model-00104-of-00191.safetensors", "model.layers.46.self_attn.o_proj.weight": "model-00104-of-00191.safetensors", "model.layers.46.self_attn.q_proj.weight": "model-00104-of-00191.safetensors", "model.layers.46.self_attn.v_proj.weight": "model-00104-of-00191.safetensors", "model.layers.47.input_layernorm.weight": "model-00104-of-00191.safetensors", "model.layers.47.mlp.down_proj.weight": "model-00105-of-00191.safetensors", "model.layers.47.mlp.gate_proj.weight": "model-00105-of-00191.safetensors", "model.layers.47.mlp.up_proj.weight": "model-00106-of-00191.safetensors", "model.layers.47.post_attention_layernorm.weight": "model-00106-of-00191.safetensors", "model.layers.47.self_attn.k_proj.weight": "model-00106-of-00191.safetensors", "model.layers.47.self_attn.o_proj.weight": "model-00106-of-00191.safetensors", "model.layers.47.self_attn.q_proj.weight": "model-00106-of-00191.safetensors", "model.layers.47.self_attn.v_proj.weight": "model-00106-of-00191.safetensors", "model.layers.48.input_layernorm.weight": "model-00106-of-00191.safetensors", "model.layers.48.mlp.down_proj.weight": "model-00106-of-00191.safetensors", "model.layers.48.mlp.gate_proj.weight": "model-00107-of-00191.safetensors", "model.layers.48.mlp.up_proj.weight": "model-00107-of-00191.safetensors", "model.layers.48.post_attention_layernorm.weight": "model-00107-of-00191.safetensors", 
"model.layers.48.self_attn.k_proj.weight": "model-00107-of-00191.safetensors", "model.layers.48.self_attn.o_proj.weight": "model-00107-of-00191.safetensors", "model.layers.48.self_attn.q_proj.weight": "model-00107-of-00191.safetensors", "model.layers.48.self_attn.v_proj.weight": "model-00107-of-00191.safetensors", "model.layers.49.input_layernorm.weight": "model-00107-of-00191.safetensors", "model.layers.49.mlp.down_proj.weight": "model-00108-of-00191.safetensors", "model.layers.49.mlp.gate_proj.weight": "model-00108-of-00191.safetensors", "model.layers.49.mlp.up_proj.weight": "model-00109-of-00191.safetensors", "model.layers.49.post_attention_layernorm.weight": "model-00109-of-00191.safetensors", "model.layers.49.self_attn.k_proj.weight": "model-00109-of-00191.safetensors", "model.layers.49.self_attn.o_proj.weight": "model-00109-of-00191.safetensors", "model.layers.49.self_attn.q_proj.weight": "model-00109-of-00191.safetensors", "model.layers.49.self_attn.v_proj.weight": "model-00109-of-00191.safetensors", "model.layers.5.input_layernorm.weight": "model-00109-of-00191.safetensors", "model.layers.5.mlp.down_proj.weight": "model-00109-of-00191.safetensors", "model.layers.5.mlp.gate_proj.weight": "model-00110-of-00191.safetensors", "model.layers.5.mlp.up_proj.weight": "model-00110-of-00191.safetensors", "model.layers.5.post_attention_layernorm.weight": "model-00110-of-00191.safetensors", "model.layers.5.self_attn.k_proj.weight": "model-00110-of-00191.safetensors", "model.layers.5.self_attn.o_proj.weight": "model-00110-of-00191.safetensors", "model.layers.5.self_attn.q_proj.weight": "model-00110-of-00191.safetensors", "model.layers.5.self_attn.v_proj.weight": "model-00110-of-00191.safetensors", "model.layers.50.input_layernorm.weight": "model-00110-of-00191.safetensors", "model.layers.50.mlp.down_proj.weight": "model-00111-of-00191.safetensors", "model.layers.50.mlp.gate_proj.weight": "model-00111-of-00191.safetensors", "model.layers.50.mlp.up_proj.weight": "model-00112-of-00191.safetensors", "model.layers.50.post_attention_layernorm.weight": "model-00112-of-00191.safetensors", "model.layers.50.self_attn.k_proj.weight": "model-00112-of-00191.safetensors", "model.layers.50.self_attn.o_proj.weight": "model-00112-of-00191.safetensors", "model.layers.50.self_attn.q_proj.weight": "model-00112-of-00191.safetensors", "model.layers.50.self_attn.v_proj.weight": "model-00112-of-00191.safetensors", "model.layers.51.input_layernorm.weight": "model-00112-of-00191.safetensors", "model.layers.51.mlp.down_proj.weight": "model-00112-of-00191.safetensors", "model.layers.51.mlp.gate_proj.weight": "model-00113-of-00191.safetensors", "model.layers.51.mlp.up_proj.weight": "model-00113-of-00191.safetensors", "model.layers.51.post_attention_layernorm.weight": "model-00113-of-00191.safetensors", "model.layers.51.self_attn.k_proj.weight": "model-00113-of-00191.safetensors", "model.layers.51.self_attn.o_proj.weight": "model-00113-of-00191.safetensors", "model.layers.51.self_attn.q_proj.weight": "model-00113-of-00191.safetensors", "model.layers.51.self_attn.v_proj.weight": "model-00113-of-00191.safetensors", "model.layers.52.input_layernorm.weight": "model-00113-of-00191.safetensors", "model.layers.52.mlp.down_proj.weight": "model-00114-of-00191.safetensors", "model.layers.52.mlp.gate_proj.weight": "model-00114-of-00191.safetensors", "model.layers.52.mlp.up_proj.weight": "model-00115-of-00191.safetensors", "model.layers.52.post_attention_layernorm.weight": "model-00115-of-00191.safetensors", 
"model.layers.52.self_attn.k_proj.weight": "model-00115-of-00191.safetensors", "model.layers.52.self_attn.o_proj.weight": "model-00115-of-00191.safetensors", "model.layers.52.self_attn.q_proj.weight": "model-00115-of-00191.safetensors", "model.layers.52.self_attn.v_proj.weight": "model-00115-of-00191.safetensors", "model.layers.53.input_layernorm.weight": "model-00115-of-00191.safetensors", "model.layers.53.mlp.down_proj.weight": "model-00115-of-00191.safetensors", "model.layers.53.mlp.gate_proj.weight": "model-00116-of-00191.safetensors", "model.layers.53.mlp.up_proj.weight": "model-00116-of-00191.safetensors", "model.layers.53.post_attention_layernorm.weight": "model-00116-of-00191.safetensors", "model.layers.53.self_attn.k_proj.weight": "model-00116-of-00191.safetensors", "model.layers.53.self_attn.o_proj.weight": "model-00116-of-00191.safetensors", "model.layers.53.self_attn.q_proj.weight": "model-00116-of-00191.safetensors", "model.layers.53.self_attn.v_proj.weight": "model-00116-of-00191.safetensors", "model.layers.54.input_layernorm.weight": "model-00116-of-00191.safetensors", "model.layers.54.mlp.down_proj.weight": "model-00117-of-00191.safetensors", "model.layers.54.mlp.gate_proj.weight": "model-00117-of-00191.safetensors", "model.layers.54.mlp.up_proj.weight": "model-00118-of-00191.safetensors", "model.layers.54.post_attention_layernorm.weight": "model-00118-of-00191.safetensors", "model.layers.54.self_attn.k_proj.weight": "model-00118-of-00191.safetensors", "model.layers.54.self_attn.o_proj.weight": "model-00118-of-00191.safetensors", "model.layers.54.self_attn.q_proj.weight": "model-00118-of-00191.safetensors", "model.layers.54.self_attn.v_proj.weight": "model-00118-of-00191.safetensors", "model.layers.55.input_layernorm.weight": "model-00118-of-00191.safetensors", "model.layers.55.mlp.down_proj.weight": "model-00118-of-00191.safetensors", "model.layers.55.mlp.gate_proj.weight": "model-00119-of-00191.safetensors", "model.layers.55.mlp.up_proj.weight": "model-00119-of-00191.safetensors", "model.layers.55.post_attention_layernorm.weight": "model-00119-of-00191.safetensors", "model.layers.55.self_attn.k_proj.weight": "model-00119-of-00191.safetensors", "model.layers.55.self_attn.o_proj.weight": "model-00119-of-00191.safetensors", "model.layers.55.self_attn.q_proj.weight": "model-00119-of-00191.safetensors", "model.layers.55.self_attn.v_proj.weight": "model-00119-of-00191.safetensors", "model.layers.56.input_layernorm.weight": "model-00119-of-00191.safetensors", "model.layers.56.mlp.down_proj.weight": "model-00120-of-00191.safetensors", "model.layers.56.mlp.gate_proj.weight": "model-00120-of-00191.safetensors", "model.layers.56.mlp.up_proj.weight": "model-00121-of-00191.safetensors", "model.layers.56.post_attention_layernorm.weight": "model-00121-of-00191.safetensors", "model.layers.56.self_attn.k_proj.weight": "model-00121-of-00191.safetensors", "model.layers.56.self_attn.o_proj.weight": "model-00121-of-00191.safetensors", "model.layers.56.self_attn.q_proj.weight": "model-00121-of-00191.safetensors", "model.layers.56.self_attn.v_proj.weight": "model-00121-of-00191.safetensors", "model.layers.57.input_layernorm.weight": "model-00121-of-00191.safetensors", "model.layers.57.mlp.down_proj.weight": "model-00121-of-00191.safetensors", "model.layers.57.mlp.gate_proj.weight": "model-00122-of-00191.safetensors", "model.layers.57.mlp.up_proj.weight": "model-00122-of-00191.safetensors", "model.layers.57.post_attention_layernorm.weight": "model-00122-of-00191.safetensors", 
"model.layers.57.self_attn.k_proj.weight": "model-00122-of-00191.safetensors", "model.layers.57.self_attn.o_proj.weight": "model-00122-of-00191.safetensors", "model.layers.57.self_attn.q_proj.weight": "model-00122-of-00191.safetensors", "model.layers.57.self_attn.v_proj.weight": "model-00122-of-00191.safetensors", "model.layers.58.input_layernorm.weight": "model-00122-of-00191.safetensors", "model.layers.58.mlp.down_proj.weight": "model-00123-of-00191.safetensors", "model.layers.58.mlp.gate_proj.weight": "model-00123-of-00191.safetensors", "model.layers.58.mlp.up_proj.weight": "model-00124-of-00191.safetensors", "model.layers.58.post_attention_layernorm.weight": "model-00124-of-00191.safetensors", "model.layers.58.self_attn.k_proj.weight": "model-00124-of-00191.safetensors", "model.layers.58.self_attn.o_proj.weight": "model-00124-of-00191.safetensors", "model.layers.58.self_attn.q_proj.weight": "model-00124-of-00191.safetensors", "model.layers.58.self_attn.v_proj.weight": "model-00124-of-00191.safetensors", "model.layers.59.input_layernorm.weight": "model-00124-of-00191.safetensors", "model.layers.59.mlp.down_proj.weight": "model-00124-of-00191.safetensors", "model.layers.59.mlp.gate_proj.weight": "model-00125-of-00191.safetensors", "model.layers.59.mlp.up_proj.weight": "model-00125-of-00191.safetensors", "model.layers.59.post_attention_layernorm.weight": "model-00125-of-00191.safetensors", "model.layers.59.self_attn.k_proj.weight": "model-00125-of-00191.safetensors", "model.layers.59.self_attn.o_proj.weight": "model-00125-of-00191.safetensors", "model.layers.59.self_attn.q_proj.weight": "model-00125-of-00191.safetensors", "model.layers.59.self_attn.v_proj.weight": "model-00125-of-00191.safetensors", "model.layers.6.input_layernorm.weight": "model-00125-of-00191.safetensors", "model.layers.6.mlp.down_proj.weight": "model-00126-of-00191.safetensors", "model.layers.6.mlp.gate_proj.weight": "model-00126-of-00191.safetensors", "model.layers.6.mlp.up_proj.weight": "model-00127-of-00191.safetensors", "model.layers.6.post_attention_layernorm.weight": "model-00127-of-00191.safetensors", "model.layers.6.self_attn.k_proj.weight": "model-00127-of-00191.safetensors", "model.layers.6.self_attn.o_proj.weight": "model-00127-of-00191.safetensors", "model.layers.6.self_attn.q_proj.weight": "model-00127-of-00191.safetensors", "model.layers.6.self_attn.v_proj.weight": "model-00127-of-00191.safetensors", "model.layers.60.input_layernorm.weight": "model-00127-of-00191.safetensors", "model.layers.60.mlp.down_proj.weight": "model-00127-of-00191.safetensors", "model.layers.60.mlp.gate_proj.weight": "model-00128-of-00191.safetensors", "model.layers.60.mlp.up_proj.weight": "model-00128-of-00191.safetensors", "model.layers.60.post_attention_layernorm.weight": "model-00128-of-00191.safetensors", "model.layers.60.self_attn.k_proj.weight": "model-00128-of-00191.safetensors", "model.layers.60.self_attn.o_proj.weight": "model-00128-of-00191.safetensors", "model.layers.60.self_attn.q_proj.weight": "model-00128-of-00191.safetensors", "model.layers.60.self_attn.v_proj.weight": "model-00128-of-00191.safetensors", "model.layers.61.input_layernorm.weight": "model-00128-of-00191.safetensors", "model.layers.61.mlp.down_proj.weight": "model-00129-of-00191.safetensors", "model.layers.61.mlp.gate_proj.weight": "model-00129-of-00191.safetensors", "model.layers.61.mlp.up_proj.weight": "model-00130-of-00191.safetensors", "model.layers.61.post_attention_layernorm.weight": "model-00130-of-00191.safetensors", 
"model.layers.61.self_attn.k_proj.weight": "model-00130-of-00191.safetensors", "model.layers.61.self_attn.o_proj.weight": "model-00130-of-00191.safetensors", "model.layers.61.self_attn.q_proj.weight": "model-00130-of-00191.safetensors", "model.layers.61.self_attn.v_proj.weight": "model-00130-of-00191.safetensors", "model.layers.62.input_layernorm.weight": "model-00130-of-00191.safetensors", "model.layers.62.mlp.down_proj.weight": "model-00130-of-00191.safetensors", "model.layers.62.mlp.gate_proj.weight": "model-00131-of-00191.safetensors", "model.layers.62.mlp.up_proj.weight": "model-00131-of-00191.safetensors", "model.layers.62.post_attention_layernorm.weight": "model-00131-of-00191.safetensors", "model.layers.62.self_attn.k_proj.weight": "model-00131-of-00191.safetensors", "model.layers.62.self_attn.o_proj.weight": "model-00131-of-00191.safetensors", "model.layers.62.self_attn.q_proj.weight": "model-00131-of-00191.safetensors", "model.layers.62.self_attn.v_proj.weight": "model-00131-of-00191.safetensors", "model.layers.63.input_layernorm.weight": "model-00131-of-00191.safetensors", "model.layers.63.mlp.down_proj.weight": "model-00132-of-00191.safetensors", "model.layers.63.mlp.gate_proj.weight": "model-00132-of-00191.safetensors", "model.layers.63.mlp.up_proj.weight": "model-00133-of-00191.safetensors", "model.layers.63.post_attention_layernorm.weight": "model-00133-of-00191.safetensors", "model.layers.63.self_attn.k_proj.weight": "model-00133-of-00191.safetensors", "model.layers.63.self_attn.o_proj.weight": "model-00133-of-00191.safetensors", "model.layers.63.self_attn.q_proj.weight": "model-00133-of-00191.safetensors", "model.layers.63.self_attn.v_proj.weight": "model-00133-of-00191.safetensors", "model.layers.64.input_layernorm.weight": "model-00133-of-00191.safetensors", "model.layers.64.mlp.down_proj.weight": "model-00133-of-00191.safetensors", "model.layers.64.mlp.gate_proj.weight": "model-00134-of-00191.safetensors", "model.layers.64.mlp.up_proj.weight": "model-00134-of-00191.safetensors", "model.layers.64.post_attention_layernorm.weight": "model-00134-of-00191.safetensors", "model.layers.64.self_attn.k_proj.weight": "model-00134-of-00191.safetensors", "model.layers.64.self_attn.o_proj.weight": "model-00134-of-00191.safetensors", "model.layers.64.self_attn.q_proj.weight": "model-00134-of-00191.safetensors", "model.layers.64.self_attn.v_proj.weight": "model-00134-of-00191.safetensors", "model.layers.65.input_layernorm.weight": "model-00134-of-00191.safetensors", "model.layers.65.mlp.down_proj.weight": "model-00135-of-00191.safetensors", "model.layers.65.mlp.gate_proj.weight": "model-00135-of-00191.safetensors", "model.layers.65.mlp.up_proj.weight": "model-00136-of-00191.safetensors", "model.layers.65.post_attention_layernorm.weight": "model-00136-of-00191.safetensors", "model.layers.65.self_attn.k_proj.weight": "model-00136-of-00191.safetensors", "model.layers.65.self_attn.o_proj.weight": "model-00136-of-00191.safetensors", "model.layers.65.self_attn.q_proj.weight": "model-00136-of-00191.safetensors", "model.layers.65.self_attn.v_proj.weight": "model-00136-of-00191.safetensors", "model.layers.66.input_layernorm.weight": "model-00136-of-00191.safetensors", "model.layers.66.mlp.down_proj.weight": "model-00136-of-00191.safetensors", "model.layers.66.mlp.gate_proj.weight": "model-00137-of-00191.safetensors", "model.layers.66.mlp.up_proj.weight": "model-00137-of-00191.safetensors", "model.layers.66.post_attention_layernorm.weight": "model-00137-of-00191.safetensors", 
"model.layers.66.self_attn.k_proj.weight": "model-00137-of-00191.safetensors", "model.layers.66.self_attn.o_proj.weight": "model-00137-of-00191.safetensors", "model.layers.66.self_attn.q_proj.weight": "model-00137-of-00191.safetensors", "model.layers.66.self_attn.v_proj.weight": "model-00137-of-00191.safetensors", "model.layers.67.input_layernorm.weight": "model-00137-of-00191.safetensors", "model.layers.67.mlp.down_proj.weight": "model-00138-of-00191.safetensors", "model.layers.67.mlp.gate_proj.weight": "model-00138-of-00191.safetensors", "model.layers.67.mlp.up_proj.weight": "model-00139-of-00191.safetensors", "model.layers.67.post_attention_layernorm.weight": "model-00139-of-00191.safetensors", "model.layers.67.self_attn.k_proj.weight": "model-00139-of-00191.safetensors", "model.layers.67.self_attn.o_proj.weight": "model-00139-of-00191.safetensors", "model.layers.67.self_attn.q_proj.weight": "model-00139-of-00191.safetensors", "model.layers.67.self_attn.v_proj.weight": "model-00139-of-00191.safetensors", "model.layers.68.input_layernorm.weight": "model-00139-of-00191.safetensors", "model.layers.68.mlp.down_proj.weight": "model-00139-of-00191.safetensors", "model.layers.68.mlp.gate_proj.weight": "model-00140-of-00191.safetensors", "model.layers.68.mlp.up_proj.weight": "model-00140-of-00191.safetensors", "model.layers.68.post_attention_layernorm.weight": "model-00140-of-00191.safetensors", "model.layers.68.self_attn.k_proj.weight": "model-00140-of-00191.safetensors", "model.layers.68.self_attn.o_proj.weight": "model-00140-of-00191.safetensors", "model.layers.68.self_attn.q_proj.weight": "model-00140-of-00191.safetensors", "model.layers.68.self_attn.v_proj.weight": "model-00140-of-00191.safetensors", "model.layers.69.input_layernorm.weight": "model-00140-of-00191.safetensors", "model.layers.69.mlp.down_proj.weight": "model-00141-of-00191.safetensors", "model.layers.69.mlp.gate_proj.weight": "model-00141-of-00191.safetensors", "model.layers.69.mlp.up_proj.weight": "model-00142-of-00191.safetensors", "model.layers.69.post_attention_layernorm.weight": "model-00142-of-00191.safetensors", "model.layers.69.self_attn.k_proj.weight": "model-00142-of-00191.safetensors", "model.layers.69.self_attn.o_proj.weight": "model-00142-of-00191.safetensors", "model.layers.69.self_attn.q_proj.weight": "model-00142-of-00191.safetensors", "model.layers.69.self_attn.v_proj.weight": "model-00142-of-00191.safetensors", "model.layers.7.input_layernorm.weight": "model-00142-of-00191.safetensors", "model.layers.7.mlp.down_proj.weight": "model-00142-of-00191.safetensors", "model.layers.7.mlp.gate_proj.weight": "model-00143-of-00191.safetensors", "model.layers.7.mlp.up_proj.weight": "model-00143-of-00191.safetensors", "model.layers.7.post_attention_layernorm.weight": "model-00143-of-00191.safetensors", "model.layers.7.self_attn.k_proj.weight": "model-00143-of-00191.safetensors", "model.layers.7.self_attn.o_proj.weight": "model-00143-of-00191.safetensors", "model.layers.7.self_attn.q_proj.weight": "model-00143-of-00191.safetensors", "model.layers.7.self_attn.v_proj.weight": "model-00143-of-00191.safetensors", "model.layers.70.input_layernorm.weight": "model-00143-of-00191.safetensors", "model.layers.70.mlp.down_proj.weight": "model-00144-of-00191.safetensors", "model.layers.70.mlp.gate_proj.weight": "model-00144-of-00191.safetensors", "model.layers.70.mlp.up_proj.weight": "model-00145-of-00191.safetensors", "model.layers.70.post_attention_layernorm.weight": "model-00145-of-00191.safetensors", 
"model.layers.70.self_attn.k_proj.weight": "model-00145-of-00191.safetensors", "model.layers.70.self_attn.o_proj.weight": "model-00145-of-00191.safetensors", "model.layers.70.self_attn.q_proj.weight": "model-00145-of-00191.safetensors", "model.layers.70.self_attn.v_proj.weight": "model-00145-of-00191.safetensors", "model.layers.71.input_layernorm.weight": "model-00145-of-00191.safetensors", "model.layers.71.mlp.down_proj.weight": "model-00145-of-00191.safetensors", "model.layers.71.mlp.gate_proj.weight": "model-00146-of-00191.safetensors", "model.layers.71.mlp.up_proj.weight": "model-00146-of-00191.safetensors", "model.layers.71.post_attention_layernorm.weight": "model-00146-of-00191.safetensors", "model.layers.71.self_attn.k_proj.weight": "model-00146-of-00191.safetensors", "model.layers.71.self_attn.o_proj.weight": "model-00146-of-00191.safetensors", "model.layers.71.self_attn.q_proj.weight": "model-00146-of-00191.safetensors", "model.layers.71.self_attn.v_proj.weight": "model-00146-of-00191.safetensors", "model.layers.72.input_layernorm.weight": "model-00146-of-00191.safetensors", "model.layers.72.mlp.down_proj.weight": "model-00147-of-00191.safetensors", "model.layers.72.mlp.gate_proj.weight": "model-00147-of-00191.safetensors", "model.layers.72.mlp.up_proj.weight": "model-00148-of-00191.safetensors", "model.layers.72.post_attention_layernorm.weight": "model-00148-of-00191.safetensors", "model.layers.72.self_attn.k_proj.weight": "model-00148-of-00191.safetensors", "model.layers.72.self_attn.o_proj.weight": "model-00148-of-00191.safetensors", "model.layers.72.self_attn.q_proj.weight": "model-00148-of-00191.safetensors", "model.layers.72.self_attn.v_proj.weight": "model-00148-of-00191.safetensors", "model.layers.73.input_layernorm.weight": "model-00148-of-00191.safetensors", "model.layers.73.mlp.down_proj.weight": "model-00148-of-00191.safetensors", "model.layers.73.mlp.gate_proj.weight": "model-00149-of-00191.safetensors", "model.layers.73.mlp.up_proj.weight": "model-00149-of-00191.safetensors", "model.layers.73.post_attention_layernorm.weight": "model-00149-of-00191.safetensors", "model.layers.73.self_attn.k_proj.weight": "model-00149-of-00191.safetensors", "model.layers.73.self_attn.o_proj.weight": "model-00149-of-00191.safetensors", "model.layers.73.self_attn.q_proj.weight": "model-00149-of-00191.safetensors", "model.layers.73.self_attn.v_proj.weight": "model-00149-of-00191.safetensors", "model.layers.74.input_layernorm.weight": "model-00149-of-00191.safetensors", "model.layers.74.mlp.down_proj.weight": "model-00150-of-00191.safetensors", "model.layers.74.mlp.gate_proj.weight": "model-00150-of-00191.safetensors", "model.layers.74.mlp.up_proj.weight": "model-00151-of-00191.safetensors", "model.layers.74.post_attention_layernorm.weight": "model-00151-of-00191.safetensors", "model.layers.74.self_attn.k_proj.weight": "model-00151-of-00191.safetensors", "model.layers.74.self_attn.o_proj.weight": "model-00151-of-00191.safetensors", "model.layers.74.self_attn.q_proj.weight": "model-00151-of-00191.safetensors", "model.layers.74.self_attn.v_proj.weight": "model-00151-of-00191.safetensors", "model.layers.75.input_layernorm.weight": "model-00151-of-00191.safetensors", "model.layers.75.mlp.down_proj.weight": "model-00151-of-00191.safetensors", "model.layers.75.mlp.gate_proj.weight": "model-00152-of-00191.safetensors", "model.layers.75.mlp.up_proj.weight": "model-00152-of-00191.safetensors", "model.layers.75.post_attention_layernorm.weight": "model-00152-of-00191.safetensors", 
"model.layers.75.self_attn.k_proj.weight": "model-00152-of-00191.safetensors", "model.layers.75.self_attn.o_proj.weight": "model-00152-of-00191.safetensors", "model.layers.75.self_attn.q_proj.weight": "model-00152-of-00191.safetensors", "model.layers.75.self_attn.v_proj.weight": "model-00152-of-00191.safetensors", "model.layers.76.input_layernorm.weight": "model-00152-of-00191.safetensors", "model.layers.76.mlp.down_proj.weight": "model-00153-of-00191.safetensors", "model.layers.76.mlp.gate_proj.weight": "model-00153-of-00191.safetensors", "model.layers.76.mlp.up_proj.weight": "model-00154-of-00191.safetensors", "model.layers.76.post_attention_layernorm.weight": "model-00154-of-00191.safetensors", "model.layers.76.self_attn.k_proj.weight": "model-00154-of-00191.safetensors", "model.layers.76.self_attn.o_proj.weight": "model-00154-of-00191.safetensors", "model.layers.76.self_attn.q_proj.weight": "model-00154-of-00191.safetensors", "model.layers.76.self_attn.v_proj.weight": "model-00154-of-00191.safetensors", "model.layers.77.input_layernorm.weight": "model-00154-of-00191.safetensors", "model.layers.77.mlp.down_proj.weight": "model-00154-of-00191.safetensors", "model.layers.77.mlp.gate_proj.weight": "model-00155-of-00191.safetensors", "model.layers.77.mlp.up_proj.weight": "model-00155-of-00191.safetensors", "model.layers.77.post_attention_layernorm.weight": "model-00155-of-00191.safetensors", "model.layers.77.self_attn.k_proj.weight": "model-00155-of-00191.safetensors", "model.layers.77.self_attn.o_proj.weight": "model-00155-of-00191.safetensors", "model.layers.77.self_attn.q_proj.weight": "model-00155-of-00191.safetensors", "model.layers.77.self_attn.v_proj.weight": "model-00155-of-00191.safetensors", "model.layers.78.input_layernorm.weight": "model-00155-of-00191.safetensors", "model.layers.78.mlp.down_proj.weight": "model-00156-of-00191.safetensors", "model.layers.78.mlp.gate_proj.weight": "model-00156-of-00191.safetensors", "model.layers.78.mlp.up_proj.weight": "model-00157-of-00191.safetensors", "model.layers.78.post_attention_layernorm.weight": "model-00157-of-00191.safetensors", "model.layers.78.self_attn.k_proj.weight": "model-00157-of-00191.safetensors", "model.layers.78.self_attn.o_proj.weight": "model-00157-of-00191.safetensors", "model.layers.78.self_attn.q_proj.weight": "model-00157-of-00191.safetensors", "model.layers.78.self_attn.v_proj.weight": "model-00157-of-00191.safetensors", "model.layers.79.input_layernorm.weight": "model-00157-of-00191.safetensors", "model.layers.79.mlp.down_proj.weight": "model-00157-of-00191.safetensors", "model.layers.79.mlp.gate_proj.weight": "model-00158-of-00191.safetensors", "model.layers.79.mlp.up_proj.weight": "model-00158-of-00191.safetensors", "model.layers.79.post_attention_layernorm.weight": "model-00158-of-00191.safetensors", "model.layers.79.self_attn.k_proj.weight": "model-00158-of-00191.safetensors", "model.layers.79.self_attn.o_proj.weight": "model-00158-of-00191.safetensors", "model.layers.79.self_attn.q_proj.weight": "model-00158-of-00191.safetensors", "model.layers.79.self_attn.v_proj.weight": "model-00158-of-00191.safetensors", "model.layers.8.input_layernorm.weight": "model-00158-of-00191.safetensors", "model.layers.8.mlp.down_proj.weight": "model-00159-of-00191.safetensors", "model.layers.8.mlp.gate_proj.weight": "model-00159-of-00191.safetensors", "model.layers.8.mlp.up_proj.weight": "model-00160-of-00191.safetensors", "model.layers.8.post_attention_layernorm.weight": "model-00160-of-00191.safetensors", 
"model.layers.8.self_attn.k_proj.weight": "model-00160-of-00191.safetensors", "model.layers.8.self_attn.o_proj.weight": "model-00160-of-00191.safetensors", "model.layers.8.self_attn.q_proj.weight": "model-00160-of-00191.safetensors", "model.layers.8.self_attn.v_proj.weight": "model-00160-of-00191.safetensors", "model.layers.80.input_layernorm.weight": "model-00160-of-00191.safetensors", "model.layers.80.mlp.down_proj.weight": "model-00160-of-00191.safetensors", "model.layers.80.mlp.gate_proj.weight": "model-00161-of-00191.safetensors", "model.layers.80.mlp.up_proj.weight": "model-00161-of-00191.safetensors", "model.layers.80.post_attention_layernorm.weight": "model-00161-of-00191.safetensors", "model.layers.80.self_attn.k_proj.weight": "model-00161-of-00191.safetensors", "model.layers.80.self_attn.o_proj.weight": "model-00161-of-00191.safetensors", "model.layers.80.self_attn.q_proj.weight": "model-00161-of-00191.safetensors", "model.layers.80.self_attn.v_proj.weight": "model-00161-of-00191.safetensors", "model.layers.81.input_layernorm.weight": "model-00161-of-00191.safetensors", "model.layers.81.mlp.down_proj.weight": "model-00162-of-00191.safetensors", "model.layers.81.mlp.gate_proj.weight": "model-00162-of-00191.safetensors", "model.layers.81.mlp.up_proj.weight": "model-00163-of-00191.safetensors", "model.layers.81.post_attention_layernorm.weight": "model-00163-of-00191.safetensors", "model.layers.81.self_attn.k_proj.weight": "model-00163-of-00191.safetensors", "model.layers.81.self_attn.o_proj.weight": "model-00163-of-00191.safetensors", "model.layers.81.self_attn.q_proj.weight": "model-00163-of-00191.safetensors", "model.layers.81.self_attn.v_proj.weight": "model-00163-of-00191.safetensors", "model.layers.82.input_layernorm.weight": "model-00163-of-00191.safetensors", "model.layers.82.mlp.down_proj.weight": "model-00163-of-00191.safetensors", "model.layers.82.mlp.gate_proj.weight": "model-00164-of-00191.safetensors", "model.layers.82.mlp.up_proj.weight": "model-00164-of-00191.safetensors", "model.layers.82.post_attention_layernorm.weight": "model-00164-of-00191.safetensors", "model.layers.82.self_attn.k_proj.weight": "model-00164-of-00191.safetensors", "model.layers.82.self_attn.o_proj.weight": "model-00164-of-00191.safetensors", "model.layers.82.self_attn.q_proj.weight": "model-00164-of-00191.safetensors", "model.layers.82.self_attn.v_proj.weight": "model-00164-of-00191.safetensors", "model.layers.83.input_layernorm.weight": "model-00164-of-00191.safetensors", "model.layers.83.mlp.down_proj.weight": "model-00165-of-00191.safetensors", "model.layers.83.mlp.gate_proj.weight": "model-00165-of-00191.safetensors", "model.layers.83.mlp.up_proj.weight": "model-00166-of-00191.safetensors", "model.layers.83.post_attention_layernorm.weight": "model-00166-of-00191.safetensors", "model.layers.83.self_attn.k_proj.weight": "model-00166-of-00191.safetensors", "model.layers.83.self_attn.o_proj.weight": "model-00166-of-00191.safetensors", "model.layers.83.self_attn.q_proj.weight": "model-00166-of-00191.safetensors", "model.layers.83.self_attn.v_proj.weight": "model-00166-of-00191.safetensors", "model.layers.84.input_layernorm.weight": "model-00166-of-00191.safetensors", "model.layers.84.mlp.down_proj.weight": "model-00166-of-00191.safetensors", "model.layers.84.mlp.gate_proj.weight": "model-00167-of-00191.safetensors", "model.layers.84.mlp.up_proj.weight": "model-00167-of-00191.safetensors", "model.layers.84.post_attention_layernorm.weight": "model-00167-of-00191.safetensors", 
"model.layers.84.self_attn.k_proj.weight": "model-00167-of-00191.safetensors", "model.layers.84.self_attn.o_proj.weight": "model-00167-of-00191.safetensors", "model.layers.84.self_attn.q_proj.weight": "model-00167-of-00191.safetensors", "model.layers.84.self_attn.v_proj.weight": "model-00167-of-00191.safetensors", "model.layers.85.input_layernorm.weight": "model-00167-of-00191.safetensors", "model.layers.85.mlp.down_proj.weight": "model-00168-of-00191.safetensors", "model.layers.85.mlp.gate_proj.weight": "model-00168-of-00191.safetensors", "model.layers.85.mlp.up_proj.weight": "model-00169-of-00191.safetensors", "model.layers.85.post_attention_layernorm.weight": "model-00169-of-00191.safetensors", "model.layers.85.self_attn.k_proj.weight": "model-00169-of-00191.safetensors", "model.layers.85.self_attn.o_proj.weight": "model-00169-of-00191.safetensors", "model.layers.85.self_attn.q_proj.weight": "model-00169-of-00191.safetensors", "model.layers.85.self_attn.v_proj.weight": "model-00169-of-00191.safetensors", "model.layers.86.input_layernorm.weight": "model-00169-of-00191.safetensors", "model.layers.86.mlp.down_proj.weight": "model-00169-of-00191.safetensors", "model.layers.86.mlp.gate_proj.weight": "model-00170-of-00191.safetensors", "model.layers.86.mlp.up_proj.weight": "model-00170-of-00191.safetensors", "model.layers.86.post_attention_layernorm.weight": "model-00170-of-00191.safetensors", "model.layers.86.self_attn.k_proj.weight": "model-00170-of-00191.safetensors", "model.layers.86.self_attn.o_proj.weight": "model-00170-of-00191.safetensors", "model.layers.86.self_attn.q_proj.weight": "model-00170-of-00191.safetensors", "model.layers.86.self_attn.v_proj.weight": "model-00170-of-00191.safetensors", "model.layers.87.input_layernorm.weight": "model-00170-of-00191.safetensors", "model.layers.87.mlp.down_proj.weight": "model-00171-of-00191.safetensors", "model.layers.87.mlp.gate_proj.weight": "model-00171-of-00191.safetensors", "model.layers.87.mlp.up_proj.weight": "model-00172-of-00191.safetensors", "model.layers.87.post_attention_layernorm.weight": "model-00172-of-00191.safetensors", "model.layers.87.self_attn.k_proj.weight": "model-00172-of-00191.safetensors", "model.layers.87.self_attn.o_proj.weight": "model-00172-of-00191.safetensors", "model.layers.87.self_attn.q_proj.weight": "model-00172-of-00191.safetensors", "model.layers.87.self_attn.v_proj.weight": "model-00172-of-00191.safetensors", "model.layers.88.input_layernorm.weight": "model-00172-of-00191.safetensors", "model.layers.88.mlp.down_proj.weight": "model-00172-of-00191.safetensors", "model.layers.88.mlp.gate_proj.weight": "model-00173-of-00191.safetensors", "model.layers.88.mlp.up_proj.weight": "model-00173-of-00191.safetensors", "model.layers.88.post_attention_layernorm.weight": "model-00173-of-00191.safetensors", "model.layers.88.self_attn.k_proj.weight": "model-00173-of-00191.safetensors", "model.layers.88.self_attn.o_proj.weight": "model-00173-of-00191.safetensors", "model.layers.88.self_attn.q_proj.weight": "model-00173-of-00191.safetensors", "model.layers.88.self_attn.v_proj.weight": "model-00173-of-00191.safetensors", "model.layers.89.input_layernorm.weight": "model-00173-of-00191.safetensors", "model.layers.89.mlp.down_proj.weight": "model-00174-of-00191.safetensors", "model.layers.89.mlp.gate_proj.weight": "model-00174-of-00191.safetensors", "model.layers.89.mlp.up_proj.weight": "model-00175-of-00191.safetensors", "model.layers.89.post_attention_layernorm.weight": "model-00175-of-00191.safetensors", 
"model.layers.89.self_attn.k_proj.weight": "model-00175-of-00191.safetensors", "model.layers.89.self_attn.o_proj.weight": "model-00175-of-00191.safetensors", "model.layers.89.self_attn.q_proj.weight": "model-00175-of-00191.safetensors", "model.layers.89.self_attn.v_proj.weight": "model-00175-of-00191.safetensors", "model.layers.9.input_layernorm.weight": "model-00175-of-00191.safetensors", "model.layers.9.mlp.down_proj.weight": "model-00175-of-00191.safetensors", "model.layers.9.mlp.gate_proj.weight": "model-00176-of-00191.safetensors", "model.layers.9.mlp.up_proj.weight": "model-00176-of-00191.safetensors", "model.layers.9.post_attention_layernorm.weight": "model-00176-of-00191.safetensors", "model.layers.9.self_attn.k_proj.weight": "model-00176-of-00191.safetensors", "model.layers.9.self_attn.o_proj.weight": "model-00176-of-00191.safetensors", "model.layers.9.self_attn.q_proj.weight": "model-00176-of-00191.safetensors", "model.layers.9.self_attn.v_proj.weight": "model-00176-of-00191.safetensors", "model.layers.90.input_layernorm.weight": "model-00176-of-00191.safetensors", "model.layers.90.mlp.down_proj.weight": "model-00177-of-00191.safetensors", "model.layers.90.mlp.gate_proj.weight": "model-00177-of-00191.safetensors", "model.layers.90.mlp.up_proj.weight": "model-00178-of-00191.safetensors", "model.layers.90.post_attention_layernorm.weight": "model-00178-of-00191.safetensors", "model.layers.90.self_attn.k_proj.weight": "model-00178-of-00191.safetensors", "model.layers.90.self_attn.o_proj.weight": "model-00178-of-00191.safetensors", "model.layers.90.self_attn.q_proj.weight": "model-00178-of-00191.safetensors", "model.layers.90.self_attn.v_proj.weight": "model-00178-of-00191.safetensors", "model.layers.91.input_layernorm.weight": "model-00178-of-00191.safetensors", "model.layers.91.mlp.down_proj.weight": "model-00178-of-00191.safetensors", "model.layers.91.mlp.gate_proj.weight": "model-00179-of-00191.safetensors", "model.layers.91.mlp.up_proj.weight": "model-00179-of-00191.safetensors", "model.layers.91.post_attention_layernorm.weight": "model-00179-of-00191.safetensors", "model.layers.91.self_attn.k_proj.weight": "model-00179-of-00191.safetensors", "model.layers.91.self_attn.o_proj.weight": "model-00179-of-00191.safetensors", "model.layers.91.self_attn.q_proj.weight": "model-00179-of-00191.safetensors", "model.layers.91.self_attn.v_proj.weight": "model-00179-of-00191.safetensors", "model.layers.92.input_layernorm.weight": "model-00179-of-00191.safetensors", "model.layers.92.mlp.down_proj.weight": "model-00180-of-00191.safetensors", "model.layers.92.mlp.gate_proj.weight": "model-00180-of-00191.safetensors", "model.layers.92.mlp.up_proj.weight": "model-00181-of-00191.safetensors", "model.layers.92.post_attention_layernorm.weight": "model-00181-of-00191.safetensors", "model.layers.92.self_attn.k_proj.weight": "model-00181-of-00191.safetensors", "model.layers.92.self_attn.o_proj.weight": "model-00181-of-00191.safetensors", "model.layers.92.self_attn.q_proj.weight": "model-00181-of-00191.safetensors", "model.layers.92.self_attn.v_proj.weight": "model-00181-of-00191.safetensors", "model.layers.93.input_layernorm.weight": "model-00181-of-00191.safetensors", "model.layers.93.mlp.down_proj.weight": "model-00181-of-00191.safetensors", "model.layers.93.mlp.gate_proj.weight": "model-00182-of-00191.safetensors", "model.layers.93.mlp.up_proj.weight": "model-00182-of-00191.safetensors", "model.layers.93.post_attention_layernorm.weight": "model-00182-of-00191.safetensors", 
"model.layers.93.self_attn.k_proj.weight": "model-00182-of-00191.safetensors", "model.layers.93.self_attn.o_proj.weight": "model-00182-of-00191.safetensors", "model.layers.93.self_attn.q_proj.weight": "model-00182-of-00191.safetensors", "model.layers.93.self_attn.v_proj.weight": "model-00182-of-00191.safetensors", "model.layers.94.input_layernorm.weight": "model-00182-of-00191.safetensors", "model.layers.94.mlp.down_proj.weight": "model-00183-of-00191.safetensors", "model.layers.94.mlp.gate_proj.weight": "model-00183-of-00191.safetensors", "model.layers.94.mlp.up_proj.weight": "model-00184-of-00191.safetensors", "model.layers.94.post_attention_layernorm.weight": "model-00184-of-00191.safetensors", "model.layers.94.self_attn.k_proj.weight": "model-00184-of-00191.safetensors", "model.layers.94.self_attn.o_proj.weight": "model-00184-of-00191.safetensors", "model.layers.94.self_attn.q_proj.weight": "model-00184-of-00191.safetensors", "model.layers.94.self_attn.v_proj.weight": "model-00184-of-00191.safetensors", "model.layers.95.input_layernorm.weight": "model-00184-of-00191.safetensors", "model.layers.95.mlp.down_proj.weight": "model-00184-of-00191.safetensors", "model.layers.95.mlp.gate_proj.weight": "model-00185-of-00191.safetensors", "model.layers.95.mlp.up_proj.weight": "model-00185-of-00191.safetensors", "model.layers.95.post_attention_layernorm.weight": "model-00185-of-00191.safetensors", "model.layers.95.self_attn.k_proj.weight": "model-00185-of-00191.safetensors", "model.layers.95.self_attn.o_proj.weight": "model-00185-of-00191.safetensors", "model.layers.95.self_attn.q_proj.weight": "model-00185-of-00191.safetensors", "model.layers.95.self_attn.v_proj.weight": "model-00185-of-00191.safetensors", "model.layers.96.input_layernorm.weight": "model-00185-of-00191.safetensors", "model.layers.96.mlp.down_proj.weight": "model-00186-of-00191.safetensors", "model.layers.96.mlp.gate_proj.weight": "model-00186-of-00191.safetensors", "model.layers.96.mlp.up_proj.weight": "model-00187-of-00191.safetensors", "model.layers.96.post_attention_layernorm.weight": "model-00187-of-00191.safetensors", "model.layers.96.self_attn.k_proj.weight": "model-00187-of-00191.safetensors", "model.layers.96.self_attn.o_proj.weight": "model-00187-of-00191.safetensors", "model.layers.96.self_attn.q_proj.weight": "model-00187-of-00191.safetensors", "model.layers.96.self_attn.v_proj.weight": "model-00187-of-00191.safetensors", "model.layers.97.input_layernorm.weight": "model-00187-of-00191.safetensors", "model.layers.97.mlp.down_proj.weight": "model-00187-of-00191.safetensors", "model.layers.97.mlp.gate_proj.weight": "model-00188-of-00191.safetensors", "model.layers.97.mlp.up_proj.weight": "model-00188-of-00191.safetensors", "model.layers.97.post_attention_layernorm.weight": "model-00188-of-00191.safetensors", "model.layers.97.self_attn.k_proj.weight": "model-00188-of-00191.safetensors", "model.layers.97.self_attn.o_proj.weight": "model-00188-of-00191.safetensors", "model.layers.97.self_attn.q_proj.weight": "model-00188-of-00191.safetensors", "model.layers.97.self_attn.v_proj.weight": "model-00188-of-00191.safetensors", "model.layers.98.input_layernorm.weight": "model-00188-of-00191.safetensors", "model.layers.98.mlp.down_proj.weight": "model-00189-of-00191.safetensors", "model.layers.98.mlp.gate_proj.weight": "model-00189-of-00191.safetensors", "model.layers.98.mlp.up_proj.weight": "model-00190-of-00191.safetensors", "model.layers.98.post_attention_layernorm.weight": "model-00190-of-00191.safetensors", 
"model.layers.98.self_attn.k_proj.weight": "model-00190-of-00191.safetensors", "model.layers.98.self_attn.o_proj.weight": "model-00190-of-00191.safetensors", "model.layers.98.self_attn.q_proj.weight": "model-00190-of-00191.safetensors", "model.layers.98.self_attn.v_proj.weight": "model-00190-of-00191.safetensors", "model.layers.99.input_layernorm.weight": "model-00190-of-00191.safetensors", "model.layers.99.mlp.down_proj.weight": "model-00190-of-00191.safetensors", "model.layers.99.mlp.gate_proj.weight": "model-00191-of-00191.safetensors", "model.layers.99.mlp.up_proj.weight": "model-00191-of-00191.safetensors", "model.layers.99.post_attention_layernorm.weight": "model-00191-of-00191.safetensors", "model.layers.99.self_attn.k_proj.weight": "model-00191-of-00191.safetensors", "model.layers.99.self_attn.o_proj.weight": "model-00191-of-00191.safetensors", "model.layers.99.self_attn.q_proj.weight": "model-00191-of-00191.safetensors", "model.layers.99.self_attn.v_proj.weight": "model-00191-of-00191.safetensors", "model.norm.weight": "model-00191-of-00191.safetensors"}}
\ No newline at end of file
diff --git a/special_tokens_map.json b/special_tokens_map.json
new file mode 100644
index 0000000000000000000000000000000000000000..3c1d04911c269b925af977a3151c9704e990e4d0
--- /dev/null
+++ b/special_tokens_map.json
@@ -0,0 +1,23 @@
+{
+ "bos_token": {
+ "content": "<|begin_of_text|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "eos_token": {
+ "content": "<|eot_id|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "pad_token": {
+ "content": "<|finetune_right_pad_id|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ }
+}
diff --git a/tokenizer.json b/tokenizer.json
new file mode 100644
index 0000000000000000000000000000000000000000..1c1d8d5c9024994f1d3b00f9662b8dd89ca13cf2
--- /dev/null
+++ b/tokenizer.json
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6b9e4e7fb171f92fd137b777cc2714bf87d11576700a1dcd7a399e7bbe39537b
+size 17209920
diff --git a/tokenizer_config.json b/tokenizer_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..c68051fe3c4d23234a59316bc52d21f6e3a4182c
--- /dev/null
+++ b/tokenizer_config.json
@@ -0,0 +1,2063 @@
+{
+ "added_tokens_decoder": {
+ "128000": {
+ "content": "<|begin_of_text|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128001": {
+ "content": "<|end_of_text|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128002": {
+ "content": "<|reserved_special_token_0|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128003": {
+ "content": "<|reserved_special_token_1|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128004": {
+ "content": "<|finetune_right_pad_id|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128005": {
+ "content": "<|reserved_special_token_2|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128006": {
+ "content": "<|start_header_id|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128007": {
+ "content": "<|end_header_id|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128008": {
+ "content": "<|eom_id|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128009": {
+ "content": "<|eot_id|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128010": {
+ "content": "<|python_tag|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128011": {
+ "content": "<|reserved_special_token_3|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128012": {
+ "content": "<|reserved_special_token_4|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128013": {
+ "content": "<|reserved_special_token_5|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128014": {
+ "content": "<|reserved_special_token_6|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128015": {
+ "content": "<|reserved_special_token_7|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128016": {
+ "content": "<|reserved_special_token_8|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128017": {
+ "content": "<|reserved_special_token_9|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128018": {
+ "content": "<|reserved_special_token_10|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128019": {
+ "content": "<|reserved_special_token_11|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128020": {
+ "content": "<|reserved_special_token_12|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128021": {
+ "content": "<|reserved_special_token_13|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128022": {
+ "content": "<|reserved_special_token_14|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128023": {
+ "content": "<|reserved_special_token_15|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128024": {
+ "content": "<|reserved_special_token_16|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128025": {
+ "content": "<|reserved_special_token_17|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128026": {
+ "content": "<|reserved_special_token_18|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128027": {
+ "content": "<|reserved_special_token_19|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128028": {
+ "content": "<|reserved_special_token_20|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128029": {
+ "content": "<|reserved_special_token_21|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128030": {
+ "content": "<|reserved_special_token_22|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128031": {
+ "content": "<|reserved_special_token_23|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128032": {
+ "content": "<|reserved_special_token_24|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128033": {
+ "content": "<|reserved_special_token_25|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128034": {
+ "content": "<|reserved_special_token_26|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128035": {
+ "content": "<|reserved_special_token_27|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128036": {
+ "content": "<|reserved_special_token_28|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128037": {
+ "content": "<|reserved_special_token_29|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128038": {
+ "content": "<|reserved_special_token_30|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128039": {
+ "content": "<|reserved_special_token_31|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128040": {
+ "content": "<|reserved_special_token_32|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128041": {
+ "content": "<|reserved_special_token_33|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128042": {
+ "content": "<|reserved_special_token_34|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128043": {
+ "content": "<|reserved_special_token_35|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128044": {
+ "content": "<|reserved_special_token_36|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128045": {
+ "content": "<|reserved_special_token_37|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128046": {
+ "content": "<|reserved_special_token_38|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128047": {
+ "content": "<|reserved_special_token_39|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128048": {
+ "content": "<|reserved_special_token_40|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128049": {
+ "content": "<|reserved_special_token_41|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128050": {
+ "content": "<|reserved_special_token_42|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128051": {
+ "content": "<|reserved_special_token_43|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128052": {
+ "content": "<|reserved_special_token_44|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128053": {
+ "content": "<|reserved_special_token_45|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128054": {
+ "content": "<|reserved_special_token_46|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128055": {
+ "content": "<|reserved_special_token_47|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128056": {
+ "content": "<|reserved_special_token_48|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128057": {
+ "content": "<|reserved_special_token_49|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128058": {
+ "content": "<|reserved_special_token_50|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128059": {
+ "content": "<|reserved_special_token_51|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128060": {
+ "content": "<|reserved_special_token_52|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128061": {
+ "content": "<|reserved_special_token_53|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128062": {
+ "content": "<|reserved_special_token_54|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128063": {
+ "content": "<|reserved_special_token_55|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128064": {
+ "content": "<|reserved_special_token_56|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128065": {
+ "content": "<|reserved_special_token_57|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128066": {
+ "content": "<|reserved_special_token_58|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128067": {
+ "content": "<|reserved_special_token_59|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128068": {
+ "content": "<|reserved_special_token_60|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128069": {
+ "content": "<|reserved_special_token_61|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128070": {
+ "content": "<|reserved_special_token_62|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128071": {
+ "content": "<|reserved_special_token_63|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128072": {
+ "content": "<|reserved_special_token_64|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128073": {
+ "content": "<|reserved_special_token_65|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128074": {
+ "content": "<|reserved_special_token_66|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128075": {
+ "content": "<|reserved_special_token_67|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128076": {
+ "content": "<|reserved_special_token_68|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128077": {
+ "content": "<|reserved_special_token_69|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128078": {
+ "content": "<|reserved_special_token_70|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128079": {
+ "content": "<|reserved_special_token_71|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128080": {
+ "content": "<|reserved_special_token_72|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128081": {
+ "content": "<|reserved_special_token_73|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128082": {
+ "content": "<|reserved_special_token_74|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128083": {
+ "content": "<|reserved_special_token_75|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128084": {
+ "content": "<|reserved_special_token_76|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128085": {
+ "content": "<|reserved_special_token_77|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128086": {
+ "content": "<|reserved_special_token_78|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128087": {
+ "content": "<|reserved_special_token_79|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128088": {
+ "content": "<|reserved_special_token_80|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128089": {
+ "content": "<|reserved_special_token_81|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128090": {
+ "content": "<|reserved_special_token_82|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128091": {
+ "content": "<|reserved_special_token_83|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128092": {
+ "content": "<|reserved_special_token_84|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128093": {
+ "content": "<|reserved_special_token_85|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128094": {
+ "content": "<|reserved_special_token_86|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128095": {
+ "content": "<|reserved_special_token_87|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128096": {
+ "content": "<|reserved_special_token_88|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128097": {
+ "content": "<|reserved_special_token_89|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128098": {
+ "content": "<|reserved_special_token_90|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128099": {
+ "content": "<|reserved_special_token_91|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128100": {
+ "content": "<|reserved_special_token_92|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128101": {
+ "content": "<|reserved_special_token_93|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128102": {
+ "content": "<|reserved_special_token_94|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128103": {
+ "content": "<|reserved_special_token_95|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128104": {
+ "content": "<|reserved_special_token_96|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128105": {
+ "content": "<|reserved_special_token_97|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128106": {
+ "content": "<|reserved_special_token_98|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128107": {
+ "content": "<|reserved_special_token_99|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128108": {
+ "content": "<|reserved_special_token_100|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128109": {
+ "content": "<|reserved_special_token_101|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128110": {
+ "content": "<|reserved_special_token_102|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128111": {
+ "content": "<|reserved_special_token_103|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128112": {
+ "content": "<|reserved_special_token_104|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128113": {
+ "content": "<|reserved_special_token_105|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128114": {
+ "content": "<|reserved_special_token_106|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128115": {
+ "content": "<|reserved_special_token_107|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128116": {
+ "content": "<|reserved_special_token_108|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128117": {
+ "content": "<|reserved_special_token_109|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128118": {
+ "content": "<|reserved_special_token_110|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128119": {
+ "content": "<|reserved_special_token_111|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128120": {
+ "content": "<|reserved_special_token_112|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128121": {
+ "content": "<|reserved_special_token_113|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128122": {
+ "content": "<|reserved_special_token_114|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128123": {
+ "content": "<|reserved_special_token_115|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128124": {
+ "content": "<|reserved_special_token_116|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128125": {
+ "content": "<|reserved_special_token_117|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128126": {
+ "content": "<|reserved_special_token_118|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128127": {
+ "content": "<|reserved_special_token_119|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128128": {
+ "content": "<|reserved_special_token_120|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128129": {
+ "content": "<|reserved_special_token_121|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128130": {
+ "content": "<|reserved_special_token_122|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128131": {
+ "content": "<|reserved_special_token_123|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128132": {
+ "content": "<|reserved_special_token_124|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128133": {
+ "content": "<|reserved_special_token_125|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128134": {
+ "content": "<|reserved_special_token_126|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128135": {
+ "content": "<|reserved_special_token_127|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128136": {
+ "content": "<|reserved_special_token_128|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128137": {
+ "content": "<|reserved_special_token_129|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128138": {
+ "content": "<|reserved_special_token_130|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128139": {
+ "content": "<|reserved_special_token_131|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128140": {
+ "content": "<|reserved_special_token_132|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128141": {
+ "content": "<|reserved_special_token_133|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128142": {
+ "content": "<|reserved_special_token_134|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128143": {
+ "content": "<|reserved_special_token_135|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128144": {
+ "content": "<|reserved_special_token_136|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128145": {
+ "content": "<|reserved_special_token_137|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128146": {
+ "content": "<|reserved_special_token_138|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128147": {
+ "content": "<|reserved_special_token_139|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128148": {
+ "content": "<|reserved_special_token_140|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128149": {
+ "content": "<|reserved_special_token_141|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128150": {
+ "content": "<|reserved_special_token_142|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128151": {
+ "content": "<|reserved_special_token_143|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128152": {
+ "content": "<|reserved_special_token_144|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128153": {
+ "content": "<|reserved_special_token_145|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128154": {
+ "content": "<|reserved_special_token_146|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128155": {
+ "content": "<|reserved_special_token_147|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128156": {
+ "content": "<|reserved_special_token_148|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128157": {
+ "content": "<|reserved_special_token_149|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128158": {
+ "content": "<|reserved_special_token_150|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128159": {
+ "content": "<|reserved_special_token_151|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128160": {
+ "content": "<|reserved_special_token_152|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128161": {
+ "content": "<|reserved_special_token_153|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128162": {
+ "content": "<|reserved_special_token_154|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128163": {
+ "content": "<|reserved_special_token_155|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128164": {
+ "content": "<|reserved_special_token_156|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128165": {
+ "content": "<|reserved_special_token_157|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128166": {
+ "content": "<|reserved_special_token_158|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128167": {
+ "content": "<|reserved_special_token_159|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128168": {
+ "content": "<|reserved_special_token_160|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128169": {
+ "content": "<|reserved_special_token_161|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128170": {
+ "content": "<|reserved_special_token_162|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128171": {
+ "content": "<|reserved_special_token_163|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128172": {
+ "content": "<|reserved_special_token_164|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128173": {
+ "content": "<|reserved_special_token_165|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128174": {
+ "content": "<|reserved_special_token_166|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128175": {
+ "content": "<|reserved_special_token_167|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128176": {
+ "content": "<|reserved_special_token_168|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128177": {
+ "content": "<|reserved_special_token_169|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128178": {
+ "content": "<|reserved_special_token_170|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128179": {
+ "content": "<|reserved_special_token_171|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128180": {
+ "content": "<|reserved_special_token_172|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128181": {
+ "content": "<|reserved_special_token_173|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128182": {
+ "content": "<|reserved_special_token_174|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128183": {
+ "content": "<|reserved_special_token_175|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128184": {
+ "content": "<|reserved_special_token_176|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128185": {
+ "content": "<|reserved_special_token_177|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128186": {
+ "content": "<|reserved_special_token_178|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128187": {
+ "content": "<|reserved_special_token_179|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128188": {
+ "content": "<|reserved_special_token_180|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128189": {
+ "content": "<|reserved_special_token_181|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128190": {
+ "content": "<|reserved_special_token_182|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128191": {
+ "content": "<|reserved_special_token_183|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128192": {
+ "content": "<|reserved_special_token_184|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128193": {
+ "content": "<|reserved_special_token_185|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128194": {
+ "content": "<|reserved_special_token_186|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128195": {
+ "content": "<|reserved_special_token_187|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128196": {
+ "content": "<|reserved_special_token_188|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128197": {
+ "content": "<|reserved_special_token_189|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128198": {
+ "content": "<|reserved_special_token_190|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128199": {
+ "content": "<|reserved_special_token_191|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128200": {
+ "content": "<|reserved_special_token_192|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128201": {
+ "content": "<|reserved_special_token_193|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128202": {
+ "content": "<|reserved_special_token_194|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128203": {
+ "content": "<|reserved_special_token_195|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128204": {
+ "content": "<|reserved_special_token_196|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128205": {
+ "content": "<|reserved_special_token_197|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128206": {
+ "content": "<|reserved_special_token_198|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128207": {
+ "content": "<|reserved_special_token_199|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128208": {
+ "content": "<|reserved_special_token_200|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128209": {
+ "content": "<|reserved_special_token_201|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128210": {
+ "content": "<|reserved_special_token_202|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128211": {
+ "content": "<|reserved_special_token_203|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128212": {
+ "content": "<|reserved_special_token_204|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128213": {
+ "content": "<|reserved_special_token_205|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128214": {
+ "content": "<|reserved_special_token_206|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128215": {
+ "content": "<|reserved_special_token_207|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128216": {
+ "content": "<|reserved_special_token_208|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128217": {
+ "content": "<|reserved_special_token_209|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128218": {
+ "content": "<|reserved_special_token_210|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128219": {
+ "content": "<|reserved_special_token_211|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128220": {
+ "content": "<|reserved_special_token_212|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128221": {
+ "content": "<|reserved_special_token_213|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128222": {
+ "content": "<|reserved_special_token_214|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128223": {
+ "content": "<|reserved_special_token_215|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128224": {
+ "content": "<|reserved_special_token_216|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128225": {
+ "content": "<|reserved_special_token_217|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128226": {
+ "content": "<|reserved_special_token_218|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128227": {
+ "content": "<|reserved_special_token_219|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128228": {
+ "content": "<|reserved_special_token_220|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128229": {
+ "content": "<|reserved_special_token_221|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128230": {
+ "content": "<|reserved_special_token_222|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128231": {
+ "content": "<|reserved_special_token_223|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128232": {
+ "content": "<|reserved_special_token_224|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128233": {
+ "content": "<|reserved_special_token_225|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128234": {
+ "content": "<|reserved_special_token_226|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128235": {
+ "content": "<|reserved_special_token_227|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128236": {
+ "content": "<|reserved_special_token_228|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128237": {
+ "content": "<|reserved_special_token_229|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128238": {
+ "content": "<|reserved_special_token_230|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128239": {
+ "content": "<|reserved_special_token_231|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128240": {
+ "content": "<|reserved_special_token_232|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128241": {
+ "content": "<|reserved_special_token_233|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128242": {
+ "content": "<|reserved_special_token_234|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128243": {
+ "content": "<|reserved_special_token_235|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128244": {
+ "content": "<|reserved_special_token_236|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128245": {
+ "content": "<|reserved_special_token_237|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128246": {
+ "content": "<|reserved_special_token_238|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128247": {
+ "content": "<|reserved_special_token_239|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128248": {
+ "content": "<|reserved_special_token_240|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128249": {
+ "content": "<|reserved_special_token_241|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128250": {
+ "content": "<|reserved_special_token_242|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128251": {
+ "content": "<|reserved_special_token_243|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128252": {
+ "content": "<|reserved_special_token_244|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128253": {
+ "content": "<|reserved_special_token_245|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128254": {
+ "content": "<|reserved_special_token_246|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128255": {
+ "content": "<|reserved_special_token_247|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ }
+ },
+ "bos_token": "<|begin_of_text|>",
+ "clean_up_tokenization_spaces": true,
+ "eos_token": "<|eot_id|>",
+ "extra_special_tokens": {},
+ "model_input_names": [
+ "input_ids",
+ "attention_mask"
+ ],
+ "model_max_length": 131072,
+ "pad_token": "<|finetune_right_pad_id|>",
+ "tokenizer_class": "PreTrainedTokenizerFast"
+}
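
The tokenizer configuration above declares `<|begin_of_text|>` as the BOS token, `<|eot_id|>` as the EOS token, `<|finetune_right_pad_id|>` as the padding token, and a `model_max_length` of 131072, with token ids 128002-128255 reserved as unused special tokens. Below is a minimal sketch (not part of the release files) of how one might sanity-check these values after downloading the repository; the assertions simply restate the settings from `special_tokens_map.json` and `tokenizer_config.json`.

```python
from transformers import AutoTokenizer

model_name = "deepcogito/cogito-v2-preview-llama-405B"

# Load the fast tokenizer defined by tokenizer.json / tokenizer_config.json.
tokenizer = AutoTokenizer.from_pretrained(model_name)

# Special tokens declared in special_tokens_map.json and tokenizer_config.json.
assert tokenizer.bos_token == "<|begin_of_text|>"
assert tokenizer.eos_token == "<|eot_id|>"
assert tokenizer.pad_token == "<|finetune_right_pad_id|>"
assert tokenizer.model_max_length == 131072

# Reserved special tokens are registered in added_tokens_decoder, so they are
# encoded as single ids rather than being split into subword pieces.
ids = tokenizer.encode("<|reserved_special_token_0|>", add_special_tokens=False)
print(ids)  # expected: [128002]
```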