dlxj committed on
Commit ·
750800e
1
Parent(s): 2b26468
init
Browse files- .gitattributes +3 -0
- .gitignore +1 -0
- Sakura_Launcher_GUI_v0.0.6-beta.exe +3 -0
- readme.txt +17 -0
- run.py +136 -0
.gitattributes
CHANGED
|
@@ -1,4 +1,7 @@
|
|
| 1 |
*.7z filter=lfs diff=lfs merge=lfs -text
|
|
|
|
|
|
|
|
|
|
| 2 |
*.arrow filter=lfs diff=lfs merge=lfs -text
|
| 3 |
*.bin filter=lfs diff=lfs merge=lfs -text
|
| 4 |
*.bz2 filter=lfs diff=lfs merge=lfs -text
|
|
|
|
| 1 |
*.7z filter=lfs diff=lfs merge=lfs -text
|
| 2 |
+
*.gguf filter=lfs diff=lfs merge=lfs -text
|
| 3 |
+
*.exe filter=lfs diff=lfs merge=lfs -text
|
| 4 |
+
*.dll filter=lfs diff=lfs merge=lfs -text
|
| 5 |
*.arrow filter=lfs diff=lfs merge=lfs -text
|
| 6 |
*.bin filter=lfs diff=lfs merge=lfs -text
|
| 7 |
*.bz2 filter=lfs diff=lfs merge=lfs -text
|
.gitignore
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
llama.log
|
Sakura_Launcher_GUI_v0.0.6-beta.exe
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:d3bb2741c1422f4c148cf417ba1ea59185f890d1d0eecba30dd9ec2b273a7438
|
| 3 |
+
size 123578607
|
readme.txt
ADDED
|
@@ -0,0 +1,17 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
see echodict/README.md -> LiveABC互動日语 -> 轻小说翻译
|
| 2 |
+
|
| 3 |
+
see Y:/ai/Galgame_Dataset
|
| 4 |
+
|
| 5 |
+
解压密码:9ll9Ke4iq0jqyq3gS1Wy
|
| 6 |
+
|
| 7 |
+
see /root/huggingface/NLPP_Audio/data/高嶺.txt
|
| 8 |
+
|
| 9 |
+
see https://linux.do/t/topic/144678 原神数据集
|
| 10 |
+
https://modelscope.cn/datasets/aihobbyist/Genshin_Dataset
|
| 11 |
+
|
| 12 |
+
|
| 13 |
+
https://www.bilibili.com/video/BV13p4y1d7v9 Bert-VITS2-2.3 数据标注
|
| 14 |
+
|
| 15 |
+
https://www.bilibili.com/video/BV1yz4y1M71e/
|
| 16 |
+
# 语音合成整合包
|
| 17 |
+
|
run.py
ADDED
|
@@ -0,0 +1,136 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
"""Translate Japanese light-novel text to Chinese via a local SakuraLLM
OpenAI-compatible endpoint (e.g. llama.cpp server), streaming the output."""
from openai import OpenAI

# --- Client setup -----------------------------------------------------------
# If using auth:
# auth = "user:pass"
# client = OpenAI(api_key="114514", base_url=f"http://{auth}@localhost:5000/v1")

# Not using auth. The api_key is a dummy value (the local server does not
# validate it); base_url is the server's IP/port with the "/v1" suffix.
api_base_url = "http://localhost:8080/v1"
client = OpenAI(api_key="114514", base_url=api_base_url)

# --- Input text -------------------------------------------------------------
# NOTE: each assignment below overwrites the previous one, so only the LAST
# `input_text` is actually translated; the earlier ones are kept as sample
# snippets that can be swapped in for testing.
input_text = """先程から何度も込められているため息に、確実に実体験からくる言葉だと分かって周としては胸が痛い。彼女にとって好意も悪意も向けられるのを慣れすぎている事が透けて見えて、やるせなさに唇を嚙んでしまう。
彼女の今までの交友関係は真昼が天使様として振る舞ってきた軌跡ではあるのだが、それが全て快いものではないのだと改めて突き付けられていた。
「可愛いものだと勉強の面倒見てもらえるから、評判がいい女と仲良くなって自分の評判を上げる、周りから除け者にされないように、とかそういうもの。悪いとまあ、その、アクセサリーというか戦利品? として欲している殿方とか私に振られた男子を拾うために仲良くしてる振りをする方とか……まあ色々居た訳です」
ややぐったりとげんなりを合わせたような声音は本当に苦労してきた事が窺えて、思わず労るように頭を撫でてしまう。
真昼が思い出しただけで心労が嵩んでいそうな声と表情をしていたので周としてはお疲れ様という気持ちでいっぱいだった。"""  # the Japanese text to translate

input_text = """
○○○○○○○○
行くぞ、▲高嶺**▲!
高嶺
はい!
よっ。
あっ。
げ、失敗。悪い、●が取りにいくから!
姉ヶ崎
はい、ボール。
▲姉ヶ崎*▲? サンキュ。
どういたしまして。ここの部って男女で練習するんだ?
ウチの部、そこらへん適当なんだよ。
ふぅん。それにしてもキレイな子だよね……。
▲高嶺**▲? うん、たしかに。
あんな子がチームメイトなんだ……。
ん?
ううん、なんにも。じゃあね、がんばって。
"""

input_text = """おっさんと女の子 リアルエッチ ふぇらっこ痴女のおしゃぶり日記"""

# v0.9-style query (kept for reference; the v0.9 prompt below is commented out).
query = "将下面的日文文本翻译成中文:" + input_text

# --- Glossary ---------------------------------------------------------------
# Each entry maps a source term to its translation; "info" is an optional note.
gpt_dict = [
    {
        "src": "周",
        "dst": "周",
        "info": "名字,男孩",
    },
    {
        "src": "真昼",
        "dst": "真昼",
        "info": "名字,女孩",
    },
]

# Render the glossary as "src->dst #info" lines; the "#info" suffix is omitted
# when an entry carries no note.
gpt_dict_text_list = []
for entry in gpt_dict:
    src = entry["src"]
    dst = entry["dst"]
    info = entry.get("info")  # idiomatic dict.get instead of `"info" in entry.keys()`
    if info:
        gpt_dict_text_list.append(f"{src}->{dst} #{info}")
    else:
        gpt_dict_text_list.append(f"{src}->{dst}")

gpt_dict_raw_text = "\n".join(gpt_dict_text_list)

# v0.10 user prompt; the glossary section may be empty.
# FIX: the original string began with "据以下术语表" — the leading "根" was
# missing from the standard SakuraLLM v0.10 prompt "根据以下术语表(可以为空):".
query_v010 = "根据以下术语表(可以为空):\n" + gpt_dict_raw_text + "\n\n" + "将下面的日文文本根据上述术语表的对应关系和备注翻译成中文:" + input_text

# Backend-specific sampling parameters that are not part of the OpenAI API.
# FIX: these must travel in the JSON request *body*, so pass them via
# `extra_body`; the original used `extra_query`, which serializes them into
# the URL query string where the inference server never reads them.
extra_body = {
    'do_sample': True,
    'num_beams': 1,
    'repetition_penalty': 1.0,
}

models_list = client.models.list()
print(models_list)
# print(models_list.data[0].model_version)

# chat_completion = client.chat.completions.create(
for output in client.chat.completions.create(
    model="sukinishiro",
    ############# v0.9 prompt #############
    # messages=[
    #     {
    #         "role": "system",
    #         "content": "你是一个轻小说翻译模型,可以流畅通顺地以日本轻小说的风格将日文翻译成简体中文,并联系上下文正确使用人称代词,不擅自添加原文中没有的代词。"
    #     },
    #     {
    #         "role": "user",
    #         "content": f"{query}"
    #     }
    # ],
    ############# v0.10 prompt #############
    messages=[
        {
            "role": "system",
            "content": "你是一个轻小说翻译模型,可以流畅通顺地使用给定的术语表以日本轻小说的风格将日文翻译成简体中文,并联系上下文正确使用人称代词,注意不要混淆使役态和被动态的主语和宾语,不要擅自添加原文中没有的代词,也不要擅自增加或减少换行。"
        },
        {
            "role": "user",
            "content": query_v010,  # f"{query_v010}" was a redundant wrapper
        }
    ],
    temperature=0.1,
    top_p=0.3,
    max_tokens=512,
    frequency_penalty=0.0,
    seed=-1,
    extra_body=extra_body,
    stream=True,
    # stop=['\n\n'] # NotImplement
    # n=1 # NotImplement
    # logit_bias # won't support
    # response_format # won't support
    # tools # won't support
    # tool_choice # won't support
    # user # won't support
):
    # stream=True: print each delta chunk as it arrives; report why the
    # stream stopped once a finish_reason is present.
    if output.choices[0].finish_reason:
        print("\nfinish reason is", output.choices[0].finish_reason)
    elif output.choices[0].delta.content:
        print(output.choices[0].delta.content, end="")

# stream=False key response (reference for the non-streaming variant above):

# translated_text = chat_completion.choices[0].message.content
# finish_reason = chat_completion.choices[0].finish_reason
# prompt_token_cnt = chat_completion.usage.prompt_tokens
# completion_token_cnt = chat_completion.usage.completion_tokens
# model = chat_completion.model

# print("\n".join([
#     f"The output is {translated_text}",
#     f"The inference stopped because of `{finish_reason}`.",
#     f"The output used {completion_token_cnt} tokens.",
#     f"The output was generated by the `{model}` model."
# ]))