modelId
stringlengths
4
111
lastModified
stringlengths
24
24
tags
list
pipeline_tag
stringlengths
5
30
author
stringlengths
2
34
config
null
securityStatus
null
id
stringlengths
4
111
likes
int64
0
9.53k
downloads
int64
2
73.6M
library_name
stringlengths
2
84
created
timestamp[us]
card
stringlengths
101
901k
card_len
int64
101
901k
embeddings
list
KnutJaegersberg/galactica-orca-wizardlm-1.3b
2023-08-07T04:50:46.000Z
[ "transformers", "pytorch", "safetensors", "opt", "text-generation", "autotrain", "license:cc-by-nc-4.0", "endpoints_compatible", "has_space", "text-generation-inference", "region:us" ]
text-generation
KnutJaegersberg
null
null
KnutJaegersberg/galactica-orca-wizardlm-1.3b
2
5,887
transformers
2023-07-26T13:09:09
--- tags: - autotrain - text-generation license: cc-by-nc-4.0 --- Prompt ### System:\n{system}\n\n### User:\n{instruction}\n\n### Response:\n
142
[ [ -0.0191802978515625, -0.022857666015625, 0.017242431640625, 0.0247802734375, -0.01323699951171875, 0.0251617431640625, 0.033416748046875, 0.042266845703125, 0.0264739990234375, 0.0509033203125, -0.053466796875, -0.01198577880859375, -0.041046142578125, 0.0244140625, -0.004962921142578125, 0.06768798828125, 0.00141143798828125, 0.00371551513671875, 0.0009784698486328125, 0.007411956787109375, -0.06378173828125, -0.04144287109375, -0.08978271484375, -0.02252197265625, 0.045257568359375, 0.06787109375, 0.0237274169921875, 0.0251922607421875, 0.021820068359375, 0.0220184326171875, 0.0240325927734375, 0.002567291259765625, -0.0218048095703125, 0.04986572265625, 0.0010089874267578125, -0.02716064453125, -0.03424072265625, -0.0153961181640625, 0.03558349609375, 0.0258331298828125, 0.01076507568359375, 0.0173492431640625, -0.00843048095703125, 0.00858306884765625, -0.027069091796875, 0.00313568115234375, 0.009857177734375, 0.01322174072265625, 0.005954742431640625, -0.007030487060546875, -0.0280914306640625, -0.037811279296875, -0.028289794921875, -0.06561279296875, -0.009185791015625, 0.0207061767578125, 0.030029296875, 0.0126800537109375, -0.041717529296875, 0.0018739700317382812, -0.033843994140625, 0.03460693359375, -0.008392333984375, 0.0115814208984375, 0.04730224609375, 0.0408935546875, -0.0247802734375, -0.06964111328125, -0.043853759765625, -0.00418853759765625, -0.01419830322265625, 0.01580810546875, -0.01157379150390625, 0.00142669677734375, 0.048675537109375, -0.0229949951171875, -0.01068878173828125, 0.001972198486328125, -0.03826904296875, -0.028717041015625, 0.011260986328125, 0.0391845703125, 0.058868408203125, -0.0089111328125, -0.03857421875, -0.01983642578125, -0.0177001953125, 0.021392822265625, -0.00920867919921875, 0.03582763671875, -0.0249176025390625, 0.053497314453125, -0.029815673828125, 0.0369873046875, -0.0005507469177246094, 0.0196990966796875, -0.005298614501953125, -0.027313232421875, -0.037384033203125, 0.00801849365234375, 
0.05303955078125, 0.0197601318359375, 0.00433349609375, 0.004322052001953125, -0.03387451171875, -0.007110595703125, 0.001224517822265625, -0.057098388671875, -0.0304412841796875, -0.00897216796875, -0.039031982421875, 0.003078460693359375, 0.040069580078125, -0.055755615234375, -0.0654296875, -0.01035308837890625, -0.005786895751953125, -0.0067596435546875, 0.003757476806640625, -0.00833892822265625, -0.03619384765625, 0.02423095703125, 0.03271484375, -0.06622314453125, 0.0419921875, 0.0399169921875, 0.0147705078125, 0.047760009765625, 0.0027618408203125, -0.057342529296875, -0.0008053779602050781, -0.0081787109375, 0.051025390625, -0.02667236328125, -0.0704345703125, -0.01605224609375, 0.021728515625, 0.000018417835235595703, -0.02716064453125, 0.0224151611328125, -0.039276123046875, 0.04376220703125, -0.061920166015625, -0.01861572265625, -0.001178741455078125, -0.00902557373046875, -0.038330078125, 0.06201171875, 0.01837158203125, -0.0289764404296875, -0.0009737014770507812, -0.08099365234375, -0.0179595947265625, 0.03363037109375, -0.00598907470703125, 0.04522705078125, -0.00653839111328125, -0.01479339599609375, 0.023406982421875, -0.01470184326171875, -0.029876708984375, -0.028839111328125, -0.01271820068359375, 0.0223236083984375, -0.0106201171875, 0.06854248046875, 0.03253173828125, 0.004566192626953125, 0.013916015625, -0.051483154296875, 0.0006661415100097656, 0.00371551513671875, -0.0223846435546875, -0.00981903076171875, -0.04351806640625, -0.003749847412109375, -0.0012407302856445312, 0.07977294921875, -0.0543212890625, 0.05279541015625, 0.005588531494140625, 0.01097869873046875, 0.0701904296875, 0.0186767578125, 0.021270751953125, -0.008026123046875, 0.063232421875, -0.0291595458984375, -0.003505706787109375, -0.00780487060546875, -0.024383544921875, -0.05169677734375, 0.00594329833984375, -0.006298065185546875, 0.076416015625, -0.03399658203125, 0.0260009765625, 0.0325927734375, -0.007411956787109375, 0.00894927978515625, -0.0131988525390625, 
0.028350830078125, 0.04888916015625, 0.030487060546875, 0.01483154296875, -0.0633544921875, -0.041839599609375, 0.0029811859130859375, -0.06573486328125, -0.027557373046875, 0.025634765625, 0.0122833251953125, -0.040496826171875, 0.0361328125, -0.05609130859375, 0.0302581787109375, 0.008209228515625, 0.007724761962890625, 0.0124359130859375, 0.0352783203125, -0.0189208984375, -0.02752685546875, -0.036376953125, -0.00791168212890625, -0.048919677734375, -0.0546875, -0.0270843505859375, -0.03369140625, 0.0011739730834960938, 0.049957275390625, -0.043731689453125, 0.045379638671875, -0.0105438232421875, -0.08172607421875, 0.0261383056640625, -0.01447296142578125, 0.0221405029296875, -0.0865478515625, 0.00933837890625, -0.025604248046875, -0.01202392578125, -0.016693115234375, 0.011962890625, -0.02130126953125, -0.0094146728515625, -0.0208587646484375, 0.0295867919921875, -0.046844482421875, -0.0202789306640625, -0.0023059844970703125, -0.0095672607421875, 0.0113525390625, -0.01019287109375, -0.0411376953125, 0.08587646484375, 0.0261077880859375, -0.0280609130859375, 0.1068115234375, 0.04278564453125, 0.023468017578125, 0.0450439453125, -0.07867431640625, 0.03021240234375, 0.0092010498046875, 0.0166168212890625, -0.10260009765625, -0.055877685546875, 0.046844482421875, -0.03399658203125, 0.002166748046875, 0.01197052001953125, -0.047607421875, -0.029052734375, -0.01555633544921875, 0.007137298583984375, 0.043731689453125, -0.007686614990234375, 0.01032257080078125, 0.00510406494140625, -0.0030574798583984375, -0.010284423828125, -0.042755126953125, 0.03546142578125, -0.006809234619140625, -0.0271453857421875, -0.0224151611328125, -0.022247314453125, 0.00220489501953125, -0.03668212890625, -0.00146484375, -0.0244140625, 0.01554107666015625, 0.0216827392578125, 0.03619384765625, -0.03564453125, 0.0230865478515625, -0.0162353515625, -0.0238494873046875, 0.007236480712890625, 0.009918212890625, 0.0277862548828125, -0.03509521484375, -0.0286865234375, -0.045379638671875, 
0.058502197265625, 0.040283203125, -0.0036220550537109375, 0.0019397735595703125, 0.02178955078125, -0.048370361328125, 0.014373779296875, 0.007049560546875, -0.04583740234375, -0.03436279296875, 0.0087432861328125, -0.0194549560546875, -0.042816162109375, 0.046630859375, -0.01224517822265625, -0.00225830078125, 0.0222320556640625, 0.0128021240234375, -0.0216217041015625, 0.049835205078125, 0.044281005859375, 0.00945281982421875, 0.010040283203125, 0.021820068359375, 0.02569580078125, -0.05975341796875, -0.036285400390625, -0.056182861328125, -0.0301513671875, -0.00872802734375, -0.005035400390625, 0.01514434814453125, 0.0399169921875, -0.027069091796875, 0.058563232421875, -0.043365478515625, 0.0207366943359375, 0.052215576171875, 0.01497650146484375, -0.024749755859375, -0.034942626953125, 0.0027294158935546875, 0.012908935546875, -0.04156494140625, -0.062255859375, 0.0258331298828125, 0.01373291015625, 0.07965087890625, 0.01300048828125, 0.078369140625, 0.027862548828125, -0.01149749755859375, -0.041046142578125, 0.06939697265625, 0.0175018310546875, -0.026885986328125, -0.05670166015625, 0.0147857666015625, -0.08831787109375, -0.0244598388671875, 0.0005979537963867188, -0.074462890625, 0.00379180908203125, 0.0151519775390625, -0.072021484375, -0.0010013580322265625, -0.0816650390625, 0.08685302734375, -0.00707244873046875, 0.036529541015625, 0.023651123046875, -0.060516357421875, 0.005405426025390625, 0.037200927734375, -0.017578125, 0.005992889404296875, -0.059173583984375, 0.0709228515625, -0.027862548828125, 0.06671142578125, -0.0306243896484375, 0.005405426025390625, -0.00659942626953125, 0.01806640625, 0.0159149169921875, 0.014923095703125, 0.033843994140625, -0.04864501953125, 0.013275146484375, -0.031280517578125, -0.03289794921875, 0.03790283203125, -0.032562255859375, -0.033721923828125, -0.01139068603515625, -0.0005359649658203125, 0.01134490966796875, 0.036468505859375, 0.032958984375, 0.058929443359375, -0.0026111602783203125, 0.00933837890625, 
0.07476806640625, -0.0184478759765625, 0.0733642578125, 0.0129241943359375, -0.01020050048828125, -0.0191497802734375, 0.05340576171875, 0.0107879638671875, -0.01117706298828125, 0.038543701171875, 0.0247955322265625, -0.03546142578125, 0.005161285400390625, -0.052001953125, 0.003971099853515625, -0.04278564453125, -0.03643798828125, -0.044708251953125, 0.00930023193359375, -0.034271240234375, -0.0277252197265625, 0.01154327392578125, -0.031707763671875, -0.07427978515625, -0.03619384765625, 0.027008056640625, 0.0543212890625, -0.01552581787109375, 0.054473876953125, -0.07696533203125, 0.04132080078125, 0.0289764404296875, 0.036773681640625, -0.0213775634765625, 0.002063751220703125, -0.004718780517578125, -0.01396942138671875, -0.0450439453125, -0.074462890625, -0.0146026611328125, 0.00405120849609375, 0.03485107421875, 0.022735595703125, 0.045745849609375, 0.0268096923828125, -0.03778076171875, 0.08392333984375, 0.047943115234375, -0.058197021484375, 0.06396484375, -0.0209197998046875, 0.034149169921875, 0.052215576171875, 0.01181793212890625, -0.090087890625, 0.0026912689208984375, -0.053802490234375, -0.046875, 0.04278564453125, -0.033477783203125, 0.045196533203125, -0.0215911865234375, -0.0236358642578125, 0.0093994140625, 0.0169219970703125, -0.037506103515625, -0.048583984375, -0.02105712890625, 0.005046844482421875, 0.04730224609375, -0.02081298828125, -0.01000213623046875, 0.004852294921875, 0.026336669921875, 0.0093231201171875, 0.048614501953125, 0.01053619384765625, 0.0307769775390625, -0.00315093994140625, 0.0131683349609375, 0.058929443359375, 0.020965576171875, -0.01003265380859375, 0.0092926025390625, 0.002277374267578125, -0.025054931640625, 0.015869140625, -0.0043182373046875, 0.0149078369140625, -0.00337982177734375, 0.05059814453125, 0.042022705078125, -0.0170440673828125, -0.00405120849609375, 0.018524169921875, -0.0310516357421875, -0.00478363037109375, -0.0362548828125, 0.0300750732421875, 0.0012044906616210938, -0.0101776123046875, 
0.0247039794921875, 0.035430908203125, 0.0250396728515625, -0.0194244384765625, 0.03033447265625, -0.0018444061279296875, -0.04925537109375, -0.0217132568359375, 0.038116455078125, 0.06036376953125, -0.086181640625, 0.040283203125, 0.014923095703125, -0.04022216796875, 0.0625, 0.0438232421875, 0.0699462890625, -0.012054443359375, 0.0255126953125, 0.01114654541015625, 0.02105712890625, 0.01271820068359375, 0.07781982421875, -0.01202392578125, -0.042205810546875, 0.01494598388671875, -0.0015239715576171875, -0.0194091796875, -0.00907135009765625, -0.03900146484375, 0.0102996826171875, -0.10400390625, -0.0055694580078125, 0.007740020751953125, -0.037384033203125, -0.0299224853515625, 0.0199127197265625, 0.022064208984375, 0.11376953125, -0.039337158203125, 0.0238037109375, 0.076171875, -0.04571533203125, -0.06536865234375, -0.00786590576171875, -0.011871337890625, -0.056121826171875, 0.06207275390625, 0.022369384765625, -0.008270263671875, -0.0186920166015625, -0.07940673828125, -0.030029296875, 0.05230712890625, 0.01316070556640625, -0.006866455078125, 0.02325439453125, -0.036773681640625, 0.0200958251953125, -0.033935546875, 0.041473388671875, 0.01274871826171875, 0.053253173828125, -0.007274627685546875, -0.041534423828125, 0.0391845703125, -0.0252685546875, 0.00750732421875, 0.025421142578125, -0.053314208984375, 0.057281494140625, -0.0081329345703125, -0.033294677734375, 0.01983642578125, 0.0482177734375, 0.005504608154296875, 0.03521728515625, 0.036529541015625, 0.00658416748046875, 0.058502197265625, -0.0233612060546875, 0.058013916015625, 0.0027332305908203125, -0.01348114013671875, 0.061431884765625, -0.01305389404296875, 0.0247955322265625, 0.042236328125, -0.0406494140625, 0.06488037109375, 0.053314208984375, 0.0154266357421875, 0.0322265625, 0.036407470703125, -0.03271484375, -0.0187225341796875, -0.0224151611328125, -0.0226898193359375, -0.007762908935546875, 0.005664825439453125, 0.006328582763671875, -0.017974853515625, -0.0109405517578125, 
0.031036376953125, 0.01025390625, -0.01776123046875, 0.0626220703125, -0.00266265869140625, -0.055877685546875, 0.047576904296875, 0.006687164306640625, 0.039154052734375, -0.02935791015625, -0.02569580078125, -0.034515380859375, -0.0263214111328125, -0.03363037109375, -0.057159423828125, 0.02142333984375, 0.00487518310546875, -0.03387451171875, -0.0240020751953125, 0.03765869140625, -0.0015811920166015625, -0.0304718017578125, -0.004917144775390625, -0.01195526123046875, 0.034027099609375, 0.0274200439453125, -0.0416259765625, -0.015777587890625, 0.0361328125, -0.002716064453125, -0.0088958740234375, 0.056488037109375, 0.005863189697265625, 0.043792724609375, 0.03228759765625, -0.0251312255859375, 0.0177154541015625, -0.0095672607421875, 0.058502197265625, -0.053192138671875, -0.04144287109375, -0.058624267578125, 0.054840087890625, -0.0274658203125, -0.056182861328125, 0.06744384765625, 0.031982421875, 0.0421142578125, -0.01183319091796875, 0.058502197265625, -0.0306396484375, 0.040985107421875, -0.00542449951171875, 0.016845703125, 0.0021839141845703125, -0.000537872314453125, 0.00214385986328125, -0.026702880859375, -0.01030731201171875, 0.035858154296875, 0.01275634765625, 0.00606536865234375, 0.05743408203125, 0.049224853515625, -0.00708770751953125, 0.010498046875, 0.0187225341796875, -0.0083770751953125, 0.01556396484375, 0.04278564453125, 0.0499267578125, -0.0212860107421875, 0.0004127025604248047, 0.028350830078125, -0.05267333984375, -0.013275146484375, -0.0653076171875, -0.042755126953125, -0.0043182373046875, 0.007617950439453125, -0.034881591796875, -0.0224456787109375, 0.06939697265625, 0.0494384765625, -0.088134765625, -0.0318603515625, -0.006198883056640625, 0.025909423828125, 0.007328033447265625, -0.0125885009765625, -0.01482391357421875, -0.0210723876953125, -0.00902557373046875, 0.015716552734375, -0.01514434814453125, 0.043121337890625, -0.01885986328125, -0.00627899169921875, -0.00026345252990722656, 0.0047149658203125, 0.03466796875, 
0.041351318359375, -0.045074462890625, -0.03839111328125, 0.023956298828125, -0.03204345703125, -0.0256805419921875, 0.0523681640625, -0.032684326171875, 0.0260467529296875, 0.055908203125, 0.049652099609375, -0.01824951171875, 0.0145263671875, 0.05938720703125, -0.052947998046875, -0.0189208984375, 0.024383544921875, 0.014862060546875, 0.0246124267578125, -0.060272216796875, 0.0301055908203125, 0.034332275390625, -0.05218505859375, -0.03851318359375, 0.0200042724609375, -0.072998046875, -0.01160430908203125, 0.054595947265625, -0.028472900390625, -0.0221710205078125, -0.015960693359375, -0.0780029296875, 0.0175323486328125, -0.091552734375, 0.01068115234375, 0.043975830078125, -0.01715087890625, -0.0242156982421875, -0.024566650390625, 0.0254974365234375, 0.0128021240234375, -0.07269287109375, 0.008148193359375, 0.06781005859375, -0.01531982421875, 0.0106353759765625, 0.049072265625, 0.032745361328125, 0.0239715576171875, 0.0141754150390625, 0.01464080810546875, -0.01259613037109375, -0.02874755859375, -0.0234375, 0.007137298583984375, -0.01007080078125, -0.07818603515625 ] ]
climatebert/distilroberta-base-climate-sentiment
2023-06-02T13:53:52.000Z
[ "transformers", "pytorch", "safetensors", "roberta", "text-classification", "en", "dataset:climatebert/climate_sentiment", "license:apache-2.0", "endpoints_compatible", "region:us" ]
text-classification
climatebert
null
null
climatebert/distilroberta-base-climate-sentiment
0
5,886
transformers
2022-03-02T23:29:05
--- license: apache-2.0 datasets: - climatebert/climate_sentiment language: - en metrics: - accuracy --- # Model Card for distilroberta-base-climate-sentiment ## Model Description This is the fine-tuned ClimateBERT language model with a classification head for classifying climate-related paragraphs into the climate-related sentiment classes opportunity, neutral, or risk. Using the [climatebert/distilroberta-base-climate-f](https://huggingface.co/climatebert/distilroberta-base-climate-f) language model as starting point, the distilroberta-base-climate-sentiment model is fine-tuned on our [climatebert/climate_sentiment](https://huggingface.co/climatebert/climate_sentiment) dataset. *Note: This model is trained on paragraphs. It may not perform well on sentences.* ## Citation Information ```bibtex @techreport{bingler2023cheaptalk, title={How Cheap Talk in Climate Disclosures Relates to Climate Initiatives, Corporate Emissions, and Reputation Risk}, author={Bingler, Julia and Kraus, Mathias and Leippold, Markus and Webersinke, Nicolas}, type={Working paper}, institution={Available at SSRN 3998435}, year={2023} } ``` ## How to Get Started With the Model You can use the model with a pipeline for text classification: ```python from transformers import AutoModelForSequenceClassification, AutoTokenizer, pipeline from transformers.pipelines.pt_utils import KeyDataset import datasets from tqdm.auto import tqdm dataset_name = "climatebert/climate_sentiment" model_name = "climatebert/distilroberta-base-climate-sentiment" # If you want to use your own data, simply load them as 🤗 Datasets dataset, see https://huggingface.co/docs/datasets/loading dataset = datasets.load_dataset(dataset_name, split="test") model = AutoModelForSequenceClassification.from_pretrained(model_name) tokenizer = AutoTokenizer.from_pretrained(model_name, max_len=512) pipe = pipeline("text-classification", model=model, tokenizer=tokenizer, device=0) # See 
https://huggingface.co/docs/transformers/main_classes/pipelines#transformers.pipeline for out in tqdm(pipe(KeyDataset(dataset, "text"), padding=True, truncation=True)): print(out) ```
2,167
[ [ -0.0330810546875, -0.0372314453125, 0.01346588134765625, 0.0146026611328125, -0.0290985107421875, 0.01050567626953125, -0.00860595703125, -0.0086822509765625, -0.00004869699478149414, 0.022125244140625, -0.036376953125, -0.054779052734375, -0.0595703125, -0.010284423828125, -0.022247314453125, 0.11163330078125, 0.01375579833984375, -0.008819580078125, -0.01959228515625, -0.0043182373046875, -0.0103302001953125, -0.059051513671875, -0.036041259765625, -0.0297393798828125, 0.0298614501953125, 0.029205322265625, 0.057830810546875, 0.033294677734375, 0.0204010009765625, 0.0272979736328125, -0.0293731689453125, -0.0205078125, -0.022186279296875, 0.004535675048828125, -0.003803253173828125, -0.0318603515625, -0.037109375, 0.032470703125, 0.032745361328125, 0.040069580078125, -0.0123443603515625, 0.0211944580078125, 0.0244598388671875, 0.0260162353515625, -0.033966064453125, 0.03472900390625, -0.04083251953125, -0.0129241943359375, 0.0005645751953125, 0.00098419189453125, -0.030364990234375, -0.011383056640625, 0.0307159423828125, -0.01335906982421875, 0.0220947265625, 0.0158233642578125, 0.08331298828125, 0.02593994140625, -0.027008056640625, -0.0200653076171875, -0.0345458984375, 0.07098388671875, -0.06256103515625, 0.01314544677734375, 0.004749298095703125, -0.004199981689453125, 0.01532745361328125, -0.051177978515625, -0.0631103515625, -0.00531005859375, -0.016204833984375, 0.0163116455078125, -0.02056884765625, 0.00885009765625, 0.017791748046875, 0.04656982421875, -0.0361328125, -0.0014600753784179688, -0.035919189453125, -0.0195770263671875, 0.040679931640625, 0.0193634033203125, 0.004123687744140625, -0.044769287109375, -0.042755126953125, -0.029998779296875, -0.01141357421875, 0.017669677734375, 0.02386474609375, 0.03289794921875, -0.0281524658203125, 0.0272674560546875, -0.01335906982421875, 0.049346923828125, 0.004398345947265625, -0.01273345947265625, 0.04339599609375, -0.01149749755859375, -0.0362548828125, 0.00634002685546875, 0.07421875, 
0.03436279296875, -0.0022525787353515625, 0.00682830810546875, -0.01678466796875, -0.00021946430206298828, 0.00400543212890625, -0.0782470703125, -0.033905029296875, 0.0231475830078125, -0.040496826171875, -0.054168701171875, 0.01392364501953125, -0.0638427734375, -0.0195159912109375, -0.0305023193359375, 0.02435302734375, -0.0211639404296875, -0.03326416015625, 0.0074920654296875, -0.02520751953125, 0.02392578125, 0.006664276123046875, -0.044921875, 0.017364501953125, 0.038177490234375, 0.06787109375, 0.0044097900390625, -0.0275726318359375, -0.014556884765625, -0.02618408203125, 0.0011339187622070312, 0.05413818359375, -0.03668212890625, -0.03582763671875, -0.0165863037109375, 0.0051727294921875, -0.007110595703125, -0.0221405029296875, 0.054779052734375, -0.019744873046875, 0.065673828125, 0.00872802734375, -0.044677734375, -0.0212554931640625, 0.01708984375, -0.02886962890625, 0.08612060546875, 0.032379150390625, -0.08697509765625, 0.0242156982421875, -0.042388916015625, -0.00829315185546875, -0.003803253173828125, 0.0157318115234375, -0.04852294921875, -0.0032958984375, -0.01168060302734375, 0.043060302734375, -0.031280517578125, 0.0310821533203125, -0.03192138671875, 0.00028967857360839844, 0.0201568603515625, -0.0288238525390625, 0.082275390625, 0.0282745361328125, -0.029876708984375, 0.00926971435546875, -0.058380126953125, 0.002849578857421875, -0.01262664794921875, -0.01523590087890625, -0.037689208984375, -0.01409149169921875, 0.030853271484375, 0.031768798828125, 0.0283660888671875, -0.043182373046875, 0.0157012939453125, -0.029327392578125, 0.041595458984375, 0.049468994140625, -0.0042572021484375, 0.04241943359375, -0.0178680419921875, 0.034637451171875, 0.0374755859375, 0.0160980224609375, 0.00788116455078125, -0.0275726318359375, -0.08624267578125, -0.020233154296875, 0.0260467529296875, 0.05889892578125, -0.045196533203125, 0.059356689453125, -0.01364898681640625, -0.0697021484375, -0.03076171875, -0.00606536865234375, 0.0223846435546875, 
0.0657958984375, 0.030609130859375, -0.009307861328125, -0.031341552734375, -0.0595703125, -0.0084381103515625, -0.03302001953125, 0.0118865966796875, 0.00543975830078125, 0.03472900390625, -0.029876708984375, 0.09075927734375, -0.060943603515625, -0.01107025146484375, -0.01058197021484375, 0.032196044921875, 0.04022216796875, 0.04278564453125, 0.05560302734375, -0.052734375, -0.04071044921875, -0.034942626953125, -0.03814697265625, -0.00809478759765625, 0.015045166015625, -0.01132965087890625, 0.027069091796875, 0.026031494140625, -0.0413818359375, 0.0211334228515625, 0.037994384765625, -0.051788330078125, 0.030364990234375, -0.00852203369140625, -0.0032749176025390625, -0.09857177734375, 0.005184173583984375, 0.01837158203125, -0.01485443115234375, -0.0574951171875, -0.016815185546875, -0.0184478759765625, -0.0036945343017578125, -0.03228759765625, 0.05340576171875, -0.00832366943359375, 0.039154052734375, -0.0310516357421875, -0.01508331298828125, 0.014862060546875, 0.0341796875, 0.0177154541015625, 0.0301361083984375, 0.06744384765625, -0.050994873046875, 0.033416748046875, 0.0350341796875, -0.006328582763671875, 0.043212890625, -0.04443359375, 0.00441741943359375, -0.0160064697265625, 0.015228271484375, -0.0712890625, -0.00116729736328125, 0.036163330078125, -0.04058837890625, 0.0256500244140625, -0.01197052001953125, -0.038604736328125, -0.03240966796875, -0.01377105712890625, 0.007049560546875, 0.051544189453125, -0.04046630859375, 0.043304443359375, 0.03082275390625, -0.0244903564453125, -0.04058837890625, -0.052276611328125, -0.00952911376953125, -0.048248291015625, -0.0229034423828125, 0.01654052734375, -0.0203094482421875, -0.00618743896484375, -0.005489349365234375, 0.004547119140625, -0.0206146240234375, 0.007053375244140625, 0.0257110595703125, 0.037261962890625, 0.00658416748046875, 0.0180816650390625, 0.0046844482421875, -0.0160064697265625, 0.019134521484375, -0.0258636474609375, 0.04498291015625, -0.038299560546875, 0.0291748046875, 
-0.021881103515625, 0.0009579658508300781, 0.043487548828125, 0.002971649169921875, 0.07537841796875, 0.05181884765625, -0.0235748291015625, -0.00974273681640625, -0.025909423828125, -0.010345458984375, -0.03564453125, 0.0123443603515625, -0.0201416015625, -0.0650634765625, 0.0285491943359375, 0.00785064697265625, -0.004085540771484375, 0.06439208984375, 0.0400390625, -0.006526947021484375, 0.06329345703125, 0.055999755859375, -0.030914306640625, 0.0299530029296875, -0.0293426513671875, 0.0170440673828125, -0.04681396484375, -0.021820068359375, -0.048431396484375, -0.00414276123046875, -0.05804443359375, -0.0165557861328125, 0.0020923614501953125, 0.0195770263671875, -0.042083740234375, 0.032623291015625, -0.0675048828125, 0.00971221923828125, 0.05169677734375, -0.007633209228515625, 0.01303863525390625, -0.0029048919677734375, -0.0099029541015625, -0.0003952980041503906, -0.05291748046875, -0.056365966796875, 0.05377197265625, 0.04925537109375, 0.056854248046875, -0.01091766357421875, 0.05718994140625, 0.01399993896484375, 0.04046630859375, -0.0576171875, 0.03533935546875, -0.01406097412109375, -0.0445556640625, -0.01317596435546875, -0.0343017578125, -0.0408935546875, 0.00881195068359375, -0.01458740234375, -0.04058837890625, 0.035247802734375, 0.0178680419921875, -0.0192108154296875, 0.019317626953125, -0.0413818359375, 0.07550048828125, -0.0078887939453125, -0.0050201416015625, -0.0006113052368164062, -0.04205322265625, 0.0180206298828125, 0.00811767578125, 0.0221405029296875, -0.0207366943359375, -0.0013284683227539062, 0.061004638671875, -0.03826904296875, 0.08355712890625, -0.04052734375, -0.01256561279296875, 0.036102294921875, -0.0211029052734375, 0.006900787353515625, 0.006366729736328125, -0.0156402587890625, 0.02825927734375, 0.019439697265625, -0.01666259765625, -0.0226593017578125, 0.047821044921875, -0.0706787109375, -0.032379150390625, -0.0716552734375, -0.0139617919921875, -0.002880096435546875, 0.0126800537109375, 0.0426025390625, 
0.0258941650390625, -0.0229644775390625, 0.00762939453125, 0.0303497314453125, -0.0185394287109375, 0.029815673828125, 0.0282745361328125, -0.01172637939453125, -0.042144775390625, 0.0733642578125, -0.009063720703125, 0.0081024169921875, 0.0152130126953125, 0.0377197265625, -0.038116455078125, -0.018524169921875, -0.0151824951171875, -0.0010519027709960938, -0.060577392578125, -0.0380859375, -0.0638427734375, -0.042144775390625, -0.036956787109375, 0.01222991943359375, -0.01377105712890625, -0.037750244140625, -0.04949951171875, -0.01558685302734375, 0.05450439453125, 0.033111572265625, -0.00765228271484375, 0.046112060546875, -0.060516357421875, -0.0034008026123046875, 0.01364898681640625, 0.028350830078125, -0.002841949462890625, -0.058563232421875, -0.01557159423828125, 0.01100921630859375, -0.03375244140625, -0.05059814453125, 0.05694580078125, 0.02435302734375, 0.029876708984375, 0.029144287109375, 0.0263519287109375, 0.0289154052734375, -0.019561767578125, 0.061004638671875, 0.024871826171875, -0.09832763671875, 0.038330078125, -0.0119781494140625, 0.015594482421875, 0.046630859375, 0.041412353515625, -0.047088623046875, -0.0399169921875, -0.07110595703125, -0.06939697265625, 0.055267333984375, 0.01457977294921875, 0.02374267578125, -0.00962066650390625, 0.022430419921875, 0.007595062255859375, 0.035003662109375, -0.07476806640625, -0.035064697265625, -0.05078125, -0.03045654296875, -0.0190277099609375, -0.02056884765625, -0.0022525787353515625, -0.03924560546875, 0.059661865234375, -0.01079559326171875, 0.0175323486328125, 0.0232391357421875, 0.007503509521484375, 0.0190887451171875, 0.035736083984375, 0.02899169921875, 0.01255035400390625, -0.0260772705078125, 0.00414276123046875, 0.0096588134765625, -0.03289794921875, 0.00974273681640625, 0.017669677734375, -0.0157318115234375, 0.011505126953125, 0.007904052734375, 0.08331298828125, -0.00984954833984375, -0.0167388916015625, 0.039215087890625, -0.0108489990234375, -0.0229034423828125, -0.0345458984375, 
0.01104736328125, -0.0013151168823242188, 0.0089569091796875, 0.0229339599609375, 0.017425537109375, 0.028167724609375, -0.0394287109375, 0.006259918212890625, 0.0208740234375, -0.046630859375, -0.0183258056640625, 0.052337646484375, 0.0181427001953125, -0.0006427764892578125, 0.055145263671875, -0.0205841064453125, -0.0640869140625, 0.03558349609375, 0.0185394287109375, 0.07464599609375, -0.006534576416015625, 0.0237579345703125, 0.044097900390625, 0.009246826171875, -0.0004165172576904297, 0.0188751220703125, 0.002826690673828125, -0.060272216796875, -0.003849029541015625, -0.079345703125, -0.029327392578125, 0.00945281982421875, -0.057373046875, 0.029205322265625, -0.04302978515625, -0.032440185546875, -0.006389617919921875, -0.0031490325927734375, -0.05035400390625, 0.037750244140625, 0.017822265625, 0.07373046875, -0.07318115234375, 0.059906005859375, 0.04425048828125, -0.0299072265625, -0.060150146484375, 0.0006866455078125, -0.004917144775390625, -0.0229339599609375, 0.060089111328125, 0.0266571044921875, -0.008056640625, 0.004985809326171875, -0.057037353515625, -0.052490234375, 0.0762939453125, -0.0013284683227539062, -0.03704833984375, -0.00478363037109375, 0.0025157928466796875, 0.0487060546875, -0.03997802734375, 0.0237274169921875, 0.040283203125, 0.030914306640625, 0.007564544677734375, -0.0640869140625, 0.0016803741455078125, -0.02630615234375, -0.00241851806640625, -0.001102447509765625, -0.0694580078125, 0.07037353515625, -0.00004315376281738281, -0.00670623779296875, 0.00725555419921875, 0.06414794921875, 0.014404296875, 0.024871826171875, 0.060028076171875, 0.04974365234375, 0.057586669921875, -0.030548095703125, 0.060760498046875, -0.039306640625, 0.06658935546875, 0.07037353515625, -0.012481689453125, 0.06463623046875, 0.019317626953125, -0.00894927978515625, 0.07147216796875, 0.05853271484375, -0.0198516845703125, 0.04815673828125, 0.0246429443359375, -0.025360107421875, -0.01010894775390625, 0.00057220458984375, -0.021484375, 
0.042083740234375, 0.0272979736328125, -0.0305938720703125, -0.00334930419921875, 0.015411376953125, 0.01385498046875, -0.0102386474609375, -0.019256591796875, 0.050811767578125, 0.0176239013671875, -0.04901123046875, 0.039031982421875, 0.0116424560546875, 0.07830810546875, -0.0223846435546875, 0.01427459716796875, 0.0011844635009765625, 0.0209197998046875, -0.01776123046875, -0.06976318359375, 0.0374755859375, 0.004322052001953125, -0.0215301513671875, -0.0236053466796875, 0.046966552734375, -0.043365478515625, -0.0487060546875, 0.01873779296875, 0.021759033203125, 0.00933837890625, 0.0001329183578491211, -0.07220458984375, -0.0091094970703125, 0.0014867782592773438, -0.02850341796875, 0.0049285888671875, 0.028900146484375, 0.02392578125, 0.0372314453125, 0.035980224609375, -0.00909423828125, -0.01525115966796875, 0.0019588470458984375, 0.0792236328125, -0.0599365234375, -0.0462646484375, -0.0631103515625, 0.04754638671875, -0.0214080810546875, -0.0285491943359375, 0.049102783203125, 0.055511474609375, 0.048187255859375, -0.0131683349609375, 0.06982421875, -0.02392578125, 0.050933837890625, -0.0245513916015625, 0.06536865234375, -0.03271484375, 0.001674652099609375, -0.031494140625, -0.06842041015625, -0.0003848075866699219, 0.060821533203125, -0.0224761962890625, 0.0112152099609375, 0.051544189453125, 0.044097900390625, -0.0004284381866455078, -0.005268096923828125, 0.0016374588012695312, 0.031494140625, 0.01294708251953125, 0.018280029296875, 0.05279541015625, -0.03472900390625, 0.037750244140625, -0.0391845703125, -0.030426025390625, 0.000850677490234375, -0.08062744140625, -0.0787353515625, -0.05438232421875, -0.055267333984375, -0.06304931640625, -0.02374267578125, 0.06201171875, 0.050384521484375, -0.06103515625, -0.016326904296875, -0.01016998291015625, -0.0084381103515625, 0.0194244384765625, -0.0286407470703125, 0.0226593017578125, -0.0261077880859375, -0.0701904296875, -0.00974273681640625, 0.0081634521484375, 0.02880859375, -0.0243072509765625, 
0.002777099609375, 0.00859832763671875, 0.0004911422729492188, 0.0374755859375, 0.004665374755859375, -0.0374755859375, -0.02093505859375, -0.00313568115234375, -0.022735595703125, 0.005527496337890625, 0.036163330078125, -0.056304931640625, 0.0018138885498046875, 0.0372314453125, 0.010986328125, 0.0229339599609375, -0.003940582275390625, 0.0313720703125, -0.056549072265625, 0.0229034423828125, 0.0162353515625, 0.035003662109375, 0.034088134765625, -0.03326416015625, 0.0183258056640625, 0.0230560302734375, -0.044464111328125, -0.046173095703125, 0.002902984619140625, -0.08172607421875, -0.020782470703125, 0.08062744140625, -0.006587982177734375, -0.0180206298828125, 0.0009794235229492188, -0.025299072265625, 0.01534271240234375, -0.038055419921875, 0.05291748046875, 0.0751953125, 0.002071380615234375, -0.0232391357421875, -0.005748748779296875, 0.0474853515625, 0.0112762451171875, -0.0537109375, 0.00933837890625, 0.0296783447265625, 0.04229736328125, 0.0261688232421875, 0.038787841796875, 0.0010089874267578125, -0.00975799560546875, -0.0105743408203125, 0.017730712890625, 0.01387786865234375, -0.01392364501953125, -0.02838134765625, -0.0029430389404296875, 0.004451751708984375, -0.005290985107421875 ] ]
p208p2002/zh-wiki-punctuation-restore
2023-05-31T08:44:00.000Z
[ "transformers", "pytorch", "safetensors", "bert", "token-classification", "ner", "punctuation", "zh", "autotrain_compatible", "endpoints_compatible", "region:us" ]
token-classification
p208p2002
null
null
p208p2002/zh-wiki-punctuation-restore
5
5,886
transformers
2023-01-31T01:08:01
--- tags: - ner - punctuation language: - zh --- # zh-wiki-punctuation-restore More Detail: https://github.com/p208p2002/ZH-Punctuation-Restore 共計支援6種標點符號: , 、 。 ? ! ; ## Install ```bash # pip install torch pytorch-lightning pip install zhpr ``` ## Usage ```python from zhpr.predict import DocumentDataset,merge_stride,decode_pred from transformers import AutoModelForTokenClassification,AutoTokenizer from torch.utils.data import DataLoader def predict_step(batch,model,tokenizer): batch_out = [] batch_input_ids = batch encodings = {'input_ids': batch_input_ids} output = model(**encodings) predicted_token_class_id_batch = output['logits'].argmax(-1) for predicted_token_class_ids, input_ids in zip(predicted_token_class_id_batch, batch_input_ids): out=[] tokens = tokenizer.convert_ids_to_tokens(input_ids) # compute the pad start in input_ids # and also truncate the predict # print(tokenizer.decode(batch_input_ids)) input_ids = input_ids.tolist() try: input_id_pad_start = input_ids.index(tokenizer.pad_token_id) except: input_id_pad_start = len(input_ids) input_ids = input_ids[:input_id_pad_start] tokens = tokens[:input_id_pad_start] # predicted_token_class_ids predicted_tokens_classes = [model.config.id2label[t.item()] for t in predicted_token_class_ids] predicted_tokens_classes = predicted_tokens_classes[:input_id_pad_start] for token,ner in zip(tokens,predicted_tokens_classes): out.append((token,ner)) batch_out.append(out) return batch_out if __name__ == "__main__": window_size = 256 step = 200 text = "維基百科是維基媒體基金會運營的一個多語言的百科全書目前是全球網路上最大且最受大眾歡迎的參考工具書名列全球二十大最受歡迎的網站特點是自由內容自由編輯與自由著作權" dataset = DocumentDataset(text,window_size=window_size,step=step) dataloader = DataLoader(dataset=dataset,shuffle=False,batch_size=5) model_name = 'p208p2002/zh-wiki-punctuation-restore' model = AutoModelForTokenClassification.from_pretrained(model_name) tokenizer = AutoTokenizer.from_pretrained(model_name) model_pred_out = [] for batch in dataloader: batch_out = 
predict_step(batch,model,tokenizer) for out in batch_out: model_pred_out.append(out) merge_pred_result = merge_stride(model_pred_out,step) merge_pred_result_deocde = decode_pred(merge_pred_result) merge_pred_result_deocde = ''.join(merge_pred_result_deocde) print(merge_pred_result_deocde) ``` ``` 維基百科是維基媒體基金會運營的一個多語言的百科全書,目前是全球網路上最大且最受大眾歡迎的參考工具書,名列全球二十大最受歡迎的網站,特點是自由內容、自由編輯與自由著作權。 ```
2,762
[ [ -0.0037994384765625, -0.051971435546875, 0.027679443359375, 0.0019664764404296875, -0.0256500244140625, -0.0029544830322265625, -0.005023956298828125, -0.00917816162109375, -0.00011861324310302734, 0.0239715576171875, -0.041015625, -0.04443359375, -0.036163330078125, 0.006031036376953125, -0.0212860107421875, 0.08380126953125, -0.0096893310546875, -0.003612518310546875, 0.0015153884887695312, 0.01432037353515625, -0.014923095703125, -0.048187255859375, -0.034423828125, -0.03155517578125, 0.01885986328125, 0.033294677734375, 0.040557861328125, 0.052734375, 0.04742431640625, 0.0284271240234375, 0.00417327880859375, 0.01068878173828125, -0.00876617431640625, -0.020782470703125, 0.017547607421875, -0.0269622802734375, -0.028472900390625, 0.0012712478637695312, 0.0606689453125, 0.057830810546875, -0.00972747802734375, 0.01457977294921875, 0.006282806396484375, 0.04443359375, -0.031158447265625, 0.03204345703125, -0.028472900390625, 0.0017042160034179688, -0.010101318359375, -0.0147705078125, -0.0261077880859375, -0.031036376953125, -0.005504608154296875, -0.05279541015625, 0.0219268798828125, 0.016265869140625, 0.08746337890625, -0.0013675689697265625, -0.0169677734375, -0.020751953125, -0.041473388671875, 0.07037353515625, -0.068603515625, 0.0131988525390625, 0.031829833984375, -0.0011491775512695312, -0.0170745849609375, -0.0794677734375, -0.06671142578125, 0.00153350830078125, -0.020477294921875, 0.017608642578125, -0.0000680685043334961, 0.005535125732421875, 0.0229949951171875, 0.038848876953125, -0.0433349609375, -0.0103912353515625, -0.044647216796875, -0.039825439453125, 0.037353515625, 0.0221405029296875, 0.0299530029296875, -0.0264129638671875, -0.0301971435546875, -0.03900146484375, -0.0008606910705566406, 0.0255279541015625, 0.0279998779296875, 0.039093017578125, -0.03167724609375, 0.04791259765625, -0.00982666015625, 0.055694580078125, 0.018035888671875, -0.033447265625, 0.055755615234375, -0.03717041015625, -0.036285400390625, 0.0038623809814453125, 
0.08087158203125, 0.030548095703125, 0.01393890380859375, 0.01114654541015625, -0.006404876708984375, -0.0017032623291015625, -0.0220184326171875, -0.059478759765625, -0.03070068359375, 0.01335906982421875, -0.03289794921875, -0.0283660888671875, 0.0208740234375, -0.049896240234375, -0.0084228515625, -0.0026607513427734375, 0.046417236328125, -0.0498046875, -0.00612640380859375, 0.025360107421875, -0.031768798828125, 0.004573822021484375, 0.005279541015625, -0.07757568359375, 0.013885498046875, 0.022735595703125, 0.051849365234375, 0.0270538330078125, -0.0592041015625, -0.0269927978515625, -0.0030803680419921875, -0.0188751220703125, 0.03314208984375, -0.019134521484375, -0.0379638671875, -0.00901031494140625, 0.012298583984375, -0.04541015625, -0.0224609375, 0.045806884765625, -0.0144500732421875, 0.0462646484375, -0.02459716796875, -0.050201416015625, -0.021392822265625, 0.01213836669921875, -0.034576416015625, 0.08502197265625, 0.0284881591796875, -0.08135986328125, 0.0170440673828125, -0.03057861328125, -0.03363037109375, 0.004528045654296875, -0.003692626953125, -0.05780029296875, -0.018524169921875, 0.0164947509765625, 0.0265045166015625, -0.01026153564453125, 0.0166778564453125, -0.016204833984375, -0.03277587890625, 0.0220489501953125, -0.020904541015625, 0.07635498046875, 0.0002663135528564453, -0.032806396484375, 0.0160980224609375, -0.08343505859375, 0.0122528076171875, 0.0097503662109375, -0.0255889892578125, -0.01047515869140625, -0.019134521484375, 0.017822265625, 0.0250244140625, 0.02777099609375, -0.036895751953125, 0.0195159912109375, -0.03973388671875, 0.04266357421875, 0.0545654296875, 0.00589752197265625, 0.0202789306640625, -0.041046142578125, 0.0279693603515625, 0.0206451416015625, 0.00916290283203125, -0.0124359130859375, -0.0262908935546875, -0.06512451171875, -0.01227569580078125, 0.01482391357421875, 0.041656494140625, -0.05194091796875, 0.065185546875, -0.0193939208984375, -0.042724609375, -0.037567138671875, -0.004131317138671875, 
0.033111572265625, 0.06689453125, 0.03741455078125, -0.0173187255859375, -0.048797607421875, -0.050506591796875, -0.0035400390625, -0.0345458984375, -0.0004417896270751953, 0.007106781005859375, 0.05548095703125, -0.01033782958984375, 0.056671142578125, -0.036468505859375, -0.0227813720703125, -0.0125885009765625, 0.02691650390625, 0.0382080078125, 0.052947998046875, 0.0301055908203125, -0.040252685546875, -0.05120849609375, -0.0002818107604980469, -0.04376220703125, 0.006591796875, -0.02587890625, -0.0108184814453125, 0.0165557861328125, 0.0223541259765625, -0.035919189453125, 0.03289794921875, 0.01172637939453125, -0.062347412109375, 0.06414794921875, -0.0279083251953125, 0.0260772705078125, -0.08807373046875, 0.0274810791015625, -0.0262908935546875, 0.01239776611328125, -0.03955078125, -0.00910186767578125, 0.0305328369140625, -0.00038552284240722656, -0.0282135009765625, 0.049163818359375, -0.03155517578125, 0.008209228515625, 0.0055084228515625, 0.003910064697265625, 0.0174713134765625, 0.045989990234375, -0.00958251953125, 0.062347412109375, 0.03668212890625, -0.041748046875, 0.01800537109375, 0.044586181640625, -0.0300140380859375, -0.00482177734375, -0.0311737060546875, -0.01004791259765625, 0.0129547119140625, -0.00020122528076171875, -0.0745849609375, -0.0155181884765625, 0.0269927978515625, -0.057708740234375, 0.01235198974609375, -0.00458526611328125, -0.044281005859375, -0.04168701171875, -0.036224365234375, 0.026214599609375, 0.033599853515625, -0.033599853515625, 0.041748046875, -0.0018444061279296875, -0.01036834716796875, -0.056549072265625, -0.061676025390625, -0.0160675048828125, -0.0030670166015625, -0.05828857421875, 0.036651611328125, -0.0330810546875, -0.006130218505859375, -0.00582122802734375, 0.0087432861328125, 0.00408172607421875, 0.0095672607421875, 0.011077880859375, 0.055023193359375, -0.002986907958984375, -0.0021648406982421875, -0.0016088485717773438, -0.0058441162109375, 0.01035308837890625, -0.0177001953125, 0.061065673828125, 
-0.003330230712890625, -0.0084686279296875, -0.04217529296875, -0.005565643310546875, 0.05029296875, -0.020599365234375, 0.0648193359375, 0.059661865234375, -0.0257720947265625, -0.0031032562255859375, -0.0307769775390625, 0.0006256103515625, -0.03558349609375, 0.0357666015625, -0.026458740234375, -0.03179931640625, 0.06439208984375, 0.0340576171875, 0.0032672882080078125, 0.06451416015625, 0.032440185546875, 0.00484466552734375, 0.05780029296875, 0.0506591796875, -0.006107330322265625, 0.036651611328125, -0.04473876953125, 0.0189056396484375, -0.045684814453125, -0.039276123046875, -0.032470703125, 0.014923095703125, -0.035980224609375, -0.017425537109375, 0.00881195068359375, 0.016845703125, -0.0239105224609375, 0.01337432861328125, -0.03564453125, 0.00978851318359375, 0.044952392578125, 0.004436492919921875, 0.0013904571533203125, 0.0161590576171875, -0.04095458984375, 0.0007882118225097656, -0.0445556640625, -0.0171051025390625, 0.080810546875, 0.01360321044921875, 0.02197265625, -0.0245819091796875, 0.05596923828125, 0.0016965866088867188, -0.00684356689453125, -0.056427001953125, 0.032318115234375, -0.020965576171875, -0.01910400390625, -0.0321044921875, -0.025482177734375, -0.055816650390625, 0.016510009765625, -0.015655517578125, -0.051727294921875, 0.027923583984375, -0.0074310302734375, -0.05035400390625, 0.036834716796875, -0.0364990234375, 0.0643310546875, 0.0082550048828125, -0.034393310546875, 0.00860595703125, -0.04541015625, 0.02777099609375, -0.00858306884765625, 0.032440185546875, -0.004573822021484375, 0.0188751220703125, 0.0965576171875, -0.06689453125, 0.04083251953125, -0.00368499755859375, 0.00021064281463623047, 0.026031494140625, -0.0184783935546875, 0.0389404296875, -0.0251007080078125, -0.0259857177734375, 0.003093719482421875, 0.002117156982421875, -0.0224456787109375, -0.026123046875, 0.02947998046875, -0.06817626953125, -0.0205078125, -0.04718017578125, -0.0443115234375, 0.0095672607421875, 0.037109375, 0.04840087890625, 
0.041748046875, -0.0026874542236328125, 0.0032634735107421875, 0.0265350341796875, -0.0224151611328125, 0.061248779296875, 0.030364990234375, -0.01280975341796875, -0.06341552734375, 0.060394287109375, 0.0240325927734375, 0.00464630126953125, 0.0159912109375, 0.0183258056640625, -0.026336669921875, -0.02685546875, -0.026397705078125, 0.0180511474609375, -0.056732177734375, -0.0113372802734375, -0.049468994140625, -0.032928466796875, -0.06378173828125, -0.013671875, -0.0012311935424804688, -0.0377197265625, -0.03582763671875, -0.016326904296875, 0.0274810791015625, 0.015655517578125, -0.021331787109375, 0.042144775390625, -0.067626953125, 0.0174713134765625, 0.007686614990234375, 0.0197906494140625, 0.005054473876953125, -0.06536865234375, -0.04345703125, -0.007450103759765625, -0.0102691650390625, -0.049072265625, 0.06317138671875, 0.00780487060546875, 0.023651123046875, 0.03509521484375, 0.016357421875, 0.060791015625, -0.01678466796875, 0.05902099609375, 0.03173828125, -0.0802001953125, 0.032440185546875, -0.0164642333984375, 0.0161590576171875, 0.043182373046875, 0.0088958740234375, -0.052276611328125, -0.0229644775390625, -0.05657958984375, -0.0794677734375, 0.07354736328125, 0.0311279296875, -0.01507568359375, -0.008453369140625, 0.0244140625, 0.0104217529296875, 0.006275177001953125, -0.053985595703125, -0.045989990234375, -0.01461029052734375, -0.034088134765625, -0.0109405517578125, -0.0321044921875, 0.00018680095672607422, -0.037322998046875, 0.08050537109375, 0.0151824951171875, 0.0105133056640625, 0.025909423828125, -0.01233673095703125, -0.0186767578125, 0.017486572265625, 0.034210205078125, 0.05072021484375, -0.035858154296875, -0.0053558349609375, -0.0057830810546875, -0.061309814453125, 0.0037670135498046875, 0.037841796875, -0.017669677734375, 0.0155181884765625, 0.044586181640625, 0.063720703125, 0.0102386474609375, -0.02410888671875, 0.0270538330078125, -0.0111236572265625, -0.035430908203125, -0.042205810546875, 0.0025634765625, 
-0.0131988525390625, 0.00437164306640625, 0.04595947265625, -0.00093841552734375, 0.00803375244140625, -0.043487548828125, 0.011077880859375, 0.0101165771484375, 0.004962921142578125, -0.01373291015625, 0.052947998046875, 0.006229400634765625, -0.0159454345703125, 0.06793212890625, -0.0022125244140625, -0.050262451171875, 0.071044921875, 0.028289794921875, 0.061676025390625, -0.00284576416015625, 0.00007325410842895508, 0.06671142578125, 0.0231170654296875, -0.0113067626953125, 0.041046142578125, -0.00017178058624267578, -0.05426025390625, 0.00848388671875, -0.060150146484375, -0.010101318359375, 0.01299285888671875, -0.06317138671875, 0.0245361328125, -0.041534423828125, -0.0254974365234375, 0.0167694091796875, 0.044525146484375, -0.0443115234375, 0.033843994140625, 0.0016222000122070312, 0.0556640625, -0.0753173828125, 0.07501220703125, 0.0270843505859375, -0.05718994140625, -0.08160400390625, -0.00728607177734375, -0.0266571044921875, -0.0628662109375, 0.038482666015625, 0.0228424072265625, 0.01082611083984375, 0.01551055908203125, -0.0341796875, -0.0799560546875, 0.09552001953125, 0.01398468017578125, -0.05609130859375, -0.016265869140625, 0.0099945068359375, 0.0113525390625, 0.0008072853088378906, 0.019195556640625, 0.04150390625, 0.05303955078125, -0.00881195068359375, -0.06707763671875, 0.0112457275390625, -0.035003662109375, -0.01544189453125, 0.01297760009765625, -0.046234130859375, 0.0975341796875, -0.032745361328125, -0.021240234375, 0.017913818359375, 0.053009033203125, 0.029998779296875, 0.0261383056640625, 0.0254974365234375, 0.039581298828125, 0.07891845703125, -0.01192474365234375, 0.061553955078125, -0.052947998046875, 0.052154541015625, 0.06719970703125, -0.0010242462158203125, 0.055419921875, 0.037933349609375, -0.030487060546875, 0.06268310546875, 0.0582275390625, -0.039093017578125, 0.031951904296875, -0.007442474365234375, -0.0088348388671875, -0.014923095703125, 0.00998687744140625, -0.04376220703125, 0.01168060302734375, 0.0042266845703125, 
-0.04327392578125, 0.01274871826171875, -0.0035953521728515625, 0.007747650146484375, -0.006809234619140625, -0.0214996337890625, 0.040771484375, -0.00920867919921875, -0.041015625, 0.046417236328125, 0.021392822265625, 0.05926513671875, -0.053466796875, -0.0003991127014160156, -0.018707275390625, 0.027984619140625, -0.0196533203125, -0.04095458984375, -0.00276947021484375, -0.02447509765625, -0.034515380859375, 0.0090179443359375, 0.041107177734375, -0.0513916015625, -0.0745849609375, 0.02679443359375, 0.0183868408203125, 0.0117340087890625, 0.018402099609375, -0.04901123046875, 0.0008363723754882812, 0.01251220703125, -0.025360107421875, 0.01287078857421875, 0.03265380859375, 0.0207977294921875, 0.040771484375, 0.057403564453125, -0.00440216064453125, 0.00017845630645751953, 0.00408172607421875, 0.054656982421875, -0.0635986328125, -0.057281494140625, -0.072509765625, 0.047027587890625, 0.0020809173583984375, -0.0309600830078125, 0.059326171875, 0.06298828125, 0.06793212890625, -0.0109405517578125, 0.06744384765625, -0.003360748291015625, 0.0211639404296875, -0.044708251953125, 0.069580078125, -0.03125, -0.00522613525390625, -0.0196533203125, -0.0325927734375, -0.0110626220703125, 0.05242919921875, -0.0168609619140625, 0.00811004638671875, 0.07220458984375, 0.043304443359375, 0.01303863525390625, -0.02459716796875, 0.0209503173828125, 0.019287109375, 0.0014820098876953125, 0.036956787109375, 0.0149383544921875, -0.062255859375, 0.049163818359375, -0.06439208984375, -0.006805419921875, -0.013824462890625, -0.044281005859375, -0.053192138671875, -0.052093505859375, -0.0379638671875, -0.043609619140625, -0.0024089813232421875, 0.07550048828125, 0.022216796875, -0.0582275390625, -0.0220184326171875, -0.0028629302978515625, 0.009124755859375, -0.0189666748046875, -0.021331787109375, 0.070068359375, -0.0300750732421875, -0.06207275390625, 0.0149383544921875, -0.007648468017578125, 0.0274658203125, 0.011444091796875, -0.0069122314453125, -0.017852783203125, 
0.00508880615234375, 0.0221710205078125, 0.016082763671875, -0.0440673828125, -0.0008153915405273438, -0.01163482666015625, -0.0197296142578125, 0.0171051025390625, 0.03271484375, -0.04583740234375, 0.0291595458984375, 0.03515625, 0.008819580078125, 0.03143310546875, -0.00843048095703125, 0.036285400390625, -0.038787841796875, 0.02020263671875, -0.0005550384521484375, 0.045745849609375, 0.0277099609375, -0.033599853515625, 0.0244293212890625, 0.04052734375, -0.027923583984375, -0.06707763671875, -0.0280914306640625, -0.0650634765625, -0.0257720947265625, 0.0655517578125, -0.02203369140625, -0.0325927734375, 0.00589752197265625, -0.005840301513671875, 0.05657958984375, -0.0257110595703125, 0.06427001953125, 0.051971435546875, 0.0180206298828125, 0.0063018798828125, -0.0166015625, 0.035919189453125, 0.0249481201171875, -0.0362548828125, 0.017913818359375, 0.02362060546875, 0.038726806640625, 0.012542724609375, 0.057525634765625, -0.005767822265625, 0.028472900390625, 0.0247802734375, 0.037017822265625, -0.032867431640625, 0.0006632804870605469, -0.03155517578125, 0.0080718994140625, -0.0091400146484375, -0.03173828125 ] ]
Fredithefish/Guanaco-3B-Uncensored
2023-08-27T21:33:10.000Z
[ "transformers", "pytorch", "safetensors", "gpt_neox", "text-generation", "conversational", "en", "dataset:Fredithefish/openassistant-guanaco-unfiltered", "license:apache-2.0", "has_space", "text-generation-inference", "region:us" ]
conversational
Fredithefish
null
null
Fredithefish/Guanaco-3B-Uncensored
1
5,886
transformers
2023-08-23T13:13:14
--- license: apache-2.0 datasets: - Fredithefish/openassistant-guanaco-unfiltered language: - en library_name: transformers pipeline_tag: conversational inference: false --- <img src="https://huggingface.co/Fredithefish/Guanaco-3B-Uncensored/resolve/main/Guanaco-Uncensored.jpg" alt="Alt Text" width="295"/> # ✨ Guanaco - 3B - Uncensored ✨ <h4 style="color: orange;">IMPORTANT:</h4> <p style="color: orange;">This is the old model. The dataset has been updated and a newer version of this model is available <a href="https://huggingface.co/Fredithefish/Guanaco-3B-Uncensored-v2">here</a>.</p> <br> Guanaco-3B-Uncensored has been fine-tuned for 6 epochs on the [Unfiltered Guanaco Dataset.](https://huggingface.co/datasets/Fredithefish/openassistant-guanaco-unfiltered) using [RedPajama-INCITE-Base-3B-v1](https://huggingface.co/togethercomputer/RedPajama-INCITE-Base-3B-v1) as the base model. <br>The model does not perform well with languages other than English. <br>Please note: This model is designed to provide responses without content filtering or censorship. It generates answers without denials. ## Special thanks I would like to thank AutoMeta for providing me with the computing power necessary to train this model. ### Prompt Template ``` ### Human: {prompt} ### Assistant: ```
1,297
[ [ -0.024871826171875, -0.054168701171875, 0.0208892822265625, 0.01557159423828125, -0.04400634765625, -0.0218353271484375, -0.012176513671875, -0.049652099609375, 0.0218048095703125, 0.049224853515625, -0.038787841796875, -0.051361083984375, -0.05126953125, 0.01531982421875, -0.0077667236328125, 0.08001708984375, 0.0137939453125, -0.0004420280456542969, -0.00872039794921875, -0.0239410400390625, -0.046112060546875, -0.0188446044921875, -0.056884765625, -0.0272216796875, 0.020477294921875, 0.028289794921875, 0.0670166015625, 0.05902099609375, 0.032135009765625, 0.0175628662109375, -0.016510009765625, 0.0096435546875, -0.035247802734375, -0.006053924560546875, -0.0037212371826171875, -0.01102447509765625, -0.0535888671875, 0.0006160736083984375, 0.038055419921875, 0.023193359375, -0.019775390625, 0.006389617919921875, -0.00012755393981933594, 0.041717529296875, -0.049224853515625, 0.01061248779296875, -0.040740966796875, -0.013458251953125, -0.020904541015625, 0.0047760009765625, -0.0276641845703125, -0.038970947265625, -0.01873779296875, -0.04833984375, 0.0179901123046875, 0.0017766952514648438, 0.078369140625, 0.0146484375, -0.0245361328125, 0.0017671585083007812, -0.038909912109375, 0.03656005859375, -0.06719970703125, 0.0219268798828125, 0.0494384765625, 0.031646728515625, -0.01806640625, -0.060760498046875, -0.04876708984375, 0.0007219314575195312, 0.0009083747863769531, -0.01126861572265625, -0.04168701171875, -0.0240631103515625, -0.0029315948486328125, 0.0299835205078125, -0.046417236328125, 0.00986480712890625, -0.05804443359375, -0.01486968994140625, 0.04974365234375, 0.005779266357421875, 0.0184478759765625, -0.025299072265625, -0.04058837890625, -0.017578125, -0.055908203125, -0.00795745849609375, 0.05718994140625, 0.01288604736328125, -0.042083740234375, 0.0292205810546875, -0.0302734375, 0.05194091796875, 0.0016126632690429688, 0.009033203125, 0.04742431640625, -0.004978179931640625, -0.042755126953125, -0.03717041015625, 0.07806396484375, 
0.0251617431640625, 0.0019626617431640625, 0.01593017578125, 0.01262664794921875, -0.00812530517578125, 0.025665283203125, -0.062347412109375, -0.032440185546875, 0.021331787109375, -0.04937744140625, -0.0102691650390625, 0.008331298828125, -0.060760498046875, -0.024810791015625, -0.005352020263671875, 0.02874755859375, -0.028656005859375, -0.03350830078125, 0.0205841064453125, 0.01000213623046875, 0.01123809814453125, 0.0309906005859375, -0.0648193359375, 0.026123046875, -0.0023593902587890625, 0.052093505859375, 0.00958251953125, -0.0002498626708984375, -0.0005474090576171875, -0.01087188720703125, 0.002750396728515625, 0.059478759765625, -0.0214691162109375, -0.017486572265625, -0.0188446044921875, 0.0250396728515625, 0.0016326904296875, -0.036651611328125, 0.054962158203125, -0.049163818359375, 0.032867431640625, 0.007579803466796875, -0.0104217529296875, -0.040008544921875, -0.00391387939453125, -0.057159423828125, 0.067626953125, 0.0240631103515625, -0.051513671875, 0.0120391845703125, -0.052825927734375, 0.01320648193359375, 0.00006413459777832031, 0.0037364959716796875, -0.043060302734375, -0.0234832763671875, 0.0290679931640625, 0.0175628662109375, -0.0301055908203125, 0.00827789306640625, -0.042449951171875, -0.031402587890625, 0.00579071044921875, -0.0251922607421875, 0.0989990234375, 0.03271484375, -0.030029296875, 0.009307861328125, -0.046295166015625, 0.0055084228515625, 0.02496337890625, -0.002964019775390625, -0.0113372802734375, -0.019805908203125, -0.005428314208984375, 0.01316070556640625, 0.023345947265625, -0.0517578125, 0.01219940185546875, -0.009552001953125, 0.028289794921875, 0.048309326171875, 0.01071929931640625, 0.016357421875, -0.0338134765625, 0.043212890625, -0.0013227462768554688, 0.055267333984375, 0.007442474365234375, -0.05560302734375, -0.04742431640625, -0.02777099609375, 0.0218505859375, 0.016754150390625, -0.027618408203125, 0.0279083251953125, -0.0190582275390625, -0.052764892578125, -0.0447998046875, 0.00861358642578125, 
0.02105712890625, 0.025390625, 0.050079345703125, -0.037353515625, -0.04449462890625, -0.0831298828125, -0.002109527587890625, -0.016693115234375, -0.0102386474609375, 0.0217132568359375, 0.043701171875, -0.0230712890625, 0.0438232421875, -0.0377197265625, -0.03265380859375, 0.006687164306640625, 0.00804901123046875, 0.0273284912109375, 0.0458984375, 0.038848876953125, -0.05267333984375, -0.03302001953125, 0.008148193359375, -0.072021484375, -0.00650787353515625, 0.01934814453125, -0.024749755859375, 0.011871337890625, 0.02935791015625, -0.0271148681640625, 0.043487548828125, 0.0477294921875, -0.0338134765625, 0.019287109375, -0.019012451171875, 0.0274810791015625, -0.0731201171875, 0.0234222412109375, -0.004550933837890625, -0.021453857421875, -0.028717041015625, 0.0189208984375, 0.027862548828125, 0.00009524822235107422, -0.056976318359375, 0.046478271484375, -0.053497314453125, 0.0223236083984375, -0.0259246826171875, 0.005809783935546875, 0.00609588623046875, 0.05224609375, -0.0043182373046875, 0.036407470703125, 0.0298004150390625, -0.03564453125, 0.024322509765625, 0.04180908203125, -0.02655029296875, 0.0302276611328125, -0.05267333984375, 0.035919189453125, -0.01934814453125, 0.0293121337890625, -0.043060302734375, -0.03533935546875, 0.0294036865234375, -0.06439208984375, 0.01226043701171875, -0.03948974609375, -0.029296875, -0.029449462890625, -0.03564453125, 0.04168701171875, 0.050048828125, -0.044647216796875, 0.0310516357421875, 0.038787841796875, -0.002315521240234375, -0.047576904296875, -0.046417236328125, -0.00890350341796875, -0.03118896484375, -0.051361083984375, 0.016326904296875, -0.00556182861328125, -0.003688812255859375, 0.0036334991455078125, 0.00926971435546875, -0.0197906494140625, -0.007167816162109375, 0.03485107421875, 0.03753662109375, 0.016937255859375, -0.022613525390625, 0.0080718994140625, 0.0133056640625, 0.00946044921875, -0.01519012451171875, 0.04443359375, -0.00942230224609375, 0.00490570068359375, -0.046142578125, 
0.01007843017578125, 0.0408935546875, -0.0166473388671875, 0.06890869140625, 0.0290374755859375, -0.047576904296875, 0.00441741943359375, -0.045806884765625, 0.0005211830139160156, -0.03424072265625, -0.0025844573974609375, -0.00021839141845703125, -0.0555419921875, 0.05902099609375, 0.04791259765625, -0.01412200927734375, 0.04473876953125, 0.046844482421875, -0.00133514404296875, 0.060455322265625, 0.027801513671875, 0.013702392578125, 0.040069580078125, -0.0243988037109375, -0.0168304443359375, -0.08306884765625, -0.058441162109375, -0.039825439453125, -0.01319122314453125, -0.041748046875, -0.02734375, 0.0263214111328125, 0.00016927719116210938, -0.04754638671875, 0.0537109375, -0.040496826171875, 0.02801513671875, 0.038787841796875, 0.040496826171875, 0.0275115966796875, -0.0025730133056640625, 0.0088348388671875, -0.0025844573974609375, -0.028961181640625, -0.0255126953125, 0.09320068359375, 0.037261962890625, 0.06634521484375, 0.04052734375, 0.01065826416015625, 0.029541015625, 0.02337646484375, -0.01457977294921875, 0.0201568603515625, -0.010894775390625, -0.07293701171875, 0.01038360595703125, -0.034820556640625, -0.08013916015625, 0.0209808349609375, -0.0188446044921875, -0.06402587890625, 0.036285400390625, 0.017486572265625, -0.0311431884765625, 0.0253448486328125, -0.05743408203125, 0.058349609375, 0.0184478759765625, -0.041595458984375, 0.01800537109375, -0.057861328125, 0.0277862548828125, 0.0166473388671875, -0.00011086463928222656, -0.009063720703125, 0.022369384765625, 0.057159423828125, -0.041473388671875, 0.0672607421875, -0.022247314453125, -0.008270263671875, 0.03961181640625, -0.007579803466796875, 0.0338134765625, 0.024932861328125, 0.0011425018310546875, 0.03790283203125, -0.023284912109375, -0.039276123046875, -0.0323486328125, 0.06744384765625, -0.06512451171875, -0.039459228515625, -0.037078857421875, -0.0225067138671875, 0.00003147125244140625, 0.0263214111328125, 0.04742431640625, 0.0211334228515625, -0.0020313262939453125, 0.03125, 
0.056396484375, 0.0011796951293945312, 0.0284271240234375, 0.0379638671875, -0.01131439208984375, -0.054412841796875, 0.0189208984375, -0.00383758544921875, 0.00878143310546875, 0.00794219970703125, -0.003101348876953125, -0.05059814453125, -0.051849365234375, -0.032135009765625, 0.03228759765625, -0.041778564453125, -0.049041748046875, -0.061859130859375, -0.02032470703125, -0.054443359375, 0.0124664306640625, -0.0206146240234375, -0.0243988037109375, -0.035980224609375, -0.01348114013671875, 0.0474853515625, 0.040740966796875, -0.0209503173828125, 0.032989501953125, -0.03533935546875, 0.0187530517578125, 0.0183258056640625, 0.0275115966796875, -0.02069091796875, -0.07647705078125, -0.01824951171875, 0.01708984375, -0.0303955078125, -0.0677490234375, 0.0364990234375, -0.00690460205078125, 0.0215301513671875, 0.01678466796875, 0.00658416748046875, 0.037322998046875, -0.007015228271484375, 0.043792724609375, 0.0005412101745605469, -0.056396484375, 0.04962158203125, -0.061676025390625, 0.03570556640625, 0.05877685546875, 0.025390625, -0.0164337158203125, -0.026611328125, -0.06341552734375, -0.0677490234375, 0.0501708984375, 0.04052734375, 0.017608642578125, -0.00795745849609375, 0.0213470458984375, 0.0156707763671875, 0.0143890380859375, -0.0858154296875, -0.0229949951171875, -0.040069580078125, -0.007747650146484375, 0.0288238525390625, -0.01038360595703125, -0.01085662841796875, -0.019927978515625, 0.0660400390625, -0.0096588134765625, 0.041229248046875, 0.01922607421875, -0.0114288330078125, -0.0110931396484375, 0.0012340545654296875, 0.035919189453125, 0.03790283203125, -0.047271728515625, -0.01163482666015625, -0.0074310302734375, -0.055328369140625, -0.0008363723754882812, 0.018402099609375, -0.018310546875, -0.01934814453125, 0.01399993896484375, 0.08856201171875, 0.001338958740234375, -0.017486572265625, 0.0460205078125, -0.0195159912109375, -0.0157012939453125, -0.037506103515625, 0.0232391357421875, -0.0031375885009765625, 0.0167083740234375, 
0.03118896484375, 0.007450103759765625, 0.00830841064453125, -0.0304107666015625, 0.005706787109375, 0.01702880859375, -0.0143280029296875, -0.0189208984375, 0.0780029296875, 0.0032196044921875, -0.0034351348876953125, 0.0535888671875, -0.01325225830078125, 0.00019538402557373047, 0.040191650390625, 0.04852294921875, 0.04461669921875, -0.022003173828125, 0.0199432373046875, 0.061370849609375, 0.03045654296875, -0.00493621826171875, 0.026123046875, 0.01450347900390625, -0.04205322265625, -0.007701873779296875, -0.0258331298828125, -0.01137542724609375, 0.052703857421875, -0.0771484375, 0.021820068359375, -0.047882080078125, -0.016937255859375, -0.0202484130859375, 0.0114288330078125, -0.056396484375, 0.041595458984375, -0.00872802734375, 0.06439208984375, -0.07550048828125, 0.06573486328125, 0.04827880859375, -0.040740966796875, -0.057159423828125, -0.02569580078125, 0.0003669261932373047, -0.048858642578125, 0.0016269683837890625, 0.004451751708984375, -0.003818511962890625, 0.0017194747924804688, -0.059783935546875, -0.0648193359375, 0.1044921875, 0.0382080078125, -0.0142822265625, 0.0019435882568359375, -0.0309295654296875, 0.043212890625, -0.034393310546875, 0.047454833984375, 0.034027099609375, 0.0246124267578125, -0.0026111602783203125, -0.058502197265625, 0.0021800994873046875, -0.047454833984375, 0.007640838623046875, 0.0171966552734375, -0.072265625, 0.07861328125, 0.0008883476257324219, -0.0246734619140625, 0.0213470458984375, 0.07208251953125, 0.01385498046875, 0.0016765594482421875, 0.040374755859375, 0.055999755859375, 0.044281005859375, -0.0046844482421875, 0.0740966796875, 0.023101806640625, 0.042816162109375, 0.10540771484375, -0.0015039443969726562, 0.052642822265625, 0.0157012939453125, -0.007556915283203125, 0.06866455078125, 0.0614013671875, -0.0139312744140625, 0.045013427734375, -0.00980377197265625, -0.0221099853515625, 0.00951385498046875, -0.00742340087890625, -0.03826904296875, 0.03857421875, 0.001071929931640625, -0.026458740234375, 
-0.01441192626953125, -0.005474090576171875, 0.039276123046875, 0.01499176025390625, -0.0141448974609375, 0.044281005859375, -0.0205841064453125, -0.03826904296875, 0.07061767578125, -0.0006070137023925781, 0.05322265625, -0.050079345703125, 0.007457733154296875, -0.051025390625, -0.00818634033203125, -0.0250091552734375, -0.046875, 0.017486572265625, 0.0182342529296875, -0.011199951171875, 0.0156402587890625, 0.0361328125, -0.02410888671875, -0.0214385986328125, 0.0298919677734375, 0.01540374755859375, 0.0203704833984375, 0.0094757080078125, -0.050689697265625, 0.01971435546875, 0.0183868408203125, -0.0062408447265625, 0.01385498046875, 0.01141357421875, -0.0203704833984375, 0.050323486328125, 0.04595947265625, 0.004261016845703125, 0.0020580291748046875, -0.01128387451171875, 0.0684814453125, -0.036376953125, -0.032867431640625, -0.0401611328125, 0.042449951171875, -0.0126495361328125, -0.0509033203125, 0.053680419921875, 0.028350830078125, 0.0762939453125, -0.002872467041015625, 0.03717041015625, -0.03375244140625, 0.02410888671875, -0.0555419921875, 0.06646728515625, -0.0498046875, 0.01354217529296875, -0.0153350830078125, -0.062103271484375, -0.021636962890625, 0.051544189453125, 0.01222991943359375, 0.0221099853515625, 0.070068359375, 0.0653076171875, -0.014678955078125, 0.006832122802734375, 0.005245208740234375, 0.0255889892578125, 0.01262664794921875, 0.04022216796875, 0.031402587890625, -0.059600830078125, 0.027557373046875, -0.0421142578125, -0.02142333984375, -0.007328033447265625, -0.073974609375, -0.0615234375, -0.05035400390625, -0.0232391357421875, -0.042755126953125, 0.0083770751953125, 0.05804443359375, 0.053985595703125, -0.0467529296875, -0.00658416748046875, 0.0121612548828125, -0.006740570068359375, -0.0053558349609375, -0.016693115234375, 0.0196533203125, 0.041778564453125, -0.0731201171875, 0.031280517578125, -0.0027008056640625, 0.025634765625, -0.004467010498046875, 0.0131683349609375, -0.03436279296875, -0.004425048828125, 
0.00666046142578125, 0.04833984375, -0.0288238525390625, -0.034332275390625, -0.004856109619140625, 0.00939178466796875, 0.029388427734375, 0.0298919677734375, -0.056732177734375, 0.032562255859375, 0.0298004150390625, 0.01255035400390625, 0.047271728515625, 0.01239776611328125, 0.0330810546875, -0.04766845703125, 0.030181884765625, 0.0111083984375, 0.032745361328125, 0.039215087890625, -0.04248046875, 0.051483154296875, 0.00308990478515625, -0.05023193359375, -0.051544189453125, 0.005954742431640625, -0.0733642578125, -0.01056671142578125, 0.08782958984375, -0.0175628662109375, -0.03240966796875, -0.0097503662109375, -0.0017919540405273438, 0.0252532958984375, -0.048065185546875, 0.052337646484375, 0.037445068359375, -0.007160186767578125, -0.01708984375, -0.04083251953125, 0.030548095703125, 0.0196075439453125, -0.0654296875, -0.003025054931640625, 0.046905517578125, 0.0341796875, 0.0008473396301269531, 0.0693359375, -0.056121826171875, 0.02044677734375, -0.021087646484375, 0.0223236083984375, -0.02349853515625, -0.03790283203125, -0.03900146484375, 0.00384521484375, -0.00942230224609375, -0.02508544921875 ] ]
jordiclive/Llama-2-70b-oasst-1-200
2023-07-26T09:59:11.000Z
[ "transformers", "pytorch", "llama", "text-generation", "sft", "en", "de", "es", "fr", "dataset:OpenAssistant/oasst1", "license:apache-2.0", "has_space", "text-generation-inference", "region:us" ]
text-generation
jordiclive
null
null
jordiclive/Llama-2-70b-oasst-1-200
2
5,885
transformers
2023-07-25T12:14:07
--- license: apache-2.0 language: - en - de - es - fr tags: - sft inference: false datasets: - OpenAssistant/oasst1 --- # Open-Assistant Llama2 70B SFT OASST This model is a fine-tuning of [Llama2 70B](https://huggingface.co/meta-llama/Llama-2-70b-hf) LLM. It was trained on a mixture of OASST top-1 threads. ## Model Details - **Finetuned from:** [Llama2 70B](https://huggingface.co/meta-llama/Llama-2-70b-hf) - **Model type:** Causal decoder-only transformer language model - **Language:** English, German, Spanish, French (and limited capabilities in Italian, Portuguese, Polish, Dutch, Romanian, Czech, Swedish); - **License:** Apache 2.0 - **Contact:** [Open-Assistant Discord](https://ykilcher.com/open-assistant-discord) ## Prompting Two special tokens are used to mark the beginning of user and assistant turns: `<|prompter|>` and `<|assistant|>`. Each turn ends with a `</s>` token. Input prompt example: ``` <|prompter|>What is a meme, and what's the history behind this word?</s><|assistant|> ``` The input ends with the `<|assistant|>` token to signal that the model should start generating the assistant reply.
1,133
[ [ -0.033294677734375, -0.0703125, 0.0222625732421875, 0.02593994140625, -0.041168212890625, -0.006786346435546875, 0.0041351318359375, -0.021087646484375, 0.024658203125, 0.047698974609375, -0.071044921875, -0.0164794921875, -0.046966552734375, 0.0218963623046875, -0.017120361328125, 0.0853271484375, -0.0191802978515625, 0.0118865966796875, -0.001575469970703125, -0.01073455810546875, -0.04962158203125, -0.030059814453125, -0.0810546875, -0.031646728515625, 0.0201568603515625, 0.03350830078125, 0.049896240234375, 0.04052734375, 0.03631591796875, 0.0213775634765625, -0.0261688232421875, 0.0210113525390625, -0.050537109375, -0.005420684814453125, -0.0164794921875, -0.03228759765625, -0.059173583984375, -0.01611328125, 0.0307159423828125, 0.0341796875, -0.0076141357421875, 0.036956787109375, -0.0123291015625, 0.0187835693359375, -0.027679443359375, 0.024261474609375, -0.0297088623046875, -0.0077056884765625, -0.0182952880859375, -0.01015472412109375, -0.030059814453125, -0.01092529296875, -0.0111846923828125, -0.04638671875, 0.0014925003051757812, 0.005462646484375, 0.0733642578125, 0.04168701171875, -0.031494140625, -0.00621795654296875, -0.054473876953125, 0.03857421875, -0.046173095703125, 0.0299072265625, 0.0443115234375, 0.03887939453125, -0.0245513916015625, -0.05352783203125, -0.05340576171875, -0.0035419464111328125, -0.0020236968994140625, -0.0012998580932617188, -0.033721923828125, -0.01384735107421875, 0.0104217529296875, 0.0235748291015625, -0.0291595458984375, 0.0121917724609375, -0.047576904296875, -0.0094451904296875, 0.01995849609375, 0.018218994140625, 0.01419830322265625, 0.0026988983154296875, -0.026885986328125, -0.00954437255859375, -0.053436279296875, -0.0038471221923828125, 0.027740478515625, 0.01206207275390625, -0.04132080078125, 0.069580078125, -0.029388427734375, 0.052764892578125, 0.023223876953125, -0.0103302001953125, 0.0190582275390625, -0.023651123046875, -0.015228271484375, -0.0112152099609375, 0.06549072265625, 0.01367950439453125, 
0.0231170654296875, -0.0116424560546875, -0.01015472412109375, 0.01081085205078125, 0.01096343994140625, -0.045989990234375, -0.005809783935546875, 0.006072998046875, -0.03948974609375, -0.0307769775390625, 0.012603759765625, -0.03271484375, -0.004718780517578125, -0.01983642578125, 0.026214599609375, -0.0087432861328125, -0.0325927734375, 0.019287109375, -0.005733489990234375, 0.03521728515625, 0.029144287109375, -0.05157470703125, 0.0271148681640625, 0.0285797119140625, 0.052947998046875, 0.00916290283203125, -0.0188446044921875, -0.01486968994140625, 0.01009368896484375, -0.006069183349609375, 0.054473876953125, -0.025146484375, -0.03936767578125, -0.01221466064453125, 0.025146484375, 0.004901885986328125, -0.0247344970703125, 0.043609619140625, -0.057342529296875, 0.03900146484375, -0.01068115234375, -0.00171661376953125, -0.023681640625, 0.0090484619140625, -0.06524658203125, 0.08892822265625, 0.01209259033203125, -0.052093505859375, -0.0018568038940429688, -0.08477783203125, -0.01512908935546875, -0.0217437744140625, 0.0005807876586914062, -0.016998291015625, -0.0015001296997070312, 0.01039886474609375, 0.018768310546875, -0.033477783203125, 0.01329803466796875, -0.01416015625, -0.0217132568359375, 0.01068878173828125, -0.0307769775390625, 0.0670166015625, 0.006473541259765625, -0.0167999267578125, 0.01555633544921875, -0.07470703125, -0.032562255859375, 0.0386962890625, -0.032379150390625, -0.00868988037109375, -0.0117340087890625, -0.005794525146484375, 0.014923095703125, 0.053466796875, -0.045379638671875, 0.03094482421875, -0.01983642578125, 0.0241546630859375, 0.0733642578125, -0.00746917724609375, 0.0257110595703125, -0.01910400390625, 0.03509521484375, 0.00583648681640625, 0.026702880859375, -0.006710052490234375, -0.05340576171875, -0.07501220703125, -0.0168609619140625, -0.00418853759765625, 0.05157470703125, -0.056732177734375, 0.054473876953125, -0.00435638427734375, -0.037506103515625, -0.06011962890625, 0.0031681060791015625, 0.0305633544921875, 
0.03338623046875, 0.033905029296875, -0.0269775390625, -0.043548583984375, -0.060089111328125, 0.00909423828125, -0.021148681640625, -0.0084991455078125, 0.023193359375, 0.037139892578125, -0.034454345703125, 0.056060791015625, -0.038665771484375, -0.0299072265625, -0.0063934326171875, -0.0057525634765625, 0.0325927734375, 0.0275421142578125, 0.038848876953125, -0.0377197265625, -0.01153564453125, -0.00598907470703125, -0.058807373046875, -0.02020263671875, 0.016845703125, -0.034942626953125, 0.0123748779296875, 0.036865234375, -0.05072021484375, 0.033782958984375, 0.041595458984375, -0.05230712890625, 0.027374267578125, 0.00247955322265625, -0.0011587142944335938, -0.10498046875, 0.00897979736328125, -0.0287322998046875, -0.01380157470703125, -0.0287933349609375, 0.0121002197265625, -0.020599365234375, 0.01146697998046875, -0.046142578125, 0.05865478515625, -0.036102294921875, 0.00838470458984375, -0.032379150390625, -0.0089263916015625, -0.01457977294921875, 0.055206298828125, 0.005229949951171875, 0.053314208984375, 0.032684326171875, -0.0305633544921875, 0.022857666015625, 0.047821044921875, 0.00457763671875, 0.038818359375, -0.08160400390625, 0.03460693359375, -0.0026836395263671875, 0.029510498046875, -0.08489990234375, -0.0207977294921875, 0.04345703125, -0.032745361328125, 0.014068603515625, 0.00551605224609375, -0.059814453125, -0.0280914306640625, -0.0052947998046875, 0.048675537109375, 0.037567138671875, -0.053314208984375, 0.03485107421875, 0.0218505859375, 0.00464630126953125, -0.0316162109375, -0.045166015625, -0.0148773193359375, 0.007160186767578125, -0.05303955078125, 0.0007596015930175781, -0.005420684814453125, 0.003276824951171875, 0.0005288124084472656, 0.0026149749755859375, -0.0185089111328125, 0.01003265380859375, 0.0279693603515625, 0.0237579345703125, -0.0260009765625, -0.0204925537109375, 0.020751953125, -0.01459503173828125, 0.00452423095703125, 0.01800537109375, 0.07623291015625, -0.01526641845703125, -0.0399169921875, 
-0.039093017578125, 0.0161285400390625, 0.043212890625, -0.0199432373046875, 0.057464599609375, 0.04608154296875, -0.0288848876953125, 0.02398681640625, -0.04290771484375, -0.02264404296875, -0.03778076171875, 0.0146636962890625, -0.036102294921875, -0.07110595703125, 0.04876708984375, -0.005886077880859375, 0.00441741943359375, 0.04815673828125, 0.058685302734375, 0.00884246826171875, 0.07293701171875, 0.049346923828125, -0.011383056640625, 0.04638671875, -0.018951416015625, 0.0004329681396484375, -0.07373046875, -0.0253448486328125, -0.048187255859375, -0.0096435546875, -0.030670166015625, -0.033935546875, 0.028839111328125, 0.0228729248046875, -0.036956787109375, 0.047149658203125, -0.03216552734375, 0.035003662109375, 0.05255126953125, 0.02276611328125, 0.0277099609375, -0.01226806640625, 0.0010213851928710938, 0.0214996337890625, -0.042083740234375, -0.063232421875, 0.07110595703125, 0.04254150390625, 0.068603515625, 0.02984619140625, 0.04998779296875, -0.002689361572265625, 0.003360748291015625, -0.0341796875, 0.04412841796875, 0.00003933906555175781, -0.04754638671875, -0.021575927734375, 0.0038623809814453125, -0.076416015625, 0.00673675537109375, -0.001934051513671875, -0.05828857421875, 0.0010509490966796875, 0.0158233642578125, 0.006923675537109375, 0.018463134765625, -0.0340576171875, 0.0616455078125, -0.0016889572143554688, 0.0035247802734375, -0.006481170654296875, -0.0504150390625, 0.040496826171875, 0.002338409423828125, -0.0008382797241210938, 0.0018758773803710938, -0.005641937255859375, 0.052093505859375, -0.03302001953125, 0.09149169921875, 0.002490997314453125, -0.01409149169921875, 0.023834228515625, 0.01454925537109375, 0.047119140625, -0.00008171796798706055, -0.0006856918334960938, 0.042999267578125, -0.0108489990234375, -0.029510498046875, -0.00728607177734375, 0.0291595458984375, -0.09130859375, -0.038604736328125, -0.017547607421875, -0.020355224609375, 0.01081085205078125, 0.0260467529296875, 0.0264739990234375, -0.0007352828979492188, 
-0.0283203125, 0.007843017578125, 0.040771484375, -0.006450653076171875, 0.0175628662109375, 0.059814453125, -0.00351715087890625, -0.030242919921875, 0.044097900390625, -0.023590087890625, 0.0166168212890625, 0.032989501953125, 0.00867462158203125, -0.0194091796875, 0.0021686553955078125, -0.0557861328125, 0.03826904296875, -0.049530029296875, -0.0233306884765625, -0.044036865234375, -0.0176544189453125, -0.03131103515625, 0.004119873046875, -0.018768310546875, -0.0322265625, -0.0712890625, -0.00931549072265625, 0.04425048828125, 0.0596923828125, 0.0037994384765625, 0.057373046875, -0.045501708984375, 0.042327880859375, 0.01629638671875, 0.0283660888671875, -0.004009246826171875, -0.05889892578125, -0.002384185791015625, 0.00417327880859375, -0.044219970703125, -0.0748291015625, 0.00536346435546875, 0.002140045166015625, 0.053558349609375, 0.035491943359375, -0.0029048919677734375, 0.04083251953125, -0.0499267578125, 0.07110595703125, 0.0078277587890625, -0.059417724609375, 0.040008544921875, -0.0300140380859375, 0.030303955078125, 0.028076171875, 0.0268707275390625, -0.0225067138671875, -0.045013427734375, -0.045745849609375, -0.057373046875, 0.06890869140625, 0.01446533203125, 0.015960693359375, -0.0200653076171875, 0.01317596435546875, 0.01099395751953125, 0.0133514404296875, -0.07049560546875, -0.03094482421875, -0.0228271484375, -0.01207733154296875, 0.0081787109375, -0.0272979736328125, -0.0177764892578125, -0.0084381103515625, 0.053985595703125, -0.01024627685546875, 0.04364013671875, -0.003757476806640625, -0.0024471282958984375, -0.0390625, 0.00376129150390625, 0.06646728515625, 0.029449462890625, -0.0132293701171875, -0.00351715087890625, 0.0231781005859375, -0.0287933349609375, 0.0015239715576171875, -0.00678253173828125, 0.00010716915130615234, -0.001239776611328125, 0.039764404296875, 0.08880615234375, 0.01166534423828125, -0.0291900634765625, 0.032958984375, -0.00916290283203125, 0.0034008026123046875, -0.0284271240234375, 0.008148193359375, 
0.0061492919921875, 0.030242919921875, 0.0109405517578125, -0.003376007080078125, 0.00928497314453125, -0.020111083984375, -0.01119232177734375, 0.03045654296875, -0.01255035400390625, -0.04052734375, 0.06292724609375, 0.0207061767578125, -0.05084228515625, 0.0260467529296875, -0.00433349609375, -0.028472900390625, 0.0452880859375, 0.0628662109375, 0.0740966796875, -0.0218505859375, -0.004642486572265625, 0.0265655517578125, 0.036865234375, -0.007476806640625, 0.053375244140625, -0.0038394927978515625, -0.040008544921875, -0.012786865234375, -0.050384521484375, -0.03924560546875, 0.0068359375, -0.050811767578125, 0.050384521484375, -0.0182342529296875, -0.0038394927978515625, -0.0110321044921875, -0.029754638671875, -0.06854248046875, -0.0008916854858398438, -0.021331787109375, 0.086669921875, -0.069091796875, 0.059295654296875, 0.06744384765625, -0.040130615234375, -0.05389404296875, -0.0196075439453125, -0.01073455810546875, -0.09112548828125, 0.04302978515625, -0.00665283203125, -0.0011653900146484375, 0.0032634735107421875, -0.052093505859375, -0.0791015625, 0.0977783203125, 0.019287109375, -0.035064697265625, -0.00638580322265625, 0.00662994384765625, 0.049072265625, -0.0191650390625, 0.037078857421875, 0.0257568359375, 0.0299530029296875, 0.0090179443359375, -0.1033935546875, 0.01629638671875, -0.016754150390625, -0.0021209716796875, 0.0155792236328125, -0.07476806640625, 0.0841064453125, -0.00949859619140625, -0.0006909370422363281, 0.03521728515625, 0.049285888671875, 0.0197296142578125, 0.00937652587890625, 0.04132080078125, 0.038116455078125, 0.0266571044921875, -0.006282806396484375, 0.066162109375, -0.0200653076171875, 0.023590087890625, 0.0858154296875, -0.00814056396484375, 0.06536865234375, 0.04443359375, -0.015777587890625, 0.0452880859375, 0.050079345703125, 0.0004820823669433594, 0.048248291015625, -0.001575469970703125, 0.0011844635009765625, 0.0014333724975585938, -0.00783538818359375, -0.038909912109375, 0.039703369140625, 0.0174713134765625, 
-0.030120849609375, -0.0235137939453125, -0.01178741455078125, 0.012969970703125, -0.0200653076171875, -0.0111541748046875, 0.0919189453125, 0.0005669593811035156, -0.043426513671875, 0.054931640625, 0.0168304443359375, 0.04840087890625, -0.043609619140625, -0.020721435546875, -0.020050048828125, 0.0014524459838867188, -0.0081024169921875, -0.047088623046875, 0.018890380859375, 0.00766754150390625, -0.021881103515625, 0.008636474609375, 0.046142578125, -0.026397705078125, -0.032745361328125, 0.0210418701171875, 0.040863037109375, 0.0307769775390625, 0.0156097412109375, -0.045379638671875, 0.0168914794921875, -0.009124755859375, -0.030914306640625, 0.0029354095458984375, 0.03729248046875, -0.0256805419921875, 0.035003662109375, 0.05499267578125, -0.009857177734375, 0.007801055908203125, -0.0017242431640625, 0.0821533203125, -0.030242919921875, -0.035247802734375, -0.0626220703125, 0.042755126953125, 0.01293182373046875, -0.04736328125, 0.034393310546875, 0.024200439453125, 0.07061767578125, -0.030670166015625, 0.03375244140625, -0.006275177001953125, 0.03314208984375, -0.032073974609375, 0.042724609375, -0.040985107421875, 0.0006647109985351562, -0.0217437744140625, -0.063720703125, 0.0012979507446289062, 0.06024169921875, 0.00792694091796875, -0.002349853515625, 0.0264129638671875, 0.06353759765625, -0.0185699462890625, -0.0010957717895507812, 0.011016845703125, 0.0169219970703125, 0.025909423828125, 0.029815673828125, 0.05377197265625, -0.04705810546875, 0.040740966796875, -0.0166778564453125, -0.03564453125, -0.03448486328125, -0.057769775390625, -0.093017578125, -0.029327392578125, -0.0048065185546875, -0.035858154296875, -0.0105133056640625, 0.06787109375, 0.042236328125, -0.0557861328125, -0.023834228515625, -0.00031828880310058594, 0.0089569091796875, -0.006404876708984375, -0.018524169921875, 0.01446533203125, 0.003398895263671875, -0.056182861328125, 0.02215576171875, -0.0060577392578125, 0.027313232421875, -0.038604736328125, -0.0134124755859375, 
-0.018157958984375, 0.01421356201171875, 0.04693603515625, 0.0255126953125, -0.07470703125, -0.0219879150390625, 0.017486572265625, -0.01068115234375, -0.0116119384765625, 0.023590087890625, -0.033721923828125, 0.02178955078125, 0.0345458984375, 0.020965576171875, 0.0297088623046875, -0.007312774658203125, 0.039031982421875, -0.06329345703125, 0.0280303955078125, -0.0006871223449707031, 0.03662109375, 0.0090789794921875, -0.031982421875, 0.054931640625, 0.01256561279296875, -0.04132080078125, -0.07427978515625, 0.01593017578125, -0.0933837890625, -0.0035247802734375, 0.09771728515625, 0.0010128021240234375, -0.0220947265625, 0.01023101806640625, -0.0621337890625, 0.04052734375, -0.042999267578125, 0.055633544921875, 0.046051025390625, -0.0052032470703125, -0.002063751220703125, -0.018402099609375, 0.0096435546875, 0.0082550048828125, -0.06280517578125, -0.032806396484375, 0.02496337890625, 0.0243072509765625, 0.03106689453125, 0.05694580078125, -0.0019550323486328125, 0.040130615234375, -0.0169677734375, 0.020599365234375, -0.0207061767578125, -0.025604248046875, -0.0193328857421875, -0.0150604248046875, 0.010589599609375, -0.043853759765625 ] ]
Gryphe/MythoBoros-13b
2023-08-04T09:53:03.000Z
[ "transformers", "pytorch", "llama", "text-generation", "en", "license:other", "endpoints_compatible", "has_space", "text-generation-inference", "region:us" ]
text-generation
Gryphe
null
null
Gryphe/MythoBoros-13b
12
5,882
transformers
2023-07-22T09:05:00
--- license: other language: - en --- ## Model details MythoBoros-13b can be considered a sister model to [MythoLogic-13b](https://huggingface.co/Gryphe/MythoLogic-13b), sharing the same goals but having a different approach. Whereas the previous model was a series of experimental gradient merges, this one is a simple straight-up 66/34 merge of [Chronos](https://huggingface.co/elinas/chronos-13b) and the freshly released [Ouroboros](https://huggingface.co/CalderaAI/13B-Ouroboros), providing a very solid foundation for a well-performing roleplaying model. MythoBoros tends to be somewhat more formal with its responses in comparison to MythoLogic. My advice? Try both, see which one you prefer. Quantized models are available from TheBloke: [GGML](https://huggingface.co/TheBloke/MythoBoros-13B-GGML) - [GPTQ](https://huggingface.co/TheBloke/MythoBoros-13B-GPTQ) (You're the best!) ## Prompt Format This model works best with Alpaca formatting, so for optimal model performance, use: ``` <System prompt/Character Card> ### Instruction: Your instruction or question here. For roleplay purposes, I suggest the following - Write <CHAR NAME>'s next reply in a chat between <YOUR NAME> and <CHAR NAME>. Write a single reply only. ### Response: ```
1,256
[ [ -0.031982421875, -0.0460205078125, 0.01441192626953125, 0.02239990234375, -0.040740966796875, 0.0034885406494140625, 0.0035572052001953125, -0.05328369140625, 0.036224365234375, 0.045654296875, -0.052764892578125, -0.0277862548828125, -0.037933349609375, 0.009918212890625, -0.0010547637939453125, 0.08013916015625, 0.020355224609375, 0.0220489501953125, -0.004421234130859375, -0.008941650390625, -0.0284576416015625, -0.032745361328125, -0.08770751953125, -0.0565185546875, 0.037994384765625, -0.006343841552734375, 0.0736083984375, 0.06036376953125, 0.038787841796875, 0.028656005859375, -0.0203704833984375, 0.0238494873046875, -0.04156494140625, 0.0105743408203125, -0.005039215087890625, -0.033905029296875, -0.04718017578125, 0.0118255615234375, 0.017791748046875, 0.036041259765625, -0.03265380859375, 0.0186004638671875, -0.008636474609375, 0.0206756591796875, -0.0254974365234375, 0.0271148681640625, 0.0043487548828125, 0.0235595703125, 0.0022144317626953125, -0.0067291259765625, -0.021240234375, -0.06463623046875, 0.02105712890625, -0.0675048828125, 0.00296783447265625, 0.016021728515625, 0.05889892578125, 0.0082550048828125, -0.028778076171875, -0.031890869140625, -0.047698974609375, 0.04376220703125, -0.05670166015625, 0.0213470458984375, 0.0361328125, 0.040924072265625, 0.00539398193359375, -0.042633056640625, -0.0206451416015625, -0.0399169921875, -0.00920867919921875, 0.01214599609375, -0.0357666015625, 0.0014057159423828125, 0.0204315185546875, 0.043792724609375, -0.038848876953125, 0.01526641845703125, -0.06732177734375, -0.032745361328125, 0.03375244140625, 0.0294647216796875, 0.0038967132568359375, -0.035675048828125, -0.04803466796875, -0.0260772705078125, -0.04132080078125, -0.00214385986328125, 0.04217529296875, 0.004779815673828125, -0.026641845703125, 0.0345458984375, 0.014129638671875, 0.050018310546875, 0.036224365234375, -0.0282135009765625, 0.0014286041259765625, -0.007419586181640625, -0.031402587890625, -0.0102081298828125, 0.057373046875, 
0.042083740234375, 0.01262664794921875, 0.0004532337188720703, 0.01059722900390625, 0.021453857421875, 0.0256500244140625, -0.06805419921875, -0.005519866943359375, 0.004703521728515625, -0.059539794921875, -0.049163818359375, -0.0255279541015625, -0.043853759765625, -0.03387451171875, -0.01441192626953125, 0.033111572265625, -0.06512451171875, -0.0163726806640625, 0.01055908203125, -0.0012140274047851562, 0.00627899169921875, 0.026611328125, -0.042327880859375, 0.037384033203125, 0.03521728515625, 0.05072021484375, 0.00714111328125, -0.0030269622802734375, -0.0261383056640625, 0.0013952255249023438, -0.051422119140625, 0.03521728515625, -0.030181884765625, -0.049713134765625, -0.00565338134765625, -0.0223236083984375, 0.007709503173828125, -0.036468505859375, 0.061431884765625, -0.04302978515625, 0.047882080078125, -0.0364990234375, -0.049835205078125, -0.028564453125, 0.0191802978515625, -0.06585693359375, 0.059967041015625, 0.0010042190551757812, -0.0523681640625, 0.0110626220703125, -0.042510986328125, 0.007595062255859375, 0.00522613525390625, 0.002941131591796875, -0.0106964111328125, 0.0006527900695800781, -0.006885528564453125, 0.033447265625, -0.050079345703125, 0.01346588134765625, -0.051422119140625, -0.0294189453125, 0.0143280029296875, -0.01198577880859375, 0.060455322265625, 0.035125732421875, 0.002635955810546875, -0.0003669261932373047, -0.044219970703125, -0.003513336181640625, -0.0011072158813476562, 0.0016374588012695312, -0.019287109375, -0.02545166015625, -0.001308441162109375, 0.01385498046875, 0.01168060302734375, -0.01666259765625, 0.028656005859375, -0.005527496337890625, 0.0205841064453125, 0.03961181640625, -0.0011005401611328125, 0.033599853515625, -0.051361083984375, 0.06298828125, -0.0095672607421875, 0.03387451171875, 0.005947113037109375, -0.038848876953125, -0.034942626953125, -0.037872314453125, 0.027496337890625, 0.04241943359375, -0.052093505859375, 0.029510498046875, 0.0159149169921875, -0.07891845703125, -0.031829833984375, 
-0.025543212890625, 0.0504150390625, 0.049896240234375, 0.022308349609375, -0.040740966796875, -0.03369140625, -0.07763671875, 0.0081634521484375, -0.050384521484375, 0.008819580078125, 0.0218048095703125, 0.0027141571044921875, -0.0155181884765625, 0.045318603515625, -0.0567626953125, -0.0197296142578125, -0.008575439453125, 0.0318603515625, 0.01250457763671875, 0.03973388671875, 0.07379150390625, -0.044036865234375, -0.00548553466796875, 0.00033926963806152344, -0.06134033203125, -0.002941131591796875, 0.0205841064453125, -0.00722503662109375, 0.0078582763671875, 0.01520538330078125, -0.0657958984375, 0.041656494140625, 0.04400634765625, -0.032257080078125, 0.04449462890625, -0.0194091796875, 0.04840087890625, -0.092041015625, 0.01885986328125, 0.004486083984375, -0.013092041015625, -0.045501708984375, 0.0299224853515625, -0.0088958740234375, -0.014129638671875, -0.01824951171875, 0.0489501953125, -0.032470703125, -0.01322174072265625, -0.01334381103515625, -0.02716064453125, -0.0101776123046875, 0.03192138671875, 0.0161285400390625, 0.030731201171875, 0.053863525390625, -0.04583740234375, 0.061859130859375, 0.037841796875, 0.001628875732421875, 0.032928466796875, -0.06549072265625, -0.0007195472717285156, -0.003070831298828125, 0.050201416015625, -0.0579833984375, -0.01233673095703125, 0.0531005859375, -0.033355712890625, 0.0277557373046875, -0.0162811279296875, -0.032196044921875, -0.052703857421875, -0.03521728515625, 0.03887939453125, 0.0577392578125, -0.034088134765625, 0.06939697265625, -0.0149383544921875, -0.0020694732666015625, -0.054473876953125, -0.044891357421875, -0.01776123046875, -0.05059814453125, -0.04486083984375, 0.0236663818359375, -0.0330810546875, -0.007709503173828125, 0.0004944801330566406, -0.006198883056640625, -0.0226287841796875, -0.023956298828125, 0.0223388671875, 0.0496826171875, -0.0264129638671875, -0.01263427734375, 0.002986907958984375, -0.0034809112548828125, -0.007740020751953125, 0.013427734375, 0.0543212890625, 
-0.00722503662109375, 0.01214599609375, -0.054473876953125, 0.0261993408203125, 0.0546875, -0.0109710693359375, 0.061126708984375, 0.044830322265625, -0.03326416015625, 0.022674560546875, -0.060760498046875, -0.01898193359375, -0.031890869140625, -0.0122528076171875, -0.021759033203125, -0.04937744140625, 0.06390380859375, 0.01499176025390625, -0.01216888427734375, 0.01995849609375, 0.028045654296875, -0.003475189208984375, 0.047271728515625, 0.0560302734375, -0.006317138671875, 0.0175323486328125, -0.0185699462890625, -0.00789642333984375, -0.03619384765625, -0.024169921875, -0.0277099609375, 0.0018310546875, -0.04144287109375, -0.04583740234375, 0.023773193359375, 0.019287109375, -0.0157012939453125, 0.05682373046875, -0.01107025146484375, 0.007312774658203125, 0.04180908203125, 0.0181427001953125, 0.0268707275390625, 0.00789642333984375, 0.0203857421875, -0.00820159912109375, -0.047332763671875, -0.03985595703125, 0.07073974609375, 0.0321044921875, 0.076416015625, 0.02728271484375, 0.06719970703125, -0.0027790069580078125, 0.0309295654296875, -0.0369873046875, 0.036224365234375, -0.004871368408203125, -0.062286376953125, -0.018524169921875, -0.041900634765625, -0.04876708984375, 0.0224456787109375, -0.03240966796875, -0.049591064453125, 0.026641845703125, 0.030853271484375, -0.0504150390625, 0.0019969940185546875, -0.052398681640625, 0.053375244140625, 0.026641845703125, -0.0187225341796875, -0.030792236328125, -0.0511474609375, 0.05224609375, 0.007022857666015625, 0.0006494522094726562, 0.020843505859375, -0.00014507770538330078, 0.04083251953125, -0.0284423828125, 0.06256103515625, 0.016632080078125, -0.0218353271484375, 0.034423828125, 0.0328369140625, 0.01525115966796875, 0.01514434814453125, 0.000579833984375, 0.00508880615234375, 0.02008056640625, -0.01702880859375, -0.049346923828125, 0.05438232421875, -0.05523681640625, -0.028900146484375, -0.039825439453125, -0.039306640625, -0.01500701904296875, -0.007465362548828125, 0.053375244140625, 
0.07720947265625, -0.040618896484375, -0.00620269775390625, 0.0665283203125, -0.006832122802734375, 0.03192138671875, 0.039093017578125, -0.03485107421875, -0.042510986328125, 0.0216217041015625, -0.003849029541015625, 0.02294921875, 0.03424072265625, 0.021575927734375, -0.032684326171875, 0.00443267822265625, -0.040924072265625, 0.055511474609375, -0.0175323486328125, -0.039794921875, -0.044281005859375, -0.006511688232421875, -0.043182373046875, -0.004138946533203125, -0.020904541015625, -0.048248291015625, -0.01320648193359375, -0.01468658447265625, 0.04754638671875, 0.031494140625, -0.0496826171875, 0.030731201171875, -0.056671142578125, 0.031707763671875, 0.0308685302734375, 0.011932373046875, -0.00353240966796875, -0.04815673828125, 0.01488494873046875, -0.0119781494140625, -0.0177764892578125, -0.09173583984375, 0.0269012451171875, -0.007080078125, 0.013397216796875, 0.059112548828125, -0.01277923583984375, 0.06280517578125, -0.031768798828125, 0.03887939453125, 0.041046142578125, -0.061431884765625, 0.021728515625, -0.046783447265625, -0.007633209228515625, 0.024810791015625, 0.06610107421875, -0.045013427734375, -0.0143585205078125, -0.074462890625, -0.01296234130859375, 0.062347412109375, 0.038360595703125, 0.00023853778839111328, 0.014862060546875, 0.0244140625, 0.016448974609375, 0.036834716796875, -0.06304931640625, -0.041748046875, -0.019073486328125, -0.00040078163146972656, -0.0128326416015625, -0.0266876220703125, -0.03240966796875, -0.0029754638671875, 0.049957275390625, 0.005222320556640625, 0.01959228515625, -0.006732940673828125, 0.0299835205078125, -0.0219268798828125, -0.007904052734375, 0.0247955322265625, 0.03863525390625, -0.04052734375, -0.007080078125, -0.006542205810546875, -0.03607177734375, -0.00701141357421875, 0.03369140625, 0.002605438232421875, -0.0038585662841796875, 0.059722900390625, 0.0565185546875, 0.019805908203125, -0.031494140625, 0.02764892578125, -0.003505706787109375, -0.007366180419921875, 0.007354736328125, 
0.00820159912109375, 0.018768310546875, 0.0269927978515625, 0.0118560791015625, 0.007808685302734375, 0.034912109375, -0.0738525390625, -0.01102447509765625, 0.0057830810546875, -0.01277923583984375, -0.01049041748046875, 0.03729248046875, 0.01541900634765625, -0.019439697265625, 0.020965576171875, -0.01837158203125, -0.0300750732421875, 0.05963134765625, 0.043182373046875, 0.034637451171875, -0.070068359375, 0.047119140625, 0.042755126953125, 0.02001953125, -0.016632080078125, 0.0143585205078125, 0.0016679763793945312, -0.03314208984375, -0.01641845703125, -0.0264892578125, -0.018524169921875, 0.00841522216796875, -0.077392578125, 0.023529052734375, -0.038665771484375, -0.0251312255859375, -0.00836181640625, 0.0160675048828125, -0.04571533203125, 0.051239013671875, -0.001613616943359375, 0.0672607421875, -0.07403564453125, 0.03814697265625, 0.0511474609375, -0.04779052734375, -0.089599609375, -0.033294677734375, -0.0009336471557617188, -0.061859130859375, 0.028564453125, -0.0144195556640625, 0.0120391845703125, -0.016510009765625, -0.0355224609375, -0.0804443359375, 0.107421875, 0.0262603759765625, -0.0274200439453125, -0.0230255126953125, -0.02593994140625, 0.060577392578125, -0.0308685302734375, 0.042633056640625, 0.0251007080078125, 0.0160675048828125, 0.03778076171875, -0.08599853515625, -0.0011491775512695312, 0.0019330978393554688, 0.01410675048828125, 0.007366180419921875, -0.0694580078125, 0.06146240234375, -0.0034046173095703125, -0.005344390869140625, 0.046630859375, 0.06866455078125, 0.043548583984375, 0.0201416015625, 0.044189453125, 0.045928955078125, 0.044891357421875, 0.0085296630859375, 0.08148193359375, -0.0088653564453125, 0.0282135009765625, 0.057525634765625, -0.009674072265625, 0.031524658203125, 0.010284423828125, 0.007068634033203125, 0.034759521484375, 0.062225341796875, 0.0086517333984375, 0.037506103515625, 0.009307861328125, 0.005924224853515625, -0.03564453125, -0.0125274658203125, -0.0693359375, 0.019317626953125, 
0.0034427642822265625, -0.00909423828125, -0.0157012939453125, -0.008392333984375, 0.00940704345703125, 0.01033782958984375, -0.0299835205078125, 0.021270751953125, 0.0117645263671875, -0.041717529296875, 0.02789306640625, 0.0113983154296875, 0.050689697265625, -0.07904052734375, -0.0023593902587890625, -0.05621337890625, -0.009490966796875, 0.0029144287109375, -0.0247039794921875, -0.0017251968383789062, -0.00771331787109375, -0.0261383056640625, 0.006561279296875, 0.042938232421875, -0.037872314453125, -0.043792724609375, 0.0231781005859375, 0.036773681640625, 0.00811004638671875, 0.03985595703125, -0.053192138671875, 0.02716064453125, -0.015899658203125, 0.006900787353515625, -0.0002301931381225586, 0.025360107421875, -0.0156402587890625, 0.08056640625, 0.0159149169921875, -0.00977325439453125, -0.0060577392578125, -0.009124755859375, 0.04742431640625, -0.0279541015625, -0.06134033203125, -0.024383544921875, -0.00354766845703125, -0.0017862319946289062, -0.042694091796875, 0.0460205078125, 0.040435791015625, 0.0228271484375, -0.033233642578125, 0.00872802734375, 0.002330780029296875, 0.034423828125, -0.048919677734375, 0.040863037109375, -0.0311126708984375, -0.0155792236328125, -0.04779052734375, -0.0941162109375, 0.0121612548828125, 0.0537109375, 0.0006356239318847656, 0.00553131103515625, 0.06182861328125, 0.06146240234375, -0.0087890625, 0.0207366943359375, 0.020843505859375, 0.02227783203125, -0.00730133056640625, 0.044647216796875, 0.08172607421875, -0.041168212890625, 0.0269927978515625, -0.01284027099609375, -0.0394287109375, -0.01224517822265625, -0.06689453125, -0.0804443359375, -0.032958984375, -0.03265380859375, -0.0335693359375, 0.0147552490234375, 0.0771484375, 0.0721435546875, -0.0196990966796875, -0.0367431640625, 0.03643798828125, -0.02215576171875, -0.0246734619140625, -0.008941650390625, 0.0215606689453125, 0.031036376953125, -0.062469482421875, 0.01922607421875, -0.00722503662109375, 0.0260162353515625, -0.0089569091796875, 
0.0037212371826171875, 0.0165252685546875, 0.034698486328125, 0.025054931640625, 0.025054931640625, -0.050323486328125, -0.0188751220703125, -0.0033893585205078125, -0.01070404052734375, -0.03289794921875, 0.058013916015625, -0.055908203125, -0.0104522705078125, 0.005779266357421875, 0.01216888427734375, 0.044219970703125, 0.00608062744140625, 0.051513671875, -0.01531219482421875, 0.0130615234375, 0.009918212890625, 0.0215911865234375, 0.0248260498046875, -0.040985107421875, 0.06585693359375, 0.01507568359375, -0.018310546875, -0.05987548828125, 0.0234222412109375, -0.11383056640625, -0.01250457763671875, 0.075439453125, 0.00778961181640625, -0.0049591064453125, 0.0445556640625, -0.040008544921875, 0.0169830322265625, -0.03265380859375, 0.0498046875, 0.053497314453125, -0.01287841796875, 0.009063720703125, -0.0005283355712890625, 0.046478271484375, 0.0225372314453125, -0.053375244140625, -0.018218994140625, 0.0693359375, 0.016754150390625, 0.032440185546875, 0.04840087890625, -0.009857177734375, 0.040679931640625, -0.01322174072265625, 0.00745391845703125, -0.01300811767578125, -0.01708984375, -0.020843505859375, 0.01548004150390625, -0.020599365234375, -0.006931304931640625 ] ]
Linly-AI/Chinese-LLaMA-2-13B-hf
2023-08-11T14:49:52.000Z
[ "transformers", "pytorch", "llama", "text-generation", "LLaMA2", "Linly", "Chinese-LLaMA2", "zh", "en", "endpoints_compatible", "has_space", "text-generation-inference", "region:us" ]
text-generation
Linly-AI
null
null
Linly-AI/Chinese-LLaMA-2-13B-hf
25
5,881
transformers
2023-07-24T14:53:43
--- language: - zh - en tags: - LLaMA2 - Linly - Chinese-LLaMA2 --- # Chinese-LLaMA-2-13B Linly-Chinese-LLaMA2 基于 LLaMA2进行中文化训练,使用课程学习方法跨语言迁移,词表针对中文重新设计,数据分布更均衡,收敛更稳定。 <p align="left"> 训练细节和benchmark指标详见 💻 <a href="https://github.com/CVI-SZU/Linly" target="_blank">Github Repo</a> </p> ```python from transformers import AutoModelForCausalLM, AutoTokenizer model = AutoModelForCausalLM.from_pretrained("Linly-AI/Chinese-LLaMA-2-13B-hf", device_map="cuda:0", torch_dtype=torch.float16, trust_remote_code=True) tokenizer = AutoTokenizer.from_pretrained("Linly-AI/Chinese-LLaMA-2-13B-hf", use_fast=False, trust_remote_code=True) prompt = "北京有什么好玩的地方?" prompt = f"### Instruction:{prompt.strip()} ### Response:" inputs = tokenizer(prompt, return_tensors="pt").to("cuda:0") generate_ids = model.generate(inputs.input_ids, do_sample=True, max_new_tokens=2048, top_k=10, top_p=0.85, temperature=1, repetition_penalty=1.15, eos_token_id=2, bos_token_id=1, pad_token_id=0) response = tokenizer.batch_decode(generate_ids, skip_special_tokens=True, clean_up_tokenization_spaces=False)[0] response = response.lstrip(prompt) ```
1,128
[ [ -0.020477294921875, -0.038909912109375, 0.005840301513671875, 0.036651611328125, -0.029144287109375, 0.0147247314453125, -0.01183319091796875, -0.038299560546875, 0.01161956787109375, -0.0006771087646484375, -0.04351806640625, -0.026947021484375, -0.04998779296875, 0.0159759521484375, -0.018035888671875, 0.060546875, -0.0201263427734375, -0.00035691261291503906, 0.0174560546875, -0.0007195472717285156, -0.01202392578125, -0.030303955078125, -0.051666259765625, -0.0222320556640625, -0.0005803108215332031, 0.0084228515625, 0.0255126953125, 0.059112548828125, 0.0389404296875, 0.028350830078125, -0.01198577880859375, 0.0292510986328125, -0.025421142578125, -0.0130615234375, 0.0063934326171875, -0.030853271484375, -0.046295166015625, -0.01367950439453125, 0.042236328125, 0.00838470458984375, -0.00011807680130004883, 0.03887939453125, 0.005340576171875, 0.005603790283203125, -0.014434814453125, 0.0283966064453125, -0.0390625, 0.0071563720703125, -0.0032329559326171875, -0.011962890625, -0.008636474609375, -0.0023860931396484375, -0.01419830322265625, -0.053680419921875, 0.006984710693359375, 0.0034122467041015625, 0.09619140625, 0.03765869140625, -0.035980224609375, -0.0268402099609375, -0.0222015380859375, 0.0770263671875, -0.0687255859375, -0.0005812644958496094, 0.0218353271484375, 0.0183868408203125, -0.017974853515625, -0.07159423828125, -0.050933837890625, -0.01256561279296875, -0.0169677734375, 0.006465911865234375, -0.009613037109375, -0.017303466796875, 0.026123046875, 0.01464080810546875, -0.03009033203125, 0.01446533203125, -0.027191162109375, -0.0333251953125, 0.062469482421875, 0.0194854736328125, 0.017059326171875, -0.02691650390625, -0.045196533203125, -0.02032470703125, -0.0251007080078125, 0.032989501953125, 0.00801849365234375, 0.0100555419921875, -0.042999267578125, 0.0377197265625, -0.01611328125, 0.016815185546875, 0.0217132568359375, -0.0245513916015625, 0.049072265625, -0.044158935546875, -0.0196533203125, -0.007904052734375, 0.075439453125, 
0.0272216796875, 0.001926422119140625, 0.00615692138671875, -0.0268096923828125, -0.0113372802734375, -0.027557373046875, -0.06549072265625, -0.007022857666015625, 0.018035888671875, -0.0465087890625, -0.026611328125, 0.01678466796875, -0.043853759765625, -0.0035800933837890625, -0.010650634765625, 0.024261474609375, -0.0115966796875, -0.0302734375, -0.006565093994140625, 0.0017223358154296875, 0.054779052734375, -0.0007796287536621094, -0.0596923828125, 0.004261016845703125, 0.036041259765625, 0.046966552734375, -0.01045989990234375, -0.037841796875, -0.0211029052734375, 0.00292205810546875, -0.01031494140625, 0.036224365234375, 0.0011386871337890625, -0.038604736328125, -0.01904296875, 0.02337646484375, -0.001384735107421875, -0.03277587890625, 0.024017333984375, -0.024993896484375, 0.001361846923828125, -0.025726318359375, -0.016265869140625, -0.0145111083984375, 0.0301666259765625, -0.032623291015625, 0.1011962890625, 0.0024433135986328125, -0.04486083984375, 0.00420379638671875, -0.03643798828125, -0.026519775390625, -0.0197906494140625, -0.01085662841796875, -0.036163330078125, -0.0172576904296875, 0.0284423828125, 0.030975341796875, -0.02691650390625, 0.032623291015625, -0.0026798248291015625, -0.0247344970703125, 0.0242767333984375, -0.0445556640625, 0.07525634765625, 0.01508331298828125, -0.058563232421875, 0.01509857177734375, -0.06622314453125, -0.00017964839935302734, 0.042877197265625, -0.0266571044921875, 0.0138092041015625, -0.01336669921875, -0.0006031990051269531, 0.012725830078125, 0.0579833984375, -0.026153564453125, 0.0283355712890625, -0.0271453857421875, 0.045257568359375, 0.0758056640625, -0.00943756103515625, 0.0217742919921875, -0.0196990966796875, 0.0220489501953125, 0.01039886474609375, 0.0211181640625, -0.00832366943359375, -0.03814697265625, -0.08026123046875, -0.018280029296875, -0.0022792816162109375, 0.056396484375, -0.05987548828125, 0.053680419921875, -0.004131317138671875, -0.0443115234375, -0.0250396728515625, 0.0105438232421875, 
0.02972412109375, 0.037445068359375, 0.042449951171875, -0.006683349609375, -0.057220458984375, -0.05487060546875, 0.012603759765625, -0.01535797119140625, 0.005306243896484375, 0.024871826171875, 0.052764892578125, -0.038543701171875, 0.0430908203125, -0.0178070068359375, -0.01422882080078125, -0.004688262939453125, -0.00395965576171875, 0.04620361328125, 0.037872314453125, 0.04901123046875, -0.0214385986328125, -0.03509521484375, -0.00872039794921875, -0.070068359375, -0.0005598068237304688, -0.01129150390625, -0.0308685302734375, 0.0263824462890625, 0.0222320556640625, -0.03826904296875, 0.044342041015625, 0.0303497314453125, -0.031707763671875, 0.04486083984375, -0.01172637939453125, -0.0022144317626953125, -0.0904541015625, 0.01531982421875, -0.023406982421875, 0.00029730796813964844, -0.0279998779296875, 0.005764007568359375, 0.002628326416015625, 0.021820068359375, -0.044036865234375, 0.04937744140625, -0.02630615234375, 0.00513458251953125, -0.0151519775390625, -0.01369476318359375, -0.004673004150390625, 0.0401611328125, -0.0133056640625, 0.06402587890625, 0.04962158203125, -0.0496826171875, 0.045074462890625, 0.0322265625, -0.00814056396484375, -0.0113067626953125, -0.057952880859375, 0.0228118896484375, 0.0189208984375, 0.0308685302734375, -0.08514404296875, -0.0029506683349609375, 0.042510986328125, -0.040679931640625, 0.014984130859375, 0.01507568359375, -0.032989501953125, -0.033782958984375, -0.029144287109375, 0.05914306640625, 0.037750244140625, -0.05914306640625, 0.045989990234375, 0.00705718994140625, 0.015899658203125, -0.06573486328125, -0.06768798828125, -0.01004791259765625, -0.002704620361328125, -0.038787841796875, 0.03045654296875, -0.017303466796875, -0.0021648406982421875, -0.0159454345703125, -0.00734710693359375, 0.0032100677490234375, 0.0058135986328125, -0.004913330078125, 0.042816162109375, -0.0180816650390625, -0.0294189453125, -0.0003459453582763672, -0.0272674560546875, 0.0261688232421875, -0.00841522216796875, 0.061737060546875, 
-0.0200653076171875, -0.019134521484375, -0.0557861328125, -0.00807952880859375, 0.0277862548828125, -0.0103759765625, 0.0389404296875, 0.06756591796875, -0.022003173828125, 0.0019474029541015625, -0.03070068359375, -0.01251983642578125, -0.04095458984375, 0.03466796875, -0.039306640625, -0.03472900390625, 0.06317138671875, 0.016632080078125, 0.0107879638671875, 0.05511474609375, 0.05169677734375, 0.00449371337890625, 0.0640869140625, 0.0237579345703125, -0.0249176025390625, 0.0216217041015625, -0.0701904296875, 0.00829315185546875, -0.0650634765625, -0.036773681640625, -0.0266876220703125, -0.024627685546875, -0.042877197265625, -0.036285400390625, 0.0160369873046875, 0.0016460418701171875, -0.044403076171875, 0.034942626953125, -0.06341552734375, 0.015380859375, 0.050506591796875, 0.005344390869140625, -0.0019989013671875, 0.001873016357421875, -0.02032470703125, 0.0172119140625, -0.036651611328125, -0.027435302734375, 0.08489990234375, 0.035919189453125, 0.051788330078125, 0.0009140968322753906, 0.057159423828125, -0.00008124113082885742, 0.00405120849609375, -0.0447998046875, 0.045074462890625, 0.0010318756103515625, -0.0291900634765625, -0.0207977294921875, -0.0116424560546875, -0.06500244140625, 0.003482818603515625, 0.008087158203125, -0.049041748046875, 0.0107574462890625, -0.0038299560546875, -0.0233612060546875, 0.0292816162109375, -0.0257720947265625, 0.041534423828125, -0.04669189453125, -0.01332855224609375, -0.00002002716064453125, -0.0447998046875, 0.034759521484375, -0.000644683837890625, 0.010498046875, -0.010833740234375, -0.01044464111328125, 0.06951904296875, -0.03533935546875, 0.060760498046875, -0.020294189453125, -0.0165252685546875, 0.032684326171875, -0.005344390869140625, 0.04754638671875, 0.0255279541015625, -0.01326751708984375, 0.04571533203125, -0.0015850067138671875, -0.0300140380859375, -0.016021728515625, 0.04302978515625, -0.0926513671875, -0.055328369140625, -0.045989990234375, -0.0283966064453125, 0.029693603515625, 
0.0089874267578125, 0.038787841796875, -0.002391815185546875, 0.0243072509765625, -0.00864410400390625, 0.0192718505859375, -0.028228759765625, 0.064453125, 0.0262451171875, -0.003803253173828125, -0.05487060546875, 0.0654296875, 0.0005202293395996094, 0.0072174072265625, 0.015899658203125, 0.006763458251953125, -0.032745361328125, -0.0115966796875, -0.039642333984375, 0.0283203125, -0.054046630859375, -0.037750244140625, -0.01537322998046875, -0.05517578125, -0.038055419921875, 0.0030231475830078125, -0.025848388671875, -0.0215301513671875, -0.0511474609375, -0.00800323486328125, 0.031707763671875, 0.03045654296875, -0.0207366943359375, 0.01358795166015625, -0.040863037109375, 0.03857421875, 0.0166473388671875, -0.0038585662841796875, 0.0215911865234375, -0.07049560546875, -0.01558685302734375, 0.0051422119140625, -0.027618408203125, -0.04400634765625, 0.03985595703125, 0.00458526611328125, 0.056884765625, 0.037322998046875, 0.0068511962890625, 0.06817626953125, -0.0198822021484375, 0.06549072265625, 0.0113067626953125, -0.08526611328125, 0.032196044921875, 0.0080108642578125, 0.00455474853515625, 0.00974273681640625, 0.0035953521728515625, -0.0242767333984375, -0.008514404296875, -0.039306640625, -0.0821533203125, 0.060089111328125, 0.0157318115234375, 0.0203094482421875, 0.0035152435302734375, 0.01473236083984375, 0.00493621826171875, -0.0015411376953125, -0.0745849609375, -0.05340576171875, -0.0379638671875, -0.01068115234375, 0.00354766845703125, -0.0271453857421875, -0.00910186767578125, -0.0443115234375, 0.06866455078125, -0.0031337738037109375, 0.052703857421875, 0.025848388671875, -0.0124359130859375, -0.00342559814453125, -0.0168304443359375, 0.05426025390625, 0.028228759765625, -0.00012254714965820312, -0.0066986083984375, 0.048004150390625, -0.04107666015625, 0.02508544921875, 0.0200042724609375, -0.013641357421875, 0.001354217529296875, 0.0380859375, 0.06390380859375, -0.0113067626953125, -0.019073486328125, 0.0258636474609375, -0.00919342041015625, 
-0.0211181640625, -0.05047607421875, 0.00315093994140625, -0.005615234375, 0.026702880859375, 0.050262451171875, -0.008544921875, -0.01071929931640625, -0.0160675048828125, 0.01049041748046875, 0.030303955078125, 0.010040283203125, -0.01558685302734375, 0.068359375, 0.02276611328125, -0.0260162353515625, 0.04962158203125, -0.0243377685546875, -0.053680419921875, 0.0899658203125, 0.044921875, 0.05615234375, -0.01611328125, -0.01174163818359375, 0.05059814453125, 0.0243072509765625, 0.0008530616760253906, 0.03350830078125, -0.0130462646484375, -0.0543212890625, -0.01496124267578125, -0.043121337890625, -0.0089569091796875, 0.020172119140625, -0.037811279296875, 0.032806396484375, -0.04437255859375, -0.005046844482421875, -0.022613525390625, 0.01568603515625, -0.05572509765625, 0.0125885009765625, 0.0208282470703125, 0.06982421875, -0.046112060546875, 0.0648193359375, 0.0341796875, -0.0318603515625, -0.0906982421875, -0.00691986083984375, -0.00634002685546875, -0.09423828125, 0.05914306640625, 0.0213470458984375, 0.006916046142578125, 0.01067352294921875, -0.044036865234375, -0.0972900390625, 0.11669921875, 0.01071929931640625, -0.0369873046875, 0.00003707408905029297, 0.027618408203125, 0.0205078125, -0.007747650146484375, 0.0477294921875, 0.042633056640625, 0.0479736328125, 0.0013599395751953125, -0.0806884765625, 0.027130126953125, -0.0323486328125, -0.005329132080078125, -0.0229034423828125, -0.0809326171875, 0.08258056640625, -0.0164337158203125, -0.004001617431640625, 0.02850341796875, 0.06787109375, 0.04248046875, 0.026397705078125, 0.0340576171875, 0.034423828125, 0.049072265625, -0.01203155517578125, 0.040008544921875, -0.053131103515625, 0.0479736328125, 0.045166015625, -0.00917816162109375, 0.0438232421875, 0.034820556640625, -0.0220489501953125, 0.046295166015625, 0.06341552734375, -0.039031982421875, 0.03826904296875, 0.00901031494140625, -0.0130157470703125, 0.0006561279296875, 0.005275726318359375, -0.05426025390625, 0.03753662109375, 
0.0220184326171875, -0.024627685546875, -0.006465911865234375, -0.00745391845703125, 0.0005555152893066406, -0.0302276611328125, -0.00545501708984375, 0.045684814453125, 0.003978729248046875, -0.03851318359375, 0.07952880859375, 0.0160369873046875, 0.07867431640625, -0.0386962890625, 0.0027599334716796875, -0.02691650390625, 0.01287841796875, -0.0198211669921875, -0.038818359375, 0.0038623809814453125, 0.00640106201171875, -0.00788116455078125, -0.0013189315795898438, 0.037841796875, -0.0142669677734375, -0.047515869140625, 0.037994384765625, 0.01232147216796875, 0.0199737548828125, 0.033294677734375, -0.0701904296875, 0.00693511962890625, 0.0183258056640625, -0.050140380859375, 0.004451751708984375, 0.016571044921875, 0.001499176025390625, 0.05023193359375, 0.0638427734375, -0.00852203369140625, 0.0163116455078125, -0.00044274330139160156, 0.07086181640625, -0.047210693359375, -0.0302886962890625, -0.073974609375, 0.050262451171875, -0.0008873939514160156, -0.041015625, 0.046630859375, 0.05072021484375, 0.06451416015625, -0.004322052001953125, 0.051116943359375, -0.017730712890625, 0.00720977783203125, -0.035797119140625, 0.052276611328125, -0.035064697265625, 0.025238037109375, 0.0029697418212890625, -0.057708740234375, 0.0012502670288085938, 0.07080078125, 0.0022144317626953125, 0.0130615234375, 0.055999755859375, 0.07391357421875, 0.006748199462890625, -0.036651611328125, 0.0024929046630859375, 0.03265380859375, 0.0418701171875, 0.06121826171875, 0.046844482421875, -0.06829833984375, 0.054962158203125, -0.045989990234375, -0.004901885986328125, -0.0102996826171875, -0.054718017578125, -0.0751953125, -0.024078369140625, -0.01097869873046875, -0.042633056640625, -0.023773193359375, 0.08245849609375, 0.043060302734375, -0.07366943359375, -0.0281524658203125, -0.002960205078125, 0.00313568115234375, -0.01418304443359375, -0.01739501953125, 0.051300048828125, -0.02490234375, -0.066650390625, 0.0262908935546875, -0.01116180419921875, 0.022430419921875, 
-0.0251312255859375, -0.03302001953125, -0.0127410888671875, -0.0036907196044921875, 0.01568603515625, 0.0287017822265625, -0.07305908203125, -0.0120391845703125, 0.0212554931640625, -0.0277862548828125, 0.00994110107421875, 0.01180267333984375, -0.043670654296875, 0.006488800048828125, 0.0240478515625, 0.03411865234375, 0.03753662109375, -0.006717681884765625, 0.01611328125, -0.043182373046875, 0.0374755859375, -0.008819580078125, 0.037750244140625, 0.01187896728515625, -0.0404052734375, 0.03656005859375, 0.034820556640625, -0.039886474609375, -0.06451416015625, -0.01058197021484375, -0.07647705078125, -0.00997161865234375, 0.10040283203125, -0.0126190185546875, -0.0360107421875, 0.0160675048828125, -0.0262908935546875, 0.05767822265625, -0.024322509765625, 0.0701904296875, 0.036865234375, -0.006496429443359375, 0.00624847412109375, -0.01416015625, 0.022247314453125, 0.035675048828125, -0.05035400390625, -0.024017333984375, -0.0140228271484375, 0.02447509765625, 0.017364501953125, 0.032379150390625, 0.0187530517578125, 0.019256591796875, -0.0027751922607421875, 0.0156707763671875, -0.03143310546875, 0.0038776397705078125, -0.0172119140625, -0.033935546875, -0.0131988525390625, -0.041534423828125 ] ]
42dot/42dot_LLM-SFT-1.3B
2023-10-28T04:06:18.000Z
[ "transformers", "pytorch", "safetensors", "llama", "text-generation", "causal-lm", "42dot_llm", "en", "ko", "license:cc-by-nc-4.0", "endpoints_compatible", "text-generation-inference", "region:us" ]
text-generation
42dot
null
null
42dot/42dot_LLM-SFT-1.3B
13
5,881
transformers
2023-09-04T07:14:44
--- language: - en - ko pipeline_tag: text-generation tags: - pytorch - llama - causal-lm - 42dot_llm license: cc-by-nc-4.0 --- # 42dot_LLM-SFT-1.3B **42dot LLM-SFT** is a large language model (LLM) developed by [**42dot**](https://42dot.ai/) which is trained to follow natural language instructions. 42dot LLM-SFT is a part of **42dot LLM**, and derived from **42dot LLM-PLM** by supervised fine-tuning (SFT). This repository contains a 1.3B-parameter version. ## Model Description ### Hyperparameters As same as 42dot LLM-PLM, the model is built upon a Transformer decoder architecture similar to the [LLaMA 2](https://ai.meta.com/research/publications/llama-2-open-foundation-and-fine-tuned-chat-models/) and its hyperparameters are listed below. | Params | Layers | Attention heads | Hidden size | FFN size | Max. length\* | | -- | -- | -- | -- | -- | -- | | 1.3B | 24 | 32 | 2,048 | 5,632 | 4,096 | (\* unit: tokens) ### Supervised Fine-tuning Fine-tuning took about 112 GPU hours (in NVIDIA A100). For the training dataset, we manually constructed (question or insturuction) and response pairs, which can either be single- or multi-turn. ### Evaluation Inspired by recent attempts like [Vicuna](https://lmsys.org/blog/2023-03-30-vicuna/#how-good-is-vicuna), we evaluate 42dot LLM-SFT with other proprietary/open-sourced chatbots using GPT-4 for assessing various aspects of responses. The evaluation dataset consists of 121 prompts over 10 categories. The sample of the evaluation dataset and prompt template can be downloaded from our [GitHub repo](https://github.com/42dot/42dot_LLM). 
- Baselines: - [ChatGPT](https://chat.openai.com/) using GPT-3.5-turbo and GPT-4 - [Bard](https://bard.google.com/) - [KORani-v2-13B](https://huggingface.co/KRAFTON/KORani-v1-13B) | Model | GPT-3.5 | GPT-4 | Bard | KORani | 42dot LLM-SFT | | :-- |:-------:|:--------:|:--------:|:------:|:---------:| | Params | Unknown | Unknown | Unknown | 13B | 1.3B | <figure align="center"> <img src="https://huggingface.co/42dot/42dot_LLM-SFT-1.3B/resolve/main/asset/42dot_llm_ko_score_white_background.png"/> <figcaption><b>Response quality evaluation result</b></figcaption> </figure> <figure align="center"> <img src="https://huggingface.co/42dot/42dot_LLM-SFT-1.3B/resolve/main/asset/42dot_LLM_vs_score.png"/> <figcaption><b>Comparison between proprietary chatbots and 42dot LLM-SFT</b></figcaption> </figure> ## Limitations and Ethical Considerations 42dot LLM-SFT shares a number of well-known limitations of other LLMs. For example, it may generate false and misinformative content since 42dot LLM-SFT is also subject to [hallucination](https://en.wikipedia.org/wiki/Hallucination_(artificial_intelligence)). In addition, 42dot LLM-SFT may generate toxic, harmful, and biased content due to the use of web-available training data in the pre-training phase. We strongly suggest that 42dot LLM-SFT users should be aware of those limitations and take necessary steps to mitigate those issues. ## Disclaimer The contents generated by 42dot LLM series ("42dot LLM") do not necessarily reflect the views or opinions of 42dot Inc. ("42dot"). 42dot disclaims any and all liability to any part for any direct, indirect, implied, punitive, special, incidental, or other consequential damages arising from any use of the 42dot LLM and its generated contents. ## License The 42dot LLM-SFT is licensed under the Creative Commons Attribution-NonCommercial 4.0 (CC BY-NC 4.0). 
## Citation ``` @misc{42dot2023llm, title={42dot LLM: A Series of Large Language Model by 42dot}, author={42dot Inc.}, year={2023}, url = {https://github.com/42dot/42dot_LLM}, version = {1.0.0}, } ```
3,722
[ [ -0.051239013671875, -0.084716796875, 0.036773681640625, 0.0299530029296875, -0.0297698974609375, 0.0012874603271484375, -0.0275421142578125, -0.037445068359375, 0.0112762451171875, 0.026824951171875, -0.0631103515625, -0.038177490234375, -0.037445068359375, 0.0148773193359375, -0.01517486572265625, 0.0750732421875, 0.01099395751953125, -0.0094451904296875, -0.01355743408203125, 0.0097808837890625, -0.030731201171875, -0.056884765625, -0.050506591796875, -0.016448974609375, 0.0269927978515625, 0.01654052734375, 0.032012939453125, 0.0258636474609375, 0.035614013671875, 0.0302886962890625, -0.016021728515625, 0.01041412353515625, -0.049285888671875, -0.0142059326171875, 0.018280029296875, -0.0299530029296875, -0.053070068359375, 0.002826690673828125, 0.0380859375, 0.02569580078125, -0.01303863525390625, 0.013458251953125, 0.01303863525390625, 0.050445556640625, -0.0209808349609375, 0.02117919921875, -0.03887939453125, 0.0011148452758789062, -0.00910186767578125, -0.0238189697265625, -0.0218963623046875, -0.03387451171875, 0.01255035400390625, -0.04046630859375, -0.004352569580078125, 0.00384521484375, 0.06085205078125, 0.0208892822265625, -0.01398468017578125, -0.006191253662109375, -0.0360107421875, 0.056304931640625, -0.0703125, 0.026214599609375, 0.03668212890625, 0.0186309814453125, -0.019866943359375, -0.035247802734375, -0.055419921875, -0.0262451171875, -0.0018596649169921875, 0.011627197265625, -0.03070068359375, -0.0006728172302246094, 0.0330810546875, 0.0308990478515625, -0.04656982421875, 0.0235595703125, -0.02606201171875, -0.01885986328125, 0.049652099609375, 0.01519775390625, 0.01229095458984375, -0.0184783935546875, -0.040191650390625, -0.0036602020263671875, -0.040802001953125, 0.001743316650390625, 0.031005859375, 0.01953125, -0.046478271484375, 0.050567626953125, -0.0248870849609375, 0.036590576171875, 0.0224456787109375, -0.007427215576171875, 0.049407958984375, -0.030975341796875, -0.0223388671875, -0.01220703125, 0.102294921875, 
0.0267181396484375, 0.030731201171875, -0.0019407272338867188, 0.00185394287109375, -0.003864288330078125, 0.01361846923828125, -0.0665283203125, 0.015350341796875, 0.02703857421875, -0.0268707275390625, -0.03179931640625, -0.01654052734375, -0.058441162109375, -0.0271148681640625, -0.014984130859375, 0.015869140625, -0.052337646484375, -0.02728271484375, 0.00485992431640625, 0.0099334716796875, 0.03778076171875, 0.0127105712890625, -0.061981201171875, 0.02325439453125, 0.051116943359375, 0.06005859375, -0.0215301513671875, -0.02410888671875, -0.0160064697265625, -0.0268707275390625, -0.01065826416015625, 0.06719970703125, -0.02264404296875, -0.03173828125, -0.0167236328125, 0.00861358642578125, -0.0107879638671875, -0.0234832763671875, 0.044158935546875, -0.0171356201171875, 0.043060302734375, -0.007328033447265625, -0.04901123046875, -0.01605224609375, 0.032501220703125, -0.0239105224609375, 0.0872802734375, 0.0078277587890625, -0.057464599609375, 0.00467681884765625, -0.0521240234375, -0.00588226318359375, -0.0067596435546875, -0.01251220703125, -0.041961669921875, -0.00994110107421875, 0.02392578125, 0.045196533203125, -0.0341796875, 0.015167236328125, -0.026214599609375, -0.036468505859375, 0.0186767578125, -0.04388427734375, 0.06280517578125, 0.004001617431640625, -0.043548583984375, 0.0262451171875, -0.06475830078125, 0.00702667236328125, 0.0199432373046875, -0.0262603759765625, 0.0023899078369140625, -0.0132293701171875, -0.005184173583984375, 0.022064208984375, 0.0291900634765625, -0.041717529296875, 0.0028209686279296875, -0.037689208984375, 0.03228759765625, 0.07318115234375, -0.001220703125, 0.0137176513671875, -0.0277862548828125, 0.043701171875, -0.0041351318359375, 0.0197296142578125, 0.0077972412109375, -0.046051025390625, -0.06866455078125, -0.0238189697265625, 0.007007598876953125, 0.0556640625, -0.048187255859375, 0.069580078125, -0.024200439453125, -0.054656982421875, -0.0279998779296875, 0.002315521240234375, 0.039886474609375, 
0.04437255859375, 0.0229339599609375, -0.0243682861328125, -0.0404052734375, -0.07049560546875, 0.0011014938354492188, -0.0178375244140625, -0.006290435791015625, 0.030059814453125, 0.04266357421875, -0.019744873046875, 0.055328369140625, -0.0215911865234375, -0.0101470947265625, -0.009033203125, -0.007099151611328125, 0.02264404296875, 0.03948974609375, 0.040313720703125, -0.046661376953125, -0.038818359375, -0.0035839080810546875, -0.06756591796875, 0.0031871795654296875, -0.0035400390625, -0.0185546875, 0.02386474609375, 0.0224761962890625, -0.07012939453125, 0.025787353515625, 0.043548583984375, -0.0374755859375, 0.052276611328125, -0.003925323486328125, 0.01399993896484375, -0.08831787109375, 0.01247406005859375, 0.00817108154296875, -0.0172882080078125, -0.048431396484375, -0.01434326171875, -0.01068115234375, -0.0025234222412109375, -0.05316162109375, 0.047637939453125, -0.0261688232421875, 0.00527191162109375, -0.004730224609375, 0.00775909423828125, -0.0298919677734375, 0.0738525390625, -0.0050811767578125, 0.07177734375, 0.049774169921875, -0.040435791015625, 0.041595458984375, 0.0340576171875, -0.024505615234375, 0.059173583984375, -0.06573486328125, 0.0274658203125, 0.014434814453125, 0.02032470703125, -0.059539794921875, -0.01210784912109375, 0.044891357421875, -0.05224609375, 0.00836181640625, 0.0017728805541992188, -0.054840087890625, -0.033966064453125, -0.036376953125, 0.0083770751953125, 0.057342529296875, -0.0275115966796875, 0.031280517578125, 0.0174560546875, -0.01038360595703125, -0.03564453125, -0.051605224609375, -0.0015192031860351562, -0.043365478515625, -0.043060302734375, 0.00667572021484375, -0.0240020751953125, 0.00550079345703125, -0.00894927978515625, 0.0031566619873046875, 0.0013856887817382812, 0.0080108642578125, 0.0260772705078125, 0.01947021484375, -0.0129241943359375, 0.0008511543273925781, -0.019866943359375, -0.0133514404296875, 0.0084991455078125, 0.0036373138427734375, 0.06878662109375, -0.0352783203125, 
-0.0305633544921875, -0.06280517578125, 0.0160980224609375, 0.0423583984375, -0.00829315185546875, 0.056427001953125, 0.057373046875, -0.0181121826171875, 0.020294189453125, -0.0552978515625, -0.0126495361328125, -0.0399169921875, 0.0256500244140625, -0.022430419921875, -0.078125, 0.046051025390625, 0.0158538818359375, -0.00482940673828125, 0.044830322265625, 0.0694580078125, -0.00428009033203125, 0.0738525390625, 0.0413818359375, -0.01268768310546875, 0.0491943359375, -0.01800537109375, -0.00022733211517333984, -0.07354736328125, -0.0244598388671875, -0.0225372314453125, -0.01226806640625, -0.0623779296875, -0.03778076171875, 0.01934814453125, -0.00859832763671875, -0.043731689453125, 0.0257568359375, -0.041595458984375, 0.019134521484375, 0.04473876953125, 0.02069091796875, 0.012237548828125, 0.0079498291015625, -0.0016412734985351562, 0.0093994140625, -0.04119873046875, -0.04595947265625, 0.08319091796875, 0.046234130859375, 0.04296875, -0.0012903213500976562, 0.051788330078125, 0.00046706199645996094, 0.00772857666015625, -0.061767578125, 0.0616455078125, 0.0081329345703125, -0.041107177734375, -0.0277252197265625, -0.045989990234375, -0.0791015625, 0.01276397705078125, -0.01398468017578125, -0.073486328125, 0.00569915771484375, 0.00018203258514404297, -0.0249176025390625, 0.015869140625, -0.0631103515625, 0.055572509765625, -0.0247039794921875, -0.0174102783203125, -0.012481689453125, -0.05450439453125, 0.0277252197265625, -0.005035400390625, 0.005786895751953125, -0.0213165283203125, 0.005222320556640625, 0.058624267578125, -0.046112060546875, 0.0863037109375, -0.011627197265625, -0.0140533447265625, 0.0253753662109375, -0.0173797607421875, 0.0477294921875, -0.01290130615234375, -0.00481414794921875, 0.0243988037109375, -0.01355743408203125, -0.0330810546875, -0.035430908203125, 0.051116943359375, -0.06744384765625, -0.0511474609375, -0.0282745361328125, -0.0301513671875, -0.031402587890625, 0.015838623046875, 0.0201416015625, 0.01458740234375, 
-0.0135040283203125, 0.0272064208984375, 0.0540771484375, -0.0384521484375, 0.021209716796875, 0.0318603515625, 0.00047850608825683594, -0.026702880859375, 0.0657958984375, -0.0170135498046875, 0.0237274169921875, 0.019500732421875, 0.0164337158203125, -0.0286712646484375, -0.0306549072265625, -0.0243988037109375, 0.030792236328125, -0.03240966796875, -0.01412200927734375, -0.039642333984375, -0.029266357421875, -0.028411865234375, 0.0116729736328125, -0.031280517578125, -0.025665283203125, -0.03765869140625, -0.00933837890625, 0.058746337890625, 0.0263519287109375, 0.002841949462890625, 0.028778076171875, -0.041961669921875, 0.02996826171875, 0.0257415771484375, 0.049530029296875, -0.01148223876953125, -0.044891357421875, 0.0013637542724609375, 0.0179595947265625, -0.02557373046875, -0.04925537109375, 0.0202789306640625, 0.0097198486328125, 0.0482177734375, 0.01153564453125, -0.011627197265625, 0.057098388671875, -0.042144775390625, 0.066162109375, 0.02984619140625, -0.056121826171875, 0.041473388671875, -0.04278564453125, 0.01947021484375, 0.028289794921875, 0.0274658203125, -0.03326416015625, -0.034210205078125, -0.056732177734375, -0.048004150390625, 0.051788330078125, 0.0304718017578125, 0.0298614501953125, -0.0012578964233398438, 0.0158233642578125, 0.000335693359375, 0.0164794921875, -0.0546875, -0.0296478271484375, -0.018890380859375, -0.01403045654296875, -0.00565338134765625, -0.0250091552734375, -0.00812530517578125, -0.0181427001953125, 0.04541015625, -0.007030487060546875, 0.052154541015625, 0.006343841552734375, 0.006221771240234375, 0.0066375732421875, 0.0255279541015625, 0.05804443359375, 0.04266357421875, -0.0154571533203125, -0.015594482421875, 0.0262451171875, -0.034454345703125, -0.004360198974609375, 0.0220794677734375, 0.01151275634765625, -0.020660400390625, 0.0277252197265625, 0.0904541015625, 0.01259613037109375, -0.050384521484375, 0.040374755859375, -0.01036834716796875, -0.030059814453125, -0.0286407470703125, 0.0015058517456054688, 
0.01357269287109375, 0.03387451171875, 0.0185546875, -0.0097198486328125, -0.0008730888366699219, -0.035247802734375, 0.009857177734375, 0.0274505615234375, -0.0236663818359375, -0.0258026123046875, 0.04486083984375, 0.016998291015625, -0.02349853515625, 0.040985107421875, -0.00868988037109375, -0.042083740234375, 0.046905517578125, 0.030609130859375, 0.06878662109375, -0.005809783935546875, 0.01464080810546875, 0.0321044921875, 0.01526641845703125, 0.0012693405151367188, 0.026702880859375, 0.0005507469177246094, -0.041595458984375, -0.0301666259765625, -0.042327880859375, -0.0254669189453125, 0.0234222412109375, -0.034454345703125, 0.0255279541015625, -0.036865234375, -0.01959228515625, -0.0155487060546875, 0.0066986083984375, -0.06439208984375, 0.01119232177734375, 0.0139617919921875, 0.059539794921875, -0.0380859375, 0.049163818359375, 0.034637451171875, -0.036376953125, -0.06201171875, -0.012115478515625, 0.01293182373046875, -0.08740234375, 0.0252838134765625, 0.007144927978515625, -0.0017833709716796875, 0.002315521240234375, -0.046295166015625, -0.054229736328125, 0.082763671875, 0.02606201171875, -0.04791259765625, -0.005706787109375, 0.0133819580078125, 0.0596923828125, -0.008636474609375, 0.0293731689453125, 0.0413818359375, 0.0242919921875, 0.00545501708984375, -0.1015625, 0.0235595703125, -0.0322265625, 0.001491546630859375, -0.00583648681640625, -0.076904296875, 0.04937744140625, -0.00467681884765625, -0.00780487060546875, 0.01316070556640625, 0.039825439453125, 0.046844482421875, -0.0027828216552734375, 0.0399169921875, 0.040802001953125, 0.04803466796875, -0.0088043212890625, 0.086669921875, -0.0242919921875, 0.036224365234375, 0.07122802734375, 0.0211639404296875, 0.06591796875, 0.0399169921875, -0.028411865234375, 0.031463623046875, 0.06768798828125, -0.0175933837890625, 0.03253173828125, 0.0214385986328125, -0.0028781890869140625, -0.015838623046875, 0.00553131103515625, -0.04425048828125, 0.032867431640625, 0.021759033203125, -0.0150604248046875, 
0.0013370513916015625, -0.002819061279296875, 0.0133819580078125, -0.01395416259765625, -0.001483917236328125, 0.059661865234375, 0.015411376953125, -0.032501220703125, 0.078125, -0.025848388671875, 0.0782470703125, -0.042510986328125, 0.00865936279296875, -0.0282440185546875, -0.007415771484375, -0.01433563232421875, -0.05987548828125, 0.0085906982421875, 0.0233001708984375, 0.00501251220703125, -0.018280029296875, 0.034820556640625, -0.01294708251953125, -0.04559326171875, 0.0180511474609375, 0.040191650390625, 0.034423828125, -0.006374359130859375, -0.06915283203125, 0.01039886474609375, 0.0013151168823242188, -0.055389404296875, 0.0197296142578125, 0.038055419921875, 0.0011997222900390625, 0.07244873046875, 0.050018310546875, 0.0022068023681640625, 0.002315521240234375, 0.00463104248046875, 0.0802001953125, -0.051239013671875, -0.0293731689453125, -0.07403564453125, 0.04736328125, -0.01079559326171875, -0.0325927734375, 0.059234619140625, 0.038604736328125, 0.050201416015625, 0.00018262863159179688, 0.059967041015625, -0.0162200927734375, 0.043975830078125, -0.03826904296875, 0.051055908203125, -0.044586181640625, 0.0274505615234375, -0.0281524658203125, -0.069091796875, -0.00768280029296875, 0.048614501953125, -0.0286407470703125, 0.0188140869140625, 0.044097900390625, 0.056549072265625, 0.00527191162109375, -0.0106048583984375, 0.0172576904296875, 0.02587890625, 0.02374267578125, 0.055999755859375, 0.06951904296875, -0.034881591796875, 0.05194091796875, -0.027191162109375, -0.018585205078125, -0.01450347900390625, -0.0518798828125, -0.08636474609375, -0.043243408203125, -0.0242767333984375, -0.016021728515625, -0.009368896484375, 0.0638427734375, 0.055419921875, -0.050445556640625, -0.01247406005859375, -0.007965087890625, -0.0004544258117675781, -0.002025604248046875, -0.018707275390625, 0.0291290283203125, -0.01287841796875, -0.06219482421875, 0.0204925537109375, -0.0021820068359375, 0.021087646484375, -0.02069091796875, -0.01047515869140625, 
-0.015167236328125, -0.00981903076171875, 0.055389404296875, 0.021453857421875, -0.0623779296875, -0.0325927734375, 0.01322174072265625, -0.01222991943359375, 0.0234222412109375, 0.019744873046875, -0.046478271484375, 0.0160675048828125, 0.0271148681640625, 0.03936767578125, 0.047637939453125, 0.00716400146484375, 0.0367431640625, -0.050933837890625, 0.02197265625, 0.01117706298828125, 0.01161956787109375, 0.02606201171875, -0.038604736328125, 0.04437255859375, 0.01505279541015625, -0.043914794921875, -0.0615234375, -0.002719879150390625, -0.0965576171875, -0.0015058517456054688, 0.1064453125, -0.00270843505859375, -0.01299285888671875, 0.01508331298828125, -0.0330810546875, 0.0185546875, -0.0391845703125, 0.056549072265625, 0.05194091796875, -0.029083251953125, -0.0273284912109375, -0.056060791015625, 0.033966064453125, 0.02197265625, -0.0816650390625, -0.00879669189453125, 0.030792236328125, 0.033905029296875, 0.0078125, 0.0638427734375, -0.00920867919921875, 0.0069580078125, -0.00872039794921875, 0.006992340087890625, -0.00514984130859375, 0.022613525390625, -0.0254364013671875, -0.0169525146484375, -0.01190948486328125, -0.0233306884765625 ] ]
h2oai/h2ogpt-research-oasst1-llama-65b
2023-07-06T03:11:31.000Z
[ "transformers", "pytorch", "llama", "text-generation", "gpt", "llm", "large language model", "open-source", "en", "dataset:h2oai/openassistant_oasst1_h2ogpt_graded", "license:other", "has_space", "text-generation-inference", "region:us" ]
text-generation
h2oai
null
null
h2oai/h2ogpt-research-oasst1-llama-65b
9
5,879
transformers
2023-05-13T18:11:13
--- license: other language: - en library_name: transformers inference: false thumbnail: https://h2o.ai/etc.clientlibs/h2o/clientlibs/clientlib-site/resources/images/favicon.ico tags: - gpt - llm - large language model - open-source datasets: - h2oai/openassistant_oasst1_h2ogpt_graded --- # h2oGPT Model Card ## Summary H2O.ai's `h2ogpt-research-oasst1-llama-65b` is a 65 billion parameter instruction-following large language model (NOT licensed for commercial use). - Base model: [decapoda-research/llama-65b-hf](https://huggingface.co/decapoda-research/llama-65b-hf) - Fine-tuning dataset: [h2oai/openassistant_oasst1_h2ogpt_graded](https://huggingface.co/datasets/h2oai/openassistant_oasst1_h2ogpt_graded) - Data-prep and fine-tuning code: [H2O.ai GitHub](https://github.com/h2oai/h2ogpt) - Training logs: [zip](https://huggingface.co/h2oai/h2ogpt-research-oasst1-llama-65b/blob/main/llama-65b-hf.h2oaiopenassistant_oasst1_h2ogpt_graded.1_epochs.113510499324f0f007cbec9d9f1f8091441f2469.3.zip) ## Chatbot - Run your own chatbot: [H2O.ai GitHub](https://github.com/h2oai/h2ogpt) [![H2O.ai GitHub](https://user-images.githubusercontent.com/6147661/232930822-e7170e4d-8aa1-4f7a-ad70-ece9cdd8b0cb.png)](https://github.com/h2oai/h2ogpt) ## Usage To use the model with the `transformers` library on a machine with GPUs, first make sure you have the following libraries installed. 
```bash pip install transformers==4.29.2 pip install accelerate==0.19.0 pip install torch==2.0.1 pip install einops==0.6.1 ``` ```python import torch from transformers import pipeline, AutoTokenizer tokenizer = AutoTokenizer.from_pretrained("h2oai/h2ogpt-research-oasst1-llama-65b", padding_side="left") generate_text = pipeline(model="h2oai/h2ogpt-research-oasst1-llama-65b", tokenizer=tokenizer, torch_dtype=torch.bfloat16, trust_remote_code=True, device_map="auto", prompt_type="human_bot") res = generate_text("Why is drinking water so healthy?", max_new_tokens=100) print(res[0]["generated_text"]) ``` Alternatively, if you prefer to not use `trust_remote_code=True` you can download [instruct_pipeline.py](https://huggingface.co/h2oai/h2ogpt-research-oasst1-llama-65b/blob/main/h2oai_pipeline.py), store it alongside your notebook, and construct the pipeline yourself from the loaded model and tokenizer: ```python import torch from h2oai_pipeline import H2OTextGenerationPipeline from transformers import AutoModelForCausalLM, AutoTokenizer tokenizer = AutoTokenizer.from_pretrained("h2oai/h2ogpt-research-oasst1-llama-65b", padding_side="left") model = AutoModelForCausalLM.from_pretrained("h2oai/h2ogpt-research-oasst1-llama-65b", torch_dtype=torch.bfloat16, device_map="auto") generate_text = H2OTextGenerationPipeline(model=model, tokenizer=tokenizer, prompt_type="human_bot") res = generate_text("Why is drinking water so healthy?", max_new_tokens=100) print(res[0]["generated_text"]) ``` ## Model Architecture ``` LlamaForCausalLM( (model): LlamaModel( (embed_tokens): Embedding(32000, 8192, padding_idx=31999) (layers): ModuleList( (0-79): 80 x LlamaDecoderLayer( (self_attn): LlamaAttention( (q_proj): Linear(in_features=8192, out_features=8192, bias=False) (k_proj): Linear(in_features=8192, out_features=8192, bias=False) (v_proj): Linear(in_features=8192, out_features=8192, bias=False) (o_proj): Linear(in_features=8192, out_features=8192, bias=False) (rotary_emb): 
LlamaRotaryEmbedding() ) (mlp): LlamaMLP( (gate_proj): Linear(in_features=8192, out_features=22016, bias=False) (down_proj): Linear(in_features=22016, out_features=8192, bias=False) (up_proj): Linear(in_features=8192, out_features=22016, bias=False) (act_fn): SiLUActivation() ) (input_layernorm): LlamaRMSNorm() (post_attention_layernorm): LlamaRMSNorm() ) ) (norm): LlamaRMSNorm() ) (lm_head): Linear(in_features=8192, out_features=32000, bias=False) ) ``` ## Model Configuration ```json LlamaConfig { "_name_or_path": "h2oai/h2ogpt-research-oasst1-llama-65b", "architectures": [ "LlamaForCausalLM" ], "bos_token_id": 0, "custom_pipelines": { "text-generation": { "impl": "h2oai_pipeline.H2OTextGenerationPipeline", "pt": "AutoModelForCausalLM" } }, "eos_token_id": 1, "hidden_act": "silu", "hidden_size": 8192, "initializer_range": 0.02, "intermediate_size": 22016, "max_position_embeddings": 2048, "max_sequence_length": 2048, "model_type": "llama", "num_attention_heads": 64, "num_hidden_layers": 80, "pad_token_id": -1, "rms_norm_eps": 1e-05, "tie_word_embeddings": false, "torch_dtype": "float16", "transformers_version": "4.30.1", "use_cache": true, "vocab_size": 32000 } ``` ## Model Validation Model validation results using [EleutherAI lm-evaluation-harness](https://github.com/EleutherAI/lm-evaluation-harness). TBD ## Disclaimer Please read this disclaimer carefully before using the large language model provided in this repository. Your use of the model signifies your agreement to the following terms and conditions. - Biases and Offensiveness: The large language model is trained on a diverse range of internet text data, which may contain biased, racist, offensive, or otherwise inappropriate content. By using this model, you acknowledge and accept that the generated content may sometimes exhibit biases or produce content that is offensive or inappropriate. The developers of this repository do not endorse, support, or promote any such content or viewpoints. 
- Limitations: The large language model is an AI-based tool and not a human. It may produce incorrect, nonsensical, or irrelevant responses. It is the user's responsibility to critically evaluate the generated content and use it at their discretion. - Use at Your Own Risk: Users of this large language model must assume full responsibility for any consequences that may arise from their use of the tool. The developers and contributors of this repository shall not be held liable for any damages, losses, or harm resulting from the use or misuse of the provided model. - Ethical Considerations: Users are encouraged to use the large language model responsibly and ethically. By using this model, you agree not to use it for purposes that promote hate speech, discrimination, harassment, or any form of illegal or harmful activities. - Reporting Issues: If you encounter any biased, offensive, or otherwise inappropriate content generated by the large language model, please report it to the repository maintainers through the provided channels. Your feedback will help improve the model and mitigate potential issues. - Changes to this Disclaimer: The developers of this repository reserve the right to modify or update this disclaimer at any time without prior notice. It is the user's responsibility to periodically review the disclaimer to stay informed about any changes. By using the large language model provided in this repository, you agree to accept and comply with the terms and conditions outlined in this disclaimer. If you do not agree with any part of this disclaimer, you should refrain from using the model and any content generated by it.
7,271
[ [ -0.01654052734375, -0.058990478515625, 0.0265350341796875, 0.01378631591796875, -0.0247802734375, -0.004428863525390625, -0.0159759521484375, -0.0300140380859375, 0.01189422607421875, 0.0257415771484375, -0.032379150390625, -0.047027587890625, -0.050201416015625, -0.0052490234375, -0.007266998291015625, 0.06329345703125, -0.006999969482421875, -0.0142822265625, 0.0031185150146484375, 0.01007080078125, -0.01233673095703125, -0.02825927734375, -0.052459716796875, -0.0271759033203125, 0.024505615234375, 0.0182037353515625, 0.036224365234375, 0.05828857421875, 0.032196044921875, 0.022918701171875, -0.0150146484375, -0.0022449493408203125, -0.035491943359375, -0.00782012939453125, 0.00231170654296875, -0.0253753662109375, -0.041900634765625, -0.005527496337890625, 0.0450439453125, 0.0208740234375, -0.0015201568603515625, 0.032501220703125, 0.0164794921875, 0.031585693359375, -0.03826904296875, 0.040802001953125, -0.036041259765625, -0.007381439208984375, -0.0125274658203125, 0.0017938613891601562, -0.0230712890625, -0.0003113746643066406, 0.0167694091796875, -0.04278564453125, -0.0096588134765625, 0.020111083984375, 0.09063720703125, 0.0257415771484375, -0.0228271484375, -0.019195556640625, -0.0277862548828125, 0.063232421875, -0.08636474609375, 0.0216217041015625, 0.037200927734375, -0.0009832382202148438, -0.0032482147216796875, -0.055023193359375, -0.045654296875, -0.0171966552734375, -0.0064697265625, 0.01361846923828125, -0.018096923828125, -0.002307891845703125, 0.01849365234375, 0.01457977294921875, -0.043792724609375, 0.0109710693359375, -0.038543701171875, -0.0204315185546875, 0.0545654296875, 0.00989532470703125, 0.012908935546875, -0.038238525390625, -0.0295257568359375, -0.0078582763671875, -0.036407470703125, 0.0083770751953125, 0.032867431640625, 0.016265869140625, -0.049468994140625, 0.05462646484375, -0.018402099609375, 0.03509521484375, -0.0014429092407226562, -0.0218353271484375, 0.041717529296875, -0.0241241455078125, -0.0228118896484375, 
0.00925445556640625, 0.08538818359375, 0.0196685791015625, 0.005519866943359375, 0.02008056640625, -0.01142120361328125, -0.0140533447265625, -0.0121612548828125, -0.07440185546875, -0.014251708984375, 0.035797119140625, -0.034912109375, -0.0258636474609375, -0.005634307861328125, -0.061859130859375, -0.016326904296875, 0.0015392303466796875, 0.03131103515625, -0.0253448486328125, -0.027679443359375, 0.0157928466796875, -0.004730224609375, 0.0265350341796875, 0.005706787109375, -0.0570068359375, 0.00804901123046875, 0.038421630859375, 0.06707763671875, -0.0007319450378417969, -0.0275726318359375, -0.0247955322265625, 0.012603759765625, -0.0006375312805175781, 0.036651611328125, -0.0218963623046875, -0.02008056640625, -0.0038471221923828125, 0.01387786865234375, -0.011322021484375, -0.0268096923828125, 0.0474853515625, -0.0182342529296875, 0.033416748046875, 0.0013704299926757812, -0.033782958984375, -0.00989532470703125, 0.0196533203125, -0.03125, 0.09039306640625, 0.01067352294921875, -0.0665283203125, 0.0023193359375, -0.059295654296875, -0.01995849609375, -0.01079559326171875, -0.0006227493286132812, -0.049224853515625, -0.0217132568359375, 0.0224761962890625, 0.0289459228515625, -0.034271240234375, 0.019134521484375, -0.020965576171875, -0.01317596435546875, 0.001743316650390625, -0.035003662109375, 0.08856201171875, 0.018707275390625, -0.04412841796875, 0.01151275634765625, -0.059173583984375, -0.001995086669921875, 0.036346435546875, -0.039520263671875, -0.0010251998901367188, -0.00972747802734375, 0.0111541748046875, 0.0210113525390625, 0.034149169921875, -0.041595458984375, 0.015289306640625, -0.037506103515625, 0.061798095703125, 0.061004638671875, 0.0012073516845703125, 0.020538330078125, -0.018310546875, 0.030029296875, 0.00794219970703125, 0.018768310546875, -0.0135345458984375, -0.06475830078125, -0.061798095703125, -0.032257080078125, 0.0184478759765625, 0.05133056640625, -0.0401611328125, 0.05963134765625, -0.033172607421875, -0.04437255859375, 
-0.04827880859375, 0.009033203125, 0.0296173095703125, 0.046905517578125, 0.027984619140625, -0.009979248046875, -0.034881591796875, -0.06787109375, 0.01227569580078125, -0.024169921875, -0.0035419464111328125, 0.0380859375, 0.06689453125, -0.031585693359375, 0.0545654296875, -0.05401611328125, -0.0166778564453125, -0.0070953369140625, 0.00766754150390625, 0.033203125, 0.046051025390625, 0.0311279296875, -0.0357666015625, -0.030609130859375, -0.00647735595703125, -0.0518798828125, -0.0009694099426269531, 0.00013339519500732422, -0.0206756591796875, 0.014892578125, 0.0235595703125, -0.07025146484375, 0.032958984375, 0.053375244140625, -0.035980224609375, 0.034637451171875, -0.0193634033203125, -0.0012607574462890625, -0.10260009765625, 0.0194244384765625, 0.0025997161865234375, -0.00345611572265625, -0.0316162109375, -0.0011615753173828125, -0.0045318603515625, -0.005397796630859375, -0.054656982421875, 0.0638427734375, -0.030731201171875, 0.0159149169921875, 0.00015115737915039062, 0.00794219970703125, 0.0014486312866210938, 0.0477294921875, -0.007205963134765625, 0.0440673828125, 0.05120849609375, -0.03472900390625, 0.032257080078125, 0.0189666748046875, -0.027679443359375, 0.0111846923828125, -0.074462890625, 0.0069580078125, 0.0008745193481445312, 0.022186279296875, -0.08538818359375, -0.03173828125, 0.038818359375, -0.046600341796875, 0.025909423828125, -0.012786865234375, -0.040008544921875, -0.044830322265625, -0.032501220703125, 0.027862548828125, 0.05621337890625, -0.033782958984375, 0.036651611328125, 0.0246124267578125, -0.0017566680908203125, -0.04742431640625, -0.058990478515625, -0.003570556640625, -0.021759033203125, -0.0560302734375, 0.029754638671875, 0.002780914306640625, -0.00174713134765625, 0.00963592529296875, 0.0098876953125, 0.0026607513427734375, 0.0038700103759765625, 0.0242767333984375, 0.01788330078125, -0.01064300537109375, -0.005634307861328125, -0.00160980224609375, -0.004947662353515625, 0.0019407272338867188, -0.0178375244140625, 
0.07159423828125, -0.0026187896728515625, -0.0223846435546875, -0.0665283203125, 0.00199127197265625, 0.04998779296875, -0.0156402587890625, 0.05731201171875, 0.060577392578125, -0.042144775390625, 0.008819580078125, -0.03192138671875, -0.0197906494140625, -0.0361328125, 0.037750244140625, -0.013153076171875, -0.0537109375, 0.04876708984375, 0.01303863525390625, 0.003971099853515625, 0.052978515625, 0.0587158203125, -0.0102691650390625, 0.0701904296875, 0.01800537109375, -0.0168914794921875, 0.03326416015625, -0.04901123046875, 0.01508331298828125, -0.078369140625, -0.021697998046875, -0.036712646484375, -0.017364501953125, -0.056549072265625, -0.041961669921875, 0.0184478759765625, 0.00849151611328125, -0.04736328125, 0.027618408203125, -0.0311126708984375, 0.005130767822265625, 0.052154541015625, 0.0103912353515625, 0.0000921487808227539, 0.00543212890625, -0.018798828125, 0.00608062744140625, -0.041290283203125, -0.04736328125, 0.0833740234375, 0.055084228515625, 0.046905517578125, -0.001132965087890625, 0.059417724609375, 0.007289886474609375, 0.00975799560546875, -0.056304931640625, 0.033355712890625, 0.01409149169921875, -0.05865478515625, -0.0288238525390625, -0.026580810546875, -0.07794189453125, 0.01032257080078125, -0.01445770263671875, -0.068359375, 0.0097198486328125, 0.01523590087890625, -0.0283355712890625, 0.027557373046875, -0.0606689453125, 0.060272216796875, -0.0198211669921875, -0.045501708984375, -0.00787353515625, -0.048065185546875, 0.022064208984375, 0.01407623291015625, 0.0175628662109375, -0.0135955810546875, -0.014923095703125, 0.0635986328125, -0.031402587890625, 0.07086181640625, -0.01293182373046875, -0.0115203857421875, 0.04150390625, -0.0219573974609375, 0.047393798828125, 0.007228851318359375, -0.011016845703125, 0.028533935546875, -0.016510009765625, -0.039825439453125, -0.0259246826171875, 0.056671142578125, -0.070068359375, -0.050506591796875, -0.0389404296875, -0.029144287109375, 0.011138916015625, 0.021270751953125, 
0.0213775634765625, 0.016143798828125, 0.0154266357421875, 0.020721435546875, 0.0309906005859375, -0.03668212890625, 0.040435791015625, 0.00804901123046875, -0.01247406005859375, -0.04986572265625, 0.0679931640625, 0.00531768798828125, 0.0242919921875, 0.017425537109375, 0.022979736328125, -0.027069091796875, -0.032440185546875, -0.049591064453125, 0.0340576171875, -0.037933349609375, -0.01247406005859375, -0.047576904296875, -0.0260772705078125, -0.04290771484375, 0.00911712646484375, -0.0307464599609375, -0.0218963623046875, -0.041351318359375, -0.004840850830078125, 0.03338623046875, 0.03326416015625, 0.00251007080078125, 0.02252197265625, -0.051483154296875, 0.0196380615234375, 0.027130126953125, 0.0194091796875, 0.0008339881896972656, -0.06427001953125, -0.0182037353515625, 0.0183868408203125, -0.037445068359375, -0.056365966796875, 0.0404052734375, 0.004245758056640625, 0.038238525390625, 0.026397705078125, -0.0063934326171875, 0.04888916015625, -0.0276641845703125, 0.075439453125, 0.0159912109375, -0.072509765625, 0.046142578125, -0.037139892578125, 0.0306243896484375, 0.01415252685546875, 0.030670166015625, -0.028228759765625, -0.01934814453125, -0.04925537109375, -0.08245849609375, 0.05218505859375, 0.0296478271484375, 0.0160369873046875, 0.00589752197265625, 0.0233612060546875, -0.018768310546875, 0.01093292236328125, -0.0662841796875, -0.031341552734375, -0.01233673095703125, -0.027130126953125, 0.00662994384765625, -0.0127410888671875, -0.0088653564453125, -0.035919189453125, 0.06475830078125, -0.003787994384765625, 0.04278564453125, 0.0230712890625, -0.006313323974609375, -0.002685546875, 0.014190673828125, 0.0472412109375, 0.03631591796875, -0.020233154296875, 0.0005927085876464844, 0.01517486572265625, -0.048370361328125, 0.003719329833984375, 0.0134429931640625, -0.023345947265625, -0.0128173828125, 0.037445068359375, 0.08135986328125, 0.00508880615234375, -0.048004150390625, 0.034332275390625, 0.00614166259765625, -0.03106689453125, 
-0.02349853515625, 0.0030460357666015625, 0.0328369140625, 0.0159149169921875, 0.0269775390625, -0.00881195068359375, -0.0065460205078125, -0.045806884765625, -0.0024089813232421875, 0.02362060546875, -0.0178985595703125, -0.0215911865234375, 0.06787109375, 0.0181121826171875, -0.023101806640625, 0.0538330078125, -0.023040771484375, -0.042144775390625, 0.059600830078125, 0.03765869140625, 0.0631103515625, -0.007076263427734375, 0.006412506103515625, 0.05706787109375, 0.031768798828125, -0.0014638900756835938, 0.0206146240234375, 0.01389312744140625, -0.046112060546875, -0.01971435546875, -0.04833984375, -0.004535675048828125, 0.0247802734375, -0.039947509765625, 0.036468505859375, -0.037139892578125, -0.0188140869140625, -0.01171112060546875, 0.005184173583984375, -0.056396484375, 0.0128326416015625, 0.018585205078125, 0.055816650390625, -0.058929443359375, 0.06817626953125, 0.043670654296875, -0.04144287109375, -0.0567626953125, 0.0012044906616210938, 0.0012903213500976562, -0.07373046875, 0.04559326171875, 0.0135040283203125, 0.00823974609375, 0.01287078857421875, -0.0474853515625, -0.0814208984375, 0.10699462890625, 0.026336669921875, -0.04412841796875, -0.00885009765625, 0.011962890625, 0.040557861328125, -0.0238189697265625, 0.039337158203125, 0.04278564453125, 0.0369873046875, 0.0025959014892578125, -0.08441162109375, 0.0185546875, -0.023345947265625, -0.005279541015625, -0.00835418701171875, -0.07293701171875, 0.071533203125, -0.03143310546875, -0.014404296875, -0.00019025802612304688, 0.03887939453125, 0.036956787109375, 0.01458740234375, 0.0284271240234375, 0.06005859375, 0.05242919921875, -0.0077667236328125, 0.0782470703125, -0.03955078125, 0.049652099609375, 0.07421875, -0.005924224853515625, 0.07733154296875, 0.040771484375, -0.0183258056640625, 0.035919189453125, 0.055999755859375, -0.006195068359375, 0.03228759765625, 0.02557373046875, -0.006744384765625, -0.004192352294921875, -0.0148468017578125, -0.038299560546875, 0.0364990234375, 
0.0254974365234375, -0.047210693359375, 0.007167816162109375, -0.0173492431640625, 0.018280029296875, -0.0269927978515625, -0.0062713623046875, 0.040802001953125, 0.00644683837890625, -0.03173828125, 0.07061767578125, -0.00017273426055908203, 0.06707763671875, -0.03253173828125, -0.0003864765167236328, 0.0016727447509765625, 0.007617950439453125, -0.0160675048828125, -0.0552978515625, -0.00008577108383178711, -0.0026092529296875, -0.001739501953125, -0.0056915283203125, 0.0340576171875, -0.02685546875, -0.04608154296875, 0.0318603515625, 0.028045654296875, 0.0123443603515625, 0.01117706298828125, -0.07177734375, 0.0137786865234375, 0.0091094970703125, -0.051544189453125, 0.01158905029296875, 0.018096923828125, 0.011688232421875, 0.058441162109375, 0.0460205078125, 0.006969451904296875, 0.0126190185546875, -0.002933502197265625, 0.063720703125, -0.049407958984375, -0.0163421630859375, -0.07135009765625, 0.059417724609375, -0.01373291015625, -0.041107177734375, 0.050628662109375, 0.05816650390625, 0.06842041015625, -0.00782012939453125, 0.06671142578125, -0.0171051025390625, 0.0166473388671875, -0.031768798828125, 0.05908203125, -0.046844482421875, 0.0148773193359375, -0.01476287841796875, -0.060699462890625, -0.00661468505859375, 0.058746337890625, -0.02142333984375, 0.01015472412109375, 0.05029296875, 0.07049560546875, 0.000774383544921875, -0.018707275390625, 0.0052337646484375, 0.04144287109375, 0.0295867919921875, 0.048919677734375, 0.038543701171875, -0.0574951171875, 0.04180908203125, -0.034149169921875, -0.01068878173828125, -0.016632080078125, -0.059295654296875, -0.06561279296875, -0.04449462890625, -0.035369873046875, -0.038055419921875, -0.01238250732421875, 0.088134765625, 0.061004638671875, -0.057403564453125, -0.0259857177734375, 0.002262115478515625, 0.006496429443359375, 0.00009679794311523438, -0.0197601318359375, 0.0408935546875, -0.002300262451171875, -0.0748291015625, 0.0004203319549560547, 0.009765625, 0.01479339599609375, -0.02899169921875, 
-0.0211029052734375, -0.033966064453125, -0.0014657974243164062, 0.04541015625, 0.015533447265625, -0.057342529296875, -0.0190887451171875, 0.0015697479248046875, -0.0178375244140625, 0.008026123046875, 0.0236358642578125, -0.02862548828125, 0.0217132568359375, 0.0526123046875, 0.03143310546875, 0.0472412109375, -0.007904052734375, 0.0146331787109375, -0.035125732421875, 0.0279541015625, 0.01343536376953125, 0.038055419921875, 0.0224761962890625, -0.032806396484375, 0.032470703125, 0.02740478515625, -0.0440673828125, -0.06011962890625, -0.0033512115478515625, -0.08270263671875, -0.00937652587890625, 0.10986328125, -0.0164642333984375, -0.03826904296875, 0.0009708404541015625, -0.03057861328125, 0.0292816162109375, -0.049652099609375, 0.058197021484375, 0.05029296875, -0.007965087890625, -0.00821685791015625, -0.05340576171875, 0.019683837890625, 0.015838623046875, -0.06976318359375, -0.007038116455078125, 0.01511383056640625, 0.033966064453125, 0.0182647705078125, 0.040802001953125, -0.00522613525390625, -0.00013816356658935547, 0.002857208251953125, 0.014312744140625, -0.00011783838272094727, 0.0021820068359375, -0.009979248046875, -0.0032215118408203125, -0.006206512451171875, -0.02374267578125 ] ]
Fredithefish/ReasonixPajama-3B-HF
2023-09-01T11:01:51.000Z
[ "transformers", "pytorch", "safetensors", "gpt_neox", "text-generation", "en", "dataset:Fredithefish/Reasonix", "license:apache-2.0", "has_space", "text-generation-inference", "region:us" ]
text-generation
Fredithefish
null
null
Fredithefish/ReasonixPajama-3B-HF
3
5,878
transformers
2023-06-07T17:20:44
--- license: apache-2.0 datasets: - Fredithefish/Reasonix language: - en library_name: transformers inference: false --- # ReasonixPajama
139
[ [ -0.041961669921875, 0.0019407272338867188, 0.0692138671875, 0.04461669921875, -0.0253143310546875, -0.00669097900390625, 0.017333984375, 0.0022182464599609375, 0.04925537109375, 0.085693359375, -0.0290069580078125, -0.01114654541015625, -0.06256103515625, 0.0159149169921875, -0.035247802734375, 0.03228759765625, -0.0136871337890625, 0.0011129379272460938, -0.00750732421875, -0.02288818359375, -0.05035400390625, 0.00201416015625, -0.071044921875, -0.02398681640625, 0.05340576171875, 0.022552490234375, 0.0513916015625, 0.0008311271667480469, 0.009246826171875, 0.02386474609375, 0.0166473388671875, 0.004337310791015625, -0.029144287109375, -0.007518768310546875, -0.01296234130859375, -0.0504150390625, 0.0111541748046875, 0.00443267822265625, 0.022857666015625, 0.0374755859375, -0.0097198486328125, 0.01873779296875, -0.04010009765625, 0.05181884765625, -0.043914794921875, 0.0005741119384765625, -0.01189422607421875, -0.01096343994140625, -0.0244903564453125, 0.01294708251953125, -0.00572967529296875, -0.04901123046875, -0.03131103515625, -0.059967041015625, -0.03875732421875, -0.0019512176513671875, 0.06146240234375, 0.03045654296875, -0.051788330078125, -0.00334930419921875, -0.03741455078125, 0.06689453125, -0.019073486328125, 0.043212890625, 0.0667724609375, 0.0263824462890625, -0.045074462890625, -0.036041259765625, -0.060455322265625, 0.049468994140625, 0.010833740234375, -0.01000213623046875, -0.01617431640625, -0.0401611328125, 0.039154052734375, -0.0024394989013671875, -0.034149169921875, -0.0316162109375, -0.0259857177734375, 0.0165252685546875, 0.03778076171875, 0.0121002197265625, 0.0941162109375, 0.03912353515625, -0.037322998046875, 0.027923583984375, -0.0599365234375, 0.0209808349609375, 0.010772705078125, -0.005584716796875, -0.01319122314453125, 0.0242767333984375, 0.0003209114074707031, 0.0188446044921875, 0.00681304931640625, -0.00937652587890625, 0.01256561279296875, -0.0005993843078613281, -0.03228759765625, 0.0223236083984375, 
0.033233642578125, 0.035369873046875, 0.005619049072265625, -0.00909423828125, 0.0078582763671875, 0.02423095703125, 0.028167724609375, -0.07781982421875, -0.027191162109375, 0.0158538818359375, -0.035186767578125, 0.01396942138671875, 0.00778961181640625, -0.044036865234375, -0.0418701171875, -0.05133056640625, 0.0404052734375, -0.0294036865234375, -0.05743408203125, 0.0227813720703125, -0.039764404296875, 0.01172637939453125, 0.03350830078125, -0.06988525390625, 0.007747650146484375, 0.039398193359375, 0.024871826171875, 0.048675537109375, -0.003849029541015625, 0.0199127197265625, 0.01073455810546875, -0.07843017578125, 0.062164306640625, -0.036468505859375, -0.0272674560546875, -0.030517578125, 0.0147247314453125, -0.0090789794921875, -0.037078857421875, 0.0380859375, -0.06060791015625, 0.0209808349609375, -0.060821533203125, -0.00434112548828125, -0.0262908935546875, 0.0194091796875, -0.02679443359375, 0.030029296875, 0.0159149169921875, -0.01087188720703125, 0.04461669921875, -0.0478515625, -0.0012426376342773438, 0.025604248046875, -0.016571044921875, -0.02655029296875, 0.01479339599609375, 0.0065460205078125, -0.005954742431640625, -0.0242919921875, 0.01171112060546875, -0.034271240234375, -0.0172119140625, 0.039276123046875, 0.00807952880859375, 0.098876953125, 0.047332763671875, -0.0203399658203125, -0.006328582763671875, -0.0538330078125, 0.00347900390625, 0.0283355712890625, -0.0085906982421875, -0.057525634765625, 0.00380706787109375, -0.016387939453125, 0.02398681640625, 0.001430511474609375, -0.03851318359375, 0.02947998046875, 0.00475311279296875, -0.00533294677734375, 0.0300140380859375, 0.0229949951171875, 0.00104522705078125, -0.012451171875, 0.034271240234375, 0.004436492919921875, 0.00717926025390625, 0.0257415771484375, -0.0281829833984375, -0.07452392578125, -0.016693115234375, -0.0182647705078125, 0.0252838134765625, -0.064697265625, 0.005046844482421875, 0.0159454345703125, -0.05035400390625, -0.0028533935546875, 0.00606536865234375, 
0.0068359375, 0.01085662841796875, 0.017425537109375, -0.015106201171875, -0.05352783203125, -0.075927734375, -0.0103302001953125, -0.01381683349609375, -0.00782012939453125, -0.009002685546875, 0.005924224853515625, -0.06951904296875, 0.0204315185546875, -0.01322174072265625, -0.0116729736328125, -0.03045654296875, -0.00897216796875, 0.0765380859375, 0.039398193359375, 0.058441162109375, -0.06622314453125, -0.0386962890625, -0.034088134765625, -0.03924560546875, -0.041290283203125, -0.031036376953125, -0.045562744140625, -0.02301025390625, 0.0118408203125, -0.0278778076171875, 0.04339599609375, 0.0261688232421875, -0.0760498046875, 0.01540374755859375, 0.0173187255859375, 0.056884765625, -0.0699462890625, -0.004283905029296875, -0.023712158203125, -0.00930023193359375, -0.01517486572265625, 0.0111846923828125, -0.01142120361328125, -0.032257080078125, -0.039581298828125, -0.0022106170654296875, -0.043426513671875, 0.029693603515625, -0.056304931640625, -0.01013946533203125, 0.00868988037109375, -0.0020656585693359375, -0.0186920166015625, 0.0672607421875, 0.048095703125, -0.047393798828125, 0.036285400390625, 0.041229248046875, 0.0062713623046875, 0.0310516357421875, -0.04461669921875, -0.00434112548828125, -0.0067291259765625, 0.0108489990234375, -0.060302734375, -0.0175018310546875, 0.02911376953125, -0.029632568359375, 0.01739501953125, -0.01202392578125, -0.032989501953125, -0.03594970703125, 0.004131317138671875, 0.056610107421875, 0.0618896484375, -0.07293701171875, 0.0173187255859375, 0.07177734375, 0.0304107666015625, -0.01080322265625, -0.04010009765625, 0.0258331298828125, -0.0177764892578125, -0.05194091796875, 0.0096893310546875, -0.0220794677734375, -0.03936767578125, -0.054443359375, -0.0157470703125, -0.00849151611328125, 0.01175689697265625, 0.029083251953125, -0.002521514892578125, -0.003391265869140625, 0.003971099853515625, 0.0015773773193359375, -0.01554107666015625, 0.035888671875, -0.0009870529174804688, 0.049835205078125, 
-0.0210418701171875, 0.0050506591796875, -0.0274658203125, 0.0255889892578125, 0.03961181640625, 0.0286407470703125, 0.056427001953125, 0.0223236083984375, -0.045074462890625, -0.0303802490234375, -0.0233612060546875, -0.0163726806640625, -0.037506103515625, -0.037628173828125, -0.031951904296875, -0.0435791015625, 0.02801513671875, -0.03582763671875, -0.0141448974609375, 0.0239715576171875, 0.038299560546875, -0.00974273681640625, 0.08966064453125, 0.01096343994140625, 0.008087158203125, -0.005191802978515625, -0.00948333740234375, 0.05419921875, -0.040740966796875, -0.01442718505859375, -0.037811279296875, -0.023406982421875, -0.03814697265625, -0.0175933837890625, -0.01345062255859375, 0.0186920166015625, -0.015777587890625, 0.0648193359375, -0.04010009765625, 0.0792236328125, 0.02313232421875, 0.003620147705078125, -0.034088134765625, -0.04473876953125, 0.0211639404296875, -0.025970458984375, -0.0305328369140625, -0.052093505859375, 0.05072021484375, 0.027252197265625, 0.052154541015625, 0.0469970703125, 0.046142578125, -0.002948760986328125, 0.017974853515625, -0.00925445556640625, 0.0404052734375, -0.003448486328125, -0.045562744140625, -0.0081329345703125, -0.00574493408203125, -0.10418701171875, 0.0194549560546875, 0.0026836395263671875, -0.0303802490234375, 0.021728515625, 0.0008916854858398438, -0.0281524658203125, 0.0269622802734375, -0.0175628662109375, 0.0709228515625, 0.006076812744140625, -0.00907135009765625, -0.014251708984375, -0.0238189697265625, 0.0362548828125, 0.0164794921875, 0.018524169921875, -0.0439453125, -0.0172271728515625, 0.04248046875, -0.0260772705078125, 0.02789306640625, 0.0148468017578125, 0.003292083740234375, 0.0184326171875, 0.0081329345703125, -0.0033111572265625, 0.0301513671875, 0.0254974365234375, 0.0272369384765625, 0.009002685546875, -0.05010986328125, -0.0187530517578125, 0.081298828125, -0.06341552734375, -0.0482177734375, -0.0535888671875, 0.0271148681640625, 0.00534820556640625, 0.01430511474609375, 
0.005809783935546875, 0.0193023681640625, 0.024566650390625, 0.0135345458984375, 0.0693359375, -0.00018155574798583984, 0.038665771484375, 0.032440185546875, -0.051727294921875, -0.019287109375, 0.061920166015625, 0.0022411346435546875, -0.027862548828125, -0.0015497207641601562, 0.029571533203125, -0.00795745849609375, -0.025238037109375, -0.00897216796875, 0.0156402587890625, -0.020294189453125, 0.00844573974609375, -0.042999267578125, -0.005207061767578125, -0.0599365234375, -0.0122528076171875, -0.0270233154296875, -0.03448486328125, -0.038543701171875, -0.004543304443359375, -0.00922393798828125, 0.0303497314453125, -0.01155853271484375, 0.01348114013671875, -0.05792236328125, 0.04718017578125, 0.0428466796875, 0.0308074951171875, -0.05364990234375, -0.0219268798828125, -0.008331298828125, -0.043060302734375, -0.038421630859375, -0.054473876953125, 0.04400634765625, -0.025390625, 0.049041748046875, 0.04107666015625, 0.01219940185546875, 0.038421630859375, -0.01540374755859375, 0.059722900390625, 0.039306640625, -0.04473876953125, 0.0792236328125, -0.02728271484375, 0.037994384765625, 0.059356689453125, 0.0382080078125, -0.066162109375, -0.013671875, -0.07366943359375, -0.060302734375, 0.04034423828125, 0.01157379150390625, 0.016693115234375, -0.020416259765625, -0.011138916015625, 0.014312744140625, 0.01462554931640625, -0.061798095703125, -0.0236663818359375, -0.0235595703125, 0.0102691650390625, 0.02276611328125, -0.0323486328125, -0.0445556640625, -0.006862640380859375, 0.07147216796875, 0.005878448486328125, 0.0078277587890625, 0.01079559326171875, 0.01251220703125, -0.0277252197265625, 0.006649017333984375, 0.071533203125, 0.085205078125, -0.01073455810546875, 0.0298309326171875, -0.0034656524658203125, -0.0113983154296875, 0.004596710205078125, -0.01486968994140625, -0.013641357421875, 0.0024089813232421875, 0.0018968582153320312, 0.06402587890625, 0.0005245208740234375, -0.037841796875, 0.0270538330078125, -0.04266357421875, 0.0028362274169921875, 
-0.09637451171875, 0.05755615234375, -0.0159912109375, 0.0303802490234375, 0.033447265625, 0.039764404296875, 0.0225982666015625, -0.0391845703125, 0.01113128662109375, 0.017425537109375, -0.0460205078125, -0.04278564453125, 0.059539794921875, 0.026153564453125, -0.0347900390625, 0.0250244140625, 0.01221466064453125, -0.0445556640625, 0.07861328125, 0.04779052734375, 0.06689453125, -0.0128936767578125, -0.0024166107177734375, 0.051544189453125, 0.026702880859375, 0.01024627685546875, 0.056610107421875, -0.004978179931640625, -0.060150146484375, 0.0179290771484375, -0.02978515625, -0.006702423095703125, 0.0246429443359375, -0.01291656494140625, 0.0196533203125, -0.07269287109375, -0.00991058349609375, -0.024627685546875, -0.0310821533203125, -0.0232086181640625, 0.031890869140625, 0.025726318359375, 0.08599853515625, -0.06622314453125, 0.050628662109375, 0.04315185546875, -0.034088134765625, -0.06451416015625, 0.01049041748046875, -0.007106781005859375, -0.0716552734375, 0.048126220703125, 0.042083740234375, 0.0306396484375, -0.00580596923828125, -0.05865478515625, -0.07891845703125, 0.048614501953125, 0.005107879638671875, -0.04083251953125, 0.06048583984375, -0.019744873046875, 0.026214599609375, -0.03076171875, 0.0087127685546875, 0.05401611328125, 0.0579833984375, 0.004840850830078125, -0.025115966796875, -0.00888824462890625, -0.0380859375, 0.025177001953125, 0.07232666015625, -0.046295166015625, 0.0264739990234375, -0.0247650146484375, -0.01560211181640625, -0.018707275390625, 0.037200927734375, 0.031463623046875, 0.05206298828125, 0.049896240234375, 0.0762939453125, -0.00041866302490234375, -0.02044677734375, 0.07708740234375, -0.015289306640625, 0.0002925395965576172, 0.039093017578125, 0.0133514404296875, 0.0167999267578125, 0.0804443359375, -0.0467529296875, 0.055084228515625, 0.049896240234375, 0.018951416015625, 0.040924072265625, 0.02056884765625, -0.029144287109375, -0.05419921875, -0.01132965087890625, -0.0428466796875, 0.0016222000122070312, 
0.0234832763671875, -0.02178955078125, 0.00311279296875, -0.01364898681640625, 0.024078369140625, 0.0298919677734375, -0.0364990234375, 0.047119140625, 0.0192413330078125, -0.034393310546875, 0.0174560546875, -0.01332855224609375, 0.019256591796875, -0.047332763671875, -0.0116729736328125, -0.0013790130615234375, 0.0127716064453125, -0.0159912109375, -0.043701171875, 0.049591064453125, -0.03741455078125, -0.01776123046875, -0.02777099609375, 0.049713134765625, -0.034576416015625, -0.04461669921875, 0.03790283203125, -0.038726806640625, 0.0159149169921875, 0.0042572021484375, -0.04376220703125, 0.01277923583984375, 0.005626678466796875, -0.011566162109375, 0.0204620361328125, 0.0181121826171875, 0.0012807846069335938, 0.036224365234375, 0.0699462890625, 0.0035800933837890625, -0.0069427490234375, 0.01445770263671875, 0.049652099609375, -0.02740478515625, -0.03778076171875, -0.0657958984375, 0.042938232421875, -0.0122833251953125, -0.05364990234375, 0.06365966796875, 0.03741455078125, 0.031158447265625, -0.0032291412353515625, 0.024261474609375, -0.01107025146484375, 0.040435791015625, -0.027923583984375, 0.051177978515625, -0.058807373046875, 0.0117340087890625, 0.01097869873046875, -0.0469970703125, -0.0159454345703125, 0.029632568359375, 0.0016422271728515625, -0.0240936279296875, 0.02490234375, 0.025054931640625, -0.00447845458984375, 0.017364501953125, 0.048309326171875, 0.0029296875, 0.033782958984375, 0.036285400390625, 0.0467529296875, -0.055389404296875, 0.0060882568359375, -0.0117034912109375, -0.033538818359375, -0.03436279296875, -0.05615234375, -0.057861328125, -0.033050537109375, -0.033599853515625, -0.047119140625, -0.0219268798828125, 0.07489013671875, 0.045654296875, -0.06475830078125, 0.0014448165893554688, -0.020538330078125, 0.033905029296875, -0.00582122802734375, -0.0141143798828125, -0.01421356201171875, -0.03887939453125, -0.0312347412109375, 0.0391845703125, 0.040740966796875, 0.0074005126953125, 0.0062255859375, -0.00566864013671875, 
-0.0055389404296875, 0.041717529296875, 0.029144287109375, 0.052886962890625, -0.04217529296875, -0.030670166015625, -0.03472900390625, -0.0299530029296875, -0.00792694091796875, 0.04443359375, -0.034088134765625, 0.030517578125, 0.05426025390625, 0.04541015625, 0.01605224609375, -0.01849365234375, 0.04864501953125, -0.02276611328125, 0.0167083740234375, 0.025360107421875, 0.0484619140625, -0.006237030029296875, -0.0267791748046875, 0.05706787109375, 0.048614501953125, -0.07171630859375, -0.03582763671875, 0.018524169921875, -0.08489990234375, -0.0352783203125, 0.050994873046875, 0.01378631591796875, -0.04217529296875, -0.033447265625, -0.037506103515625, -0.019287109375, -0.035614013671875, 0.070556640625, 0.0204010009765625, -0.01522064208984375, -0.041778564453125, -0.006992340087890625, 0.019195556640625, 0.0228729248046875, -0.058746337890625, -0.04339599609375, 0.020599365234375, 0.0297393798828125, 0.034271240234375, 0.057861328125, -0.00829315185546875, 0.0634765625, 0.0083770751953125, 0.0236968994140625, 0.0095062255859375, -0.0097808837890625, 0.0247650146484375, 0.00907135009765625, 0.00003629922866821289, -0.0426025390625 ] ]
TheBloke/WizardLM-30B-Uncensored-GPTQ
2023-09-27T12:44:21.000Z
[ "transformers", "safetensors", "llama", "text-generation", "uncensored", "dataset:ehartford/WizardLM_alpaca_evol_instruct_70k_unfiltered", "license:other", "has_space", "text-generation-inference", "region:us" ]
text-generation
TheBloke
null
null
TheBloke/WizardLM-30B-Uncensored-GPTQ
108
5,877
transformers
2023-05-22T12:56:35
--- license: other tags: - uncensored datasets: - ehartford/WizardLM_alpaca_evol_instruct_70k_unfiltered model_name: Wizardlm 30B Uncensored base_model: ehartford/WizardLM-30B-Uncensored inference: false model_creator: Eric Hartford model_type: llama prompt_template: '{prompt} ### Response: ' quantized_by: TheBloke --- <!-- header start --> <!-- 200823 --> <div style="width: auto; margin-left: auto; margin-right: auto"> <img src="https://i.imgur.com/EBdldam.jpg" alt="TheBlokeAI" style="width: 100%; min-width: 400px; display: block; margin: auto;"> </div> <div style="display: flex; justify-content: space-between; width: 100%;"> <div style="display: flex; flex-direction: column; align-items: flex-start;"> <p style="margin-top: 0.5em; margin-bottom: 0em;"><a href="https://discord.gg/theblokeai">Chat & support: TheBloke's Discord server</a></p> </div> <div style="display: flex; flex-direction: column; align-items: flex-end;"> <p style="margin-top: 0.5em; margin-bottom: 0em;"><a href="https://www.patreon.com/TheBlokeAI">Want to contribute? TheBloke's Patreon page</a></p> </div> </div> <div style="text-align:center; margin-top: 0em; margin-bottom: 0em"><p style="margin-top: 0.25em; margin-bottom: 0em;">TheBloke's LLM work is generously supported by a grant from <a href="https://a16z.com">andreessen horowitz (a16z)</a></p></div> <hr style="margin-top: 1.0em; margin-bottom: 1.0em;"> <!-- header end --> # Wizardlm 30B Uncensored - GPTQ - Model creator: [Eric Hartford](https://huggingface.co/ehartford) - Original model: [Wizardlm 30B Uncensored](https://huggingface.co/ehartford/WizardLM-30B-Uncensored) <!-- description start --> ## Description This repo contains GPTQ model files for [Eric Hartford's Wizardlm 30B Uncensored](https://huggingface.co/ehartford/WizardLM-30B-Uncensored). Multiple GPTQ parameter permutations are provided; see Provided Files below for details of the options provided, their parameters, and the software used to create them. 
<!-- description end --> <!-- repositories-available start --> ## Repositories available * [AWQ model(s) for GPU inference.](https://huggingface.co/TheBloke/WizardLM-30B-uncensored-AWQ) * [GPTQ models for GPU inference, with multiple quantisation parameter options.](https://huggingface.co/TheBloke/WizardLM-30B-uncensored-GPTQ) * [2, 3, 4, 5, 6 and 8-bit GGUF models for CPU+GPU inference](https://huggingface.co/TheBloke/WizardLM-30B-uncensored-GGUF) * [Eric Hartford's original unquantised fp16 model in pytorch format, for GPU inference and for further conversions](https://huggingface.co/ehartford/WizardLM-30B-Uncensored) <!-- repositories-available end --> <!-- prompt-template start --> ## Prompt template: WizardLM ``` {prompt} ### Response: ``` <!-- prompt-template end --> <!-- README_GPTQ.md-provided-files start --> ## Provided files and GPTQ parameters Multiple quantisation parameters are provided, to allow you to choose the best one for your hardware and requirements. Each separate quant is in a different branch. See below for instructions on fetching from different branches. All recent GPTQ files are made with AutoGPTQ, and all files in non-main branches are made with AutoGPTQ. Files in the `main` branch which were uploaded before August 2023 were made with GPTQ-for-LLaMa. <details> <summary>Explanation of GPTQ parameters</summary> - Bits: The bit size of the quantised model. - GS: GPTQ group size. Higher numbers use less VRAM, but have lower quantisation accuracy. "None" is the lowest possible value. - Act Order: True or False. Also known as `desc_act`. True results in better quantisation accuracy. Some GPTQ clients have had issues with models that use Act Order plus Group Size, but this is generally resolved now. - Damp %: A GPTQ parameter that affects how samples are processed for quantisation. 0.01 is default, but 0.1 results in slightly better accuracy. - GPTQ dataset: The dataset used for quantisation. 
Using a dataset more appropriate to the model's training can improve quantisation accuracy. Note that the GPTQ dataset is not the same as the dataset used to train the model - please refer to the original model repo for details of the training dataset(s). - Sequence Length: The length of the dataset sequences used for quantisation. Ideally this is the same as the model sequence length. For some very long sequence models (16+K), a lower sequence length may have to be used. Note that a lower sequence length does not limit the sequence length of the quantised model. It only impacts the quantisation accuracy on longer inference sequences. - ExLlama Compatibility: Whether this file can be loaded with ExLlama, which currently only supports Llama models in 4-bit. </details> | Branch | Bits | GS | Act Order | Damp % | GPTQ Dataset | Seq Len | Size | ExLlama | Desc | | ------ | ---- | -- | --------- | ------ | ------------ | ------- | ---- | ------- | ---- | | gptq-4bit-64g-actorder_True | 4 | 64 | Yes | 0.01 | [wikitext](https://huggingface.co/datasets/wikitext/viewer/wikitext-2-v1/test) | 2048 | 18.18 GB | Yes | 4-bit, with Act Order and group size 64g. Uses less VRAM than 32g, but with slightly lower accuracy. | | gptq-4bit-32g-actorder_True | 4 | 32 | Yes | 0.01 | [wikitext](https://huggingface.co/datasets/wikitext/viewer/wikitext-2-v1/test) | 2048 | 19.44 GB | Yes | 4-bit, with Act Order and group size 32g. Gives highest possible inference quality, with maximum VRAM usage. | | gptq-4bit-128g-actorder_True | 4 | 128 | Yes | 0.01 | [wikitext](https://huggingface.co/datasets/wikitext/viewer/wikitext-2-v1/test) | 2048 | 17.55 GB | Yes | 4-bit, with Act Order and group size 128g. Uses even less VRAM than 64g, but with slightly lower accuracy. 
| | gptq-8bit-128g-actorder_False | 8 | 128 | No | 0.01 | [wikitext](https://huggingface.co/datasets/wikitext/viewer/wikitext-2-v1/test) | 2048 | 33.73 GB | No | 8-bit, with group size 128g for higher inference quality and without Act Order to improve AutoGPTQ speed. | | gptq-8bit--1g-actorder_True | 8 | None | Yes | 0.01 | [wikitext](https://huggingface.co/datasets/wikitext/viewer/wikitext-2-v1/test) | 2048 | 32.99 GB | No | 8-bit, with Act Order. No group size, to lower VRAM requirements. | | gptq-3bit--1g-actorder_True | 3 | None | Yes | 0.01 | [wikitext](https://huggingface.co/datasets/wikitext/viewer/wikitext-2-v1/test) | 2048 | 12.92 GB | No | 3-bit, with Act Order and no group size. Lowest possible VRAM requirements. May be lower quality than 3-bit 128g. | | gptq-3bit-128g-actorder_False | 3 | 128 | No | 0.01 | [wikitext](https://huggingface.co/datasets/wikitext/viewer/wikitext-2-v1/test) | 2048 | 13.51 GB | No | 3-bit, with group size 128g but no act-order. Slightly higher VRAM requirements than 3-bit None. | | main | 4 | None | Yes | 0.01 | [wikitext](https://huggingface.co/datasets/wikitext/viewer/wikitext-2-v1/test) | 2048 | 16.94 GB | Yes | 4-bit, with Act Order. No group size, to lower VRAM requirements. | <!-- README_GPTQ.md-provided-files end --> <!-- README_GPTQ.md-download-from-branches start --> ## How to download from branches - In text-generation-webui, you can add `:branch` to the end of the download name, eg `TheBloke/WizardLM-30B-uncensored-GPTQ:gptq-4bit-64g-actorder_True` - With Git, you can clone a branch with: ``` git clone --single-branch --branch gptq-4bit-64g-actorder_True https://huggingface.co/TheBloke/WizardLM-30B-uncensored-GPTQ ``` - In Python Transformers code, the branch is the `revision` parameter; see below. 
<!-- README_GPTQ.md-download-from-branches end --> <!-- README_GPTQ.md-text-generation-webui start --> ## How to easily download and use this model in [text-generation-webui](https://github.com/oobabooga/text-generation-webui). Please make sure you're using the latest version of [text-generation-webui](https://github.com/oobabooga/text-generation-webui). It is strongly recommended to use the text-generation-webui one-click-installers unless you're sure you know how to make a manual install. 1. Click the **Model tab**. 2. Under **Download custom model or LoRA**, enter `TheBloke/WizardLM-30B-uncensored-GPTQ`. - To download from a specific branch, enter for example `TheBloke/WizardLM-30B-uncensored-GPTQ:gptq-4bit-64g-actorder_True` - see Provided Files above for the list of branches for each option. 3. Click **Download**. 4. The model will start downloading. Once it's finished it will say "Done". 5. In the top left, click the refresh icon next to **Model**. 6. In the **Model** dropdown, choose the model you just downloaded: `WizardLM-30B-uncensored-GPTQ` 7. The model will automatically load, and is now ready for use! 8. If you want any custom settings, set them and then click **Save settings for this model** followed by **Reload the Model** in the top right. * Note that you do not need to and should not set manual GPTQ parameters any more. These are set automatically from the file `quantize_config.json`. 9. Once you're ready, click the **Text Generation tab** and enter a prompt to get started! <!-- README_GPTQ.md-text-generation-webui end --> <!-- README_GPTQ.md-use-from-python start --> ## How to use this GPTQ model from Python code ### Install the necessary packages Requires: Transformers 4.32.0 or later, Optimum 1.12.0 or later, and AutoGPTQ 0.4.2 or later. 
```shell pip3 install transformers>=4.32.0 optimum>=1.12.0 pip3 install auto-gptq --extra-index-url https://huggingface.github.io/autogptq-index/whl/cu118/ # Use cu117 if on CUDA 11.7 ``` If you have problems installing AutoGPTQ using the pre-built wheels, install it from source instead: ```shell pip3 uninstall -y auto-gptq git clone https://github.com/PanQiWei/AutoGPTQ cd AutoGPTQ pip3 install . ``` ### For CodeLlama models only: you must use Transformers 4.33.0 or later. If 4.33.0 is not yet released when you read this, you will need to install Transformers from source: ```shell pip3 uninstall -y transformers pip3 install git+https://github.com/huggingface/transformers.git ``` ### You can then use the following code ```python from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline model_name_or_path = "TheBloke/WizardLM-30B-uncensored-GPTQ" # To use a different branch, change revision # For example: revision="gptq-4bit-64g-actorder_True" model = AutoModelForCausalLM.from_pretrained(model_name_or_path, device_map="auto", trust_remote_code=False, revision="main") tokenizer = AutoTokenizer.from_pretrained(model_name_or_path, use_fast=True) prompt = "Tell me about AI" prompt_template=f'''{prompt} ### Response: ''' print("\n\n*** Generate:") input_ids = tokenizer(prompt_template, return_tensors='pt').input_ids.cuda() output = model.generate(inputs=input_ids, temperature=0.7, do_sample=True, top_p=0.95, top_k=40, max_new_tokens=512) print(tokenizer.decode(output[0])) # Inference can also be done using transformers' pipeline print("*** Pipeline:") pipe = pipeline( "text-generation", model=model, tokenizer=tokenizer, max_new_tokens=512, do_sample=True, temperature=0.7, top_p=0.95, top_k=40, repetition_penalty=1.1 ) print(pipe(prompt_template)[0]['generated_text']) ``` <!-- README_GPTQ.md-use-from-python end --> <!-- README_GPTQ.md-compatibility start --> ## Compatibility The files provided are tested to work with AutoGPTQ, both via Transformers and using 
AutoGPTQ directly. They should also work with [Occ4m's GPTQ-for-LLaMa fork](https://github.com/0cc4m/KoboldAI). [ExLlama](https://github.com/turboderp/exllama) is compatible with Llama models in 4-bit. Please see the Provided Files table above for per-file compatibility. [Huggingface Text Generation Inference (TGI)](https://github.com/huggingface/text-generation-inference) is compatible with all GPTQ models. <!-- README_GPTQ.md-compatibility end --> <!-- footer start --> <!-- 200823 --> ## Discord For further support, and discussions on these models and AI in general, join us at: [TheBloke AI's Discord server](https://discord.gg/theblokeai) ## Thanks, and how to contribute Thanks to the [chirper.ai](https://chirper.ai) team! Thanks to Clay from [gpus.llm-utils.org](llm-utils)! I've had a lot of people ask if they can contribute. I enjoy providing models and helping people, and would love to be able to spend even more time doing it, as well as expanding into new projects like fine tuning/training. If you're able and willing to contribute it will be most gratefully received and will help me to keep providing more models, and to start work on new AI projects. Donaters will get priority support on any and all AI/LLM/model questions and requests, access to a private Discord room, plus other benefits. * Patreon: https://patreon.com/TheBlokeAI * Ko-Fi: https://ko-fi.com/TheBlokeAI **Special thanks to**: Aemon Algiz. 
**Patreon special mentions**: Alicia Loh, Stephen Murray, K, Ajan Kanaga, RoA, Magnesian, Deo Leter, Olakabola, Eugene Pentland, zynix, Deep Realms, Raymond Fosdick, Elijah Stavena, Iucharbius, Erik Bjäreholt, Luis Javier Navarrete Lozano, Nicholas, theTransient, John Detwiler, alfie_i, knownsqashed, Mano Prime, Willem Michiel, Enrico Ros, LangChain4j, OG, Michael Dempsey, Pierre Kircher, Pedro Madruga, James Bentley, Thomas Belote, Luke @flexchar, Leonard Tan, Johann-Peter Hartmann, Illia Dulskyi, Fen Risland, Chadd, S_X, Jeff Scroggin, Ken Nordquist, Sean Connelly, Artur Olbinski, Swaroop Kallakuri, Jack West, Ai Maven, David Ziegler, Russ Johnson, transmissions 11, John Villwock, Alps Aficionado, Clay Pascal, Viktor Bowallius, Subspace Studios, Rainer Wilmers, Trenton Dambrowitz, vamX, Michael Levine, 준교 김, Brandon Frisco, Kalila, Trailburnt, Randy H, Talal Aujan, Nathan Dryer, Vadim, 阿明, ReadyPlayerEmma, Tiffany J. Kim, George Stoitzev, Spencer Kim, Jerry Meng, Gabriel Tamborski, Cory Kujawski, Jeffrey Morgan, Spiking Neurons AB, Edmond Seymore, Alexandros Triantafyllidis, Lone Striker, Cap'n Zoog, Nikolai Manek, danny, ya boyyy, Derek Yates, usrbinkat, Mandus, TL, Nathan LeClaire, subjectnull, Imad Khwaja, webtim, Raven Klaugh, Asp the Wyvern, Gabriel Puliatti, Caitlyn Gatomon, Joseph William Delisle, Jonathan Leane, Luke Pendergrass, SuperWojo, Sebastain Graf, Will Dee, Fred von Graf, Andrey, Dan Guido, Daniel P. Andersen, Nitin Borwankar, Elle, Vitor Caleffi, biorpg, jjj, NimbleBox.ai, Pieter, Matthew Berman, terasurfer, Michael Davis, Alex, Stanislav Ovsiannikov Thank you to all my generous patrons and donaters! And thank you again to a16z for their generous grant. <!-- footer end --> # Original model card: Eric Hartford's Wizardlm 30B Uncensored This is WizardLM trained with a subset of the dataset - responses that contained alignment / moralizing were removed. 
The intent is to train a WizardLM that doesn't have alignment built-in, so that alignment (of any sort) can be added separately with for example with a RLHF LoRA. Shout out to the open source AI/ML community, and everyone who helped me out. Note: An uncensored model has no guardrails. You are responsible for anything you do with the model, just as you are responsible for anything you do with any dangerous object such as a knife, gun, lighter, or car. Publishing anything this model generates is the same as publishing it yourself. You are responsible for the content you publish, and you cannot blame the model any more than you can blame the knife, gun, lighter, or car for what you do with it.
15,582
[ [ -0.04254150390625, -0.05560302734375, 0.002166748046875, 0.0157470703125, -0.016845703125, -0.0097503662109375, 0.01016998291015625, -0.03668212890625, 0.01238250732421875, 0.032196044921875, -0.046722412109375, -0.03363037109375, -0.02130126953125, -0.003131866455078125, -0.031951904296875, 0.0823974609375, -0.00036454200744628906, -0.0204010009765625, -0.005458831787109375, -0.0149688720703125, -0.018707275390625, -0.029205322265625, -0.05291748046875, -0.019805908203125, 0.02947998046875, 0.0113372802734375, 0.0634765625, 0.033905029296875, 0.01346588134765625, 0.02362060546875, 0.0017557144165039062, 0.0009121894836425781, -0.041015625, -0.0128936767578125, 0.0086822509765625, -0.0084991455078125, -0.048675537109375, 0.0164337158203125, 0.0304412841796875, 0.01763916015625, -0.02886962890625, 0.015869140625, 0.005466461181640625, 0.058746337890625, -0.03887939453125, 0.01277923583984375, -0.026123046875, 0.00872039794921875, -0.00292205810546875, 0.01238250732421875, -0.00843048095703125, -0.0311431884765625, 0.012847900390625, -0.06744384765625, 0.0206146240234375, 0.0081634521484375, 0.08782958984375, 0.00400543212890625, -0.053863525390625, 0.01280975341796875, -0.034393310546875, 0.0443115234375, -0.06988525390625, 0.027923583984375, 0.042510986328125, 0.0208892822265625, -0.0203399658203125, -0.0653076171875, -0.048492431640625, -0.0038585662841796875, -0.0100860595703125, 0.0244903564453125, -0.030853271484375, 0.00667572021484375, 0.033966064453125, 0.05712890625, -0.0731201171875, -0.0164947509765625, -0.0240020751953125, -0.018768310546875, 0.07220458984375, 0.0139007568359375, 0.0266265869140625, -0.01580810546875, -0.02154541015625, -0.03155517578125, -0.04833984375, 0.000370025634765625, 0.0291900634765625, -0.004154205322265625, -0.0455322265625, 0.03717041015625, -0.02752685546875, 0.033966064453125, 0.017669677734375, -0.005615234375, 0.0290374755859375, -0.03924560546875, -0.0355224609375, -0.025146484375, 0.0919189453125, 0.03338623046875, 
-0.01073455810546875, 0.0148162841796875, -0.00112152099609375, -0.0203399658203125, 0.004398345947265625, -0.07666015625, -0.04620361328125, 0.03363037109375, -0.036346435546875, -0.0174407958984375, -0.0038127899169921875, -0.056976318359375, -0.005489349365234375, -0.00989532470703125, 0.046417236328125, -0.04388427734375, -0.032379150390625, 0.0081634521484375, -0.034942626953125, 0.03656005859375, 0.031219482421875, -0.058074951171875, 0.03924560546875, 0.022125244140625, 0.04925537109375, 0.001079559326171875, -0.01441192626953125, -0.0143890380859375, 0.01006317138671875, -0.010650634765625, 0.031097412109375, -0.0031757354736328125, -0.037322998046875, -0.021942138671875, 0.02459716796875, 0.0045928955078125, -0.019500732421875, 0.0305023193359375, -0.026885986328125, 0.03741455078125, -0.0321044921875, -0.04736328125, -0.027313232421875, 0.008636474609375, -0.0528564453125, 0.09759521484375, 0.0367431640625, -0.0631103515625, 0.0159759521484375, -0.0411376953125, -0.01372528076171875, -0.0028667449951171875, 0.001895904541015625, -0.04217529296875, -0.00830078125, 0.01192474365234375, 0.017364501953125, -0.0223236083984375, 0.0034961700439453125, -0.021392822265625, -0.0180511474609375, 0.0130615234375, -0.050933837890625, 0.09539794921875, 0.01189422607421875, -0.03466796875, -0.01091766357421875, -0.060333251953125, 0.01018524169921875, 0.03558349609375, -0.0168914794921875, -0.0014753341674804688, -0.0160980224609375, 0.00905609130859375, 0.0108184814453125, 0.0184173583984375, -0.0278472900390625, 0.033050537109375, -0.0204010009765625, 0.03631591796875, 0.04815673828125, 0.0016994476318359375, 0.0225677490234375, -0.027740478515625, 0.036346435546875, -0.0009927749633789062, 0.04931640625, 0.0093994140625, -0.05548095703125, -0.052459716796875, -0.0173797607421875, 0.0279083251953125, 0.046051025390625, -0.051605224609375, 0.03656005859375, -0.00702667236328125, -0.058380126953125, -0.0174713134765625, -0.004856109619140625, 0.022125244140625, 
0.02764892578125, 0.0355224609375, -0.034149169921875, -0.02081298828125, -0.06365966796875, 0.00518035888671875, -0.0364990234375, -0.006305694580078125, 0.0297698974609375, 0.05718994140625, -0.0125732421875, 0.0665283203125, -0.0523681640625, -0.0017604827880859375, 0.0041961669921875, 0.011138916015625, 0.019744873046875, 0.044281005859375, 0.0550537109375, -0.062042236328125, -0.050994873046875, -0.00710296630859375, -0.0465087890625, -0.006893157958984375, 0.0040283203125, -0.034210205078125, 0.0147705078125, -0.00302886962890625, -0.08148193359375, 0.057952880859375, 0.038909912109375, -0.04937744140625, 0.0709228515625, -0.0217437744140625, 0.0125274658203125, -0.0782470703125, 0.0003905296325683594, 0.00560760498046875, -0.0229339599609375, -0.0384521484375, 0.0008187294006347656, 0.0017194747924804688, 0.0082244873046875, -0.03216552734375, 0.053009033203125, -0.04180908203125, 0.0038928985595703125, 0.001567840576171875, -0.007106781005859375, 0.034454345703125, 0.039215087890625, -0.01323699951171875, 0.060760498046875, 0.03277587890625, -0.036285400390625, 0.04693603515625, 0.032012939453125, -0.001491546630859375, 0.02081298828125, -0.0589599609375, 0.0084075927734375, 0.01148223876953125, 0.030242919921875, -0.06854248046875, -0.019805908203125, 0.03887939453125, -0.04034423828125, 0.038848876953125, -0.0218658447265625, -0.033966064453125, -0.031951904296875, -0.042449951171875, 0.02545166015625, 0.061248779296875, -0.0281524658203125, 0.036712646484375, 0.0284881591796875, 0.0082244873046875, -0.046875, -0.050628662109375, -0.01277923583984375, -0.01763916015625, -0.045440673828125, 0.037078857421875, -0.016021728515625, -0.00406646728515625, 0.0012054443359375, -0.0034084320068359375, -0.0090179443359375, -0.00803375244140625, 0.020904541015625, 0.025848388671875, -0.0084991455078125, -0.01256561279296875, 0.01424407958984375, 0.005268096923828125, -0.0002357959747314453, -0.023681640625, 0.026947021484375, -0.0065155029296875, 
-0.00727081298828125, -0.02557373046875, 0.028167724609375, 0.034332275390625, 0.006145477294921875, 0.05963134765625, 0.06488037109375, -0.02520751953125, 0.004611968994140625, -0.0400390625, -0.00861358642578125, -0.039398193359375, 0.0102996826171875, -0.01232147216796875, -0.048828125, 0.0389404296875, 0.0300140380859375, 0.0180816650390625, 0.05938720703125, 0.033966064453125, -0.0023517608642578125, 0.0767822265625, 0.0290374755859375, -0.0083465576171875, 0.0340576171875, -0.04437255859375, -0.0113372802734375, -0.057464599609375, -0.016021728515625, -0.025787353515625, -0.0105438232421875, -0.0606689453125, -0.034088134765625, 0.0267791748046875, 0.0241546630859375, -0.063232421875, 0.043975830078125, -0.052886962890625, 0.0124053955078125, 0.04180908203125, 0.0169219970703125, 0.017608642578125, 0.007343292236328125, -0.0133056640625, 0.00670623779296875, -0.041656494140625, -0.017608642578125, 0.0821533203125, 0.023651123046875, 0.053558349609375, 0.0204925537109375, 0.033966064453125, 0.01568603515625, 0.019683837890625, -0.03765869140625, 0.041229248046875, 0.0023899078369140625, -0.05914306640625, -0.03033447265625, -0.045440673828125, -0.0709228515625, 0.0230255126953125, -0.0063018798828125, -0.061859130859375, 0.029693603515625, 0.0038356781005859375, -0.019287109375, 0.0184783935546875, -0.056976318359375, 0.08001708984375, -0.00800323486328125, -0.03448486328125, -0.0006709098815917969, -0.050628662109375, 0.0215911865234375, 0.01343536376953125, -0.0008335113525390625, -0.0193939208984375, -0.0160064697265625, 0.05853271484375, -0.07269287109375, 0.056121826171875, -0.0184173583984375, -0.007747650146484375, 0.0399169921875, -0.008880615234375, 0.04425048828125, 0.00621795654296875, 0.0013895034790039062, 0.026397705078125, 0.02728271484375, -0.037322998046875, -0.03619384765625, 0.038360595703125, -0.0743408203125, -0.0386962890625, -0.03546142578125, -0.035125732421875, -0.00540924072265625, 0.00634765625, 0.04119873046875, 0.034637451171875, 
-0.006389617919921875, -0.0031261444091796875, 0.055999755859375, -0.0279998779296875, 0.032501220703125, 0.034210205078125, -0.028594970703125, -0.040252685546875, 0.06561279296875, 0.01306915283203125, 0.0195159912109375, 0.016693115234375, 0.0110015869140625, -0.03656005859375, -0.0352783203125, -0.05426025390625, 0.021820068359375, -0.04266357421875, -0.0267791748046875, -0.04779052734375, -0.0284271240234375, -0.03741455078125, 0.0208587646484375, -0.031707763671875, -0.054046630859375, -0.031219482421875, 0.0005984306335449219, 0.07159423828125, 0.034942626953125, -0.01419830322265625, 0.0184326171875, -0.060211181640625, 0.018157958984375, 0.0322265625, 0.0164642333984375, 0.0008196830749511719, -0.055145263671875, -0.0140380859375, 0.01220703125, -0.051788330078125, -0.0753173828125, 0.0491943359375, 0.01502227783203125, 0.034210205078125, 0.03057861328125, 0.017333984375, 0.056854248046875, -0.0251617431640625, 0.08294677734375, 0.021148681640625, -0.061981201171875, 0.0374755859375, -0.0391845703125, 0.0194549560546875, 0.032379150390625, 0.0469970703125, -0.02227783203125, -0.022491455078125, -0.054656982421875, -0.0631103515625, 0.02947998046875, 0.034942626953125, 0.005344390869140625, 0.00894927978515625, 0.03802490234375, 0.007488250732421875, 0.01348114013671875, -0.0614013671875, -0.049102783203125, -0.03369140625, -0.00995635986328125, 0.0133056640625, -0.0007419586181640625, -0.0194549560546875, -0.054412841796875, 0.07403564453125, -0.01261138916015625, 0.054962158203125, 0.0247344970703125, 0.009002685546875, 0.0009188652038574219, 0.00576019287109375, 0.01885986328125, 0.044769287109375, -0.016082763671875, -0.0197906494140625, 0.01371002197265625, -0.0692138671875, 0.01318359375, 0.029693603515625, -0.01485443115234375, -0.00579071044921875, 0.0049896240234375, 0.060760498046875, -0.00702667236328125, -0.0225982666015625, 0.042633056640625, -0.025634765625, -0.0265350341796875, -0.03033447265625, 0.016510009765625, 0.0126800537109375, 
0.025665283203125, 0.032073974609375, -0.02239990234375, 0.031280517578125, -0.038360595703125, 0.009429931640625, 0.038726806640625, -0.015777587890625, -0.02410888671875, 0.057830810546875, -0.0030384063720703125, 0.00904083251953125, 0.055816650390625, -0.02569580078125, -0.0279998779296875, 0.05633544921875, 0.0347900390625, 0.05999755859375, -0.01239776611328125, 0.021514892578125, 0.039581298828125, 0.009185791015625, -0.005855560302734375, 0.02545166015625, -0.005496978759765625, -0.046539306640625, -0.026214599609375, -0.0467529296875, -0.0271759033203125, 0.019256591796875, -0.058746337890625, 0.01143646240234375, -0.024627685546875, -0.02685546875, -0.014068603515625, 0.029144287109375, -0.043487548828125, 0.023040771484375, 0.002628326416015625, 0.079833984375, -0.05133056640625, 0.068115234375, 0.0309295654296875, -0.03497314453125, -0.07720947265625, -0.010589599609375, 0.01201629638671875, -0.0384521484375, 0.0117340087890625, 0.002880096435546875, 0.0247650146484375, 0.004268646240234375, -0.05169677734375, -0.065185546875, 0.109375, 0.022918701171875, -0.0401611328125, -0.016265869140625, 0.003749847412109375, 0.024139404296875, -0.00334930419921875, 0.0531005859375, 0.040191650390625, 0.0282135009765625, 0.0106658935546875, -0.0740966796875, 0.03131103515625, -0.035980224609375, -0.0016183853149414062, 0.0144195556640625, -0.07427978515625, 0.071044921875, 0.005771636962890625, -0.00738525390625, 0.0135498046875, 0.04974365234375, 0.0267181396484375, 0.0091705322265625, 0.028594970703125, 0.056915283203125, 0.05694580078125, -0.0222930908203125, 0.0887451171875, -0.011260986328125, 0.047332763671875, 0.0589599609375, 0.003231048583984375, 0.0518798828125, 0.016998291015625, -0.055389404296875, 0.0465087890625, 0.07427978515625, -0.009857177734375, 0.0345458984375, 0.002132415771484375, -0.028717041015625, -0.0016794204711914062, 0.01546478271484375, -0.0555419921875, 0.0018568038940429688, 0.02520751953125, -0.013092041015625, 0.003986358642578125, 
-0.00713348388671875, 0.0004208087921142578, -0.054412841796875, -0.0162506103515625, 0.043304443359375, 0.020843505859375, -0.01953125, 0.06231689453125, -0.01219940185546875, 0.04742431640625, -0.046844482421875, -0.01190948486328125, -0.0276947021484375, -0.0106201171875, -0.0233001708984375, -0.056304931640625, 0.01248931884765625, -0.0160064697265625, -0.0135498046875, 0.0028285980224609375, 0.05419921875, -0.0155792236328125, -0.03143310546875, 0.02691650390625, 0.032440185546875, 0.02618408203125, -0.00824737548828125, -0.0848388671875, 0.01074981689453125, -0.0026645660400390625, -0.06109619140625, 0.040313720703125, 0.02960205078125, 0.01319122314453125, 0.0484619140625, 0.04217529296875, -0.0103607177734375, 0.0092620849609375, -0.00965118408203125, 0.07177734375, -0.05718994140625, -0.021697998046875, -0.055999755859375, 0.0499267578125, -0.0185394287109375, -0.030792236328125, 0.066650390625, 0.04803466796875, 0.05389404296875, 0.00572967529296875, 0.05401611328125, -0.028839111328125, 0.0111083984375, -0.025146484375, 0.05560302734375, -0.05670166015625, 0.004520416259765625, -0.029083251953125, -0.055816650390625, 0.00412750244140625, 0.049468994140625, -0.008148193359375, 0.0196380615234375, 0.031280517578125, 0.0673828125, -0.0008955001831054688, 0.01488494873046875, 0.0208587646484375, 0.0229644775390625, 0.0086822509765625, 0.05499267578125, 0.050628662109375, -0.0777587890625, 0.04254150390625, -0.03167724609375, -0.0158843994140625, -0.0042877197265625, -0.060333251953125, -0.056640625, -0.03662109375, -0.049835205078125, -0.05291748046875, -0.005352020263671875, 0.06695556640625, 0.063232421875, -0.0482177734375, -0.020233154296875, -0.006816864013671875, -0.0008840560913085938, -0.0208282470703125, -0.024932861328125, 0.0300445556640625, 0.02392578125, -0.054473876953125, 0.01375579833984375, 0.00569915771484375, 0.02764892578125, -0.0197906494140625, -0.027801513671875, -0.0119171142578125, 0.0102081298828125, 0.049102783203125, 
0.0394287109375, -0.035186767578125, -0.0076446533203125, -0.013916015625, -0.007488250732421875, 0.0179595947265625, 0.021209716796875, -0.0535888671875, 0.0037136077880859375, 0.039825439453125, 0.0098724365234375, 0.07147216796875, -0.00435638427734375, 0.0318603515625, -0.03302001953125, 0.0039215087890625, 0.0021915435791015625, 0.026153564453125, 0.0051116943359375, -0.04180908203125, 0.04730224609375, 0.031829833984375, -0.054168701171875, -0.053375244140625, -0.01061248779296875, -0.08526611328125, -0.0198516845703125, 0.0841064453125, -0.0126953125, -0.0289764404296875, -0.01177215576171875, -0.0173187255859375, 0.02960205078125, -0.03778076171875, 0.023345947265625, 0.036865234375, -0.022735595703125, -0.0279998779296875, -0.0650634765625, 0.043731689453125, 0.00891876220703125, -0.0592041015625, -0.000017881393432617188, 0.040863037109375, 0.038726806640625, 0.0002281665802001953, 0.06610107421875, -0.020263671875, 0.027099609375, 0.0151214599609375, 0.003856658935546875, -0.00011920928955078125, 0.01171875, -0.023162841796875, 0.0007176399230957031, -0.01971435546875, 0.0025539398193359375 ] ]
KnutJaegersberg/deacon-3b
2023-09-18T09:46:57.000Z
[ "transformers", "pytorch", "safetensors", "llama", "text-generation", "custom_code", "dataset:KnutJaegersberg/trilobite", "license:cc-by-nc-4.0", "endpoints_compatible", "has_space", "text-generation-inference", "region:us" ]
text-generation
KnutJaegersberg
null
null
KnutJaegersberg/deacon-3b
2
5,875
transformers
2023-09-18T05:22:15
--- license: cc-by-nc-4.0 datasets: - KnutJaegersberg/trilobite --- ![image/png](https://cdn-uploads.huggingface.co/production/uploads/63732ebbbd81fae2b3aaf3fb/4OQkvAa1zOK4Devv-aUdL.png) This model was fine tuned on AI filtered subsets of GPT-4 based subset of the Dolphin dataset and EvolInstruct V2. It has not been explicitly aligned to positive, negative or bureaucratically prescribed value systems. It might kill us all! Time to shit your pants, regulators. I literally put black goo on Dolphin-7B sperm, which then fertilized Evolved Instructions... What's different is evil... ;) I intend to train 3 sizes. Prompt Example: ``` ### System: You are an AI assistant. User will you give you a task. Your goal is to complete the task as faithfully as you can. While performing the task think step-by-step and justify your steps. ### Instruction: How do you fine tune a large language model? ### Response: ```
928
[ [ -0.051544189453125, -0.054168701171875, 0.035400390625, -0.01027679443359375, -0.047576904296875, -0.026763916015625, 0.0177764892578125, -0.050872802734375, 0.0103912353515625, 0.06463623046875, -0.050994873046875, 0.004425048828125, -0.044342041015625, 0.015869140625, 0.01061248779296875, 0.1072998046875, 0.0237274169921875, 0.02813720703125, 0.042755126953125, 0.007602691650390625, -0.050506591796875, -0.02252197265625, -0.07891845703125, -0.032745361328125, 0.040008544921875, 0.0213775634765625, 0.048858642578125, 0.072265625, 0.034271240234375, 0.016632080078125, -0.01216888427734375, 0.03082275390625, -0.06427001953125, -0.01029205322265625, -0.00823974609375, -0.00379180908203125, -0.058624267578125, 0.043670654296875, 0.0166778564453125, 0.03729248046875, -0.005046844482421875, 0.01242828369140625, 0.004062652587890625, 0.041900634765625, -0.04052734375, 0.01442718505859375, -0.0290374755859375, 0.0004241466522216797, 0.00896453857421875, 0.00531768798828125, -0.01556396484375, -0.028778076171875, -0.01499176025390625, -0.07025146484375, -0.00882720947265625, 0.01018524169921875, 0.08367919921875, 0.04815673828125, -0.0299530029296875, -0.00891876220703125, -0.04736328125, 0.049591064453125, -0.046722412109375, -0.00130462646484375, 0.042633056640625, 0.01904296875, -0.0216522216796875, -0.040191650390625, -0.032684326171875, -0.0151214599609375, 0.013763427734375, 0.016143798828125, 0.006557464599609375, 0.015167236328125, 0.0239105224609375, 0.01349639892578125, -0.031646728515625, 0.0219268798828125, -0.06072998046875, -0.0178680419921875, 0.051544189453125, 0.0034313201904296875, 0.032928466796875, 0.02716064453125, -0.0158233642578125, -0.01004791259765625, -0.053253173828125, -0.01739501953125, 0.055419921875, 0.0438232421875, -0.01512908935546875, 0.0528564453125, 0.0010404586791992188, 0.058868408203125, 0.0301971435546875, 0.015869140625, -0.005039215087890625, -0.01264190673828125, -0.03314208984375, 0.01209259033203125, 0.059600830078125, 
0.01715087890625, 0.04376220703125, 0.0163421630859375, -0.04052734375, -0.00868988037109375, 0.0697021484375, -0.0592041015625, -0.0240020751953125, -0.00016701221466064453, -0.027099609375, -0.037017822265625, -0.004543304443359375, -0.050811767578125, -0.0208892822265625, -0.025146484375, 0.06634521484375, -0.04144287109375, 0.000014483928680419922, 0.0213775634765625, -0.01239013671875, 0.019378662109375, 0.024871826171875, -0.086181640625, 0.0245819091796875, 0.05279541015625, 0.048309326171875, 0.035919189453125, -0.00006270408630371094, -0.035888671875, 0.0014142990112304688, -0.04345703125, 0.0382080078125, -0.037139892578125, -0.03179931640625, -0.031829833984375, -0.0101470947265625, 0.004398345947265625, -0.04052734375, 0.0246124267578125, -0.043304443359375, 0.034393310546875, -0.0234527587890625, -0.054443359375, -0.03009033203125, 0.00510406494140625, -0.053924560546875, 0.037933349609375, 0.01508331298828125, -0.030487060546875, 0.041778564453125, -0.0716552734375, -0.0258636474609375, -0.005496978759765625, -0.001934051513671875, -0.041351318359375, -0.003814697265625, 0.0092315673828125, 0.023681640625, -0.023101806640625, 0.006404876708984375, -0.059722900390625, -0.040740966796875, -0.005184173583984375, -0.011474609375, 0.08502197265625, 0.032073974609375, -0.04345703125, -0.0016450881958007812, -0.04852294921875, 0.01491546630859375, -0.004657745361328125, -0.0258026123046875, -0.01271820068359375, -0.0244598388671875, -0.01214599609375, 0.0297698974609375, 0.032989501953125, -0.03717041015625, 0.029693603515625, -0.01544952392578125, 0.0333251953125, 0.0797119140625, 0.01538848876953125, 0.01959228515625, -0.031280517578125, 0.032379150390625, -0.0186920166015625, 0.03521728515625, 0.03466796875, -0.03656005859375, -0.04718017578125, -0.0276947021484375, 0.0027027130126953125, 0.0230560302734375, -0.034454345703125, 0.06060791015625, 0.0012273788452148438, -0.042327880859375, -0.028961181640625, -0.0205078125, 0.0267333984375, 
0.039215087890625, 0.03607177734375, -0.0177764892578125, -0.020782470703125, -0.08819580078125, 0.0112762451171875, -0.021697998046875, -0.00952911376953125, 0.0190582275390625, 0.05535888671875, 0.00934600830078125, 0.0272674560546875, -0.0258026123046875, -0.01922607421875, -0.01568603515625, -0.007289886474609375, 0.021453857421875, 0.056549072265625, 0.040557861328125, -0.04266357421875, -0.00855255126953125, -0.0008234977722167969, -0.05181884765625, -0.003086090087890625, 0.0028820037841796875, -0.018951416015625, -0.01497650146484375, 0.028350830078125, -0.043914794921875, 0.00847625732421875, 0.013214111328125, -0.036376953125, 0.038299560546875, -0.03955078125, -0.01280975341796875, -0.06268310546875, 0.0251007080078125, 0.00438690185546875, -0.006580352783203125, -0.047943115234375, 0.0240020751953125, 0.006992340087890625, -0.011566162109375, -0.017303466796875, 0.044036865234375, -0.026153564453125, -0.006153106689453125, -0.023895263671875, -0.01538848876953125, -0.0189208984375, 0.041107177734375, -0.0160064697265625, 0.0731201171875, 0.03131103515625, -0.0694580078125, 0.01515960693359375, 0.01934814453125, -0.006351470947265625, 0.040924072265625, -0.06585693359375, 0.00405120849609375, 0.01385498046875, 0.015533447265625, -0.0268096923828125, -0.0233612060546875, 0.0394287109375, -0.01538848876953125, 0.04571533203125, -0.0009584426879882812, -0.0172882080078125, -0.0159149169921875, -0.0268096923828125, 0.014984130859375, 0.0528564453125, -0.0267791748046875, 0.0285491943359375, 0.05657958984375, 0.00591278076171875, -0.04437255859375, -0.03973388671875, -0.00853729248046875, -0.0302581787109375, -0.0194244384765625, 0.006622314453125, 0.002735137939453125, -0.005245208740234375, -0.0032672882080078125, -0.00971221923828125, -0.01117706298828125, 0.0116424560546875, 0.00684356689453125, 0.04254150390625, -0.0186309814453125, -0.0139007568359375, -0.00485992431640625, -0.004322052001953125, 0.00007867813110351562, -0.0372314453125, 
0.044097900390625, -0.01284027099609375, -0.0169525146484375, -0.051483154296875, -0.00033402442932128906, 0.064453125, -0.01190948486328125, 0.032684326171875, 0.043548583984375, -0.021087646484375, -0.00696563720703125, -0.026458740234375, -0.036041259765625, -0.03485107421875, 0.03302001953125, -0.0203704833984375, -0.051971435546875, 0.0287322998046875, 0.0237274169921875, 0.006351470947265625, 0.045379638671875, 0.0275421142578125, -0.00489044189453125, 0.08905029296875, 0.053375244140625, -0.0176544189453125, 0.023529052734375, -0.037567138671875, 0.0259246826171875, -0.073486328125, -0.01561737060546875, -0.024322509765625, -0.03741455078125, -0.0634765625, 0.0118255615234375, 0.0295257568359375, 0.0077972412109375, -0.0823974609375, 0.0291595458984375, -0.044891357421875, 0.034515380859375, 0.046295166015625, 0.031524658203125, -0.02215576171875, -0.007099151611328125, -0.002452850341796875, 0.01375579833984375, -0.04522705078125, -0.046173095703125, 0.0675048828125, 0.0286712646484375, 0.061431884765625, 0.005947113037109375, 0.07049560546875, 0.01934814453125, 0.005306243896484375, -0.043975830078125, 0.03961181640625, -0.0399169921875, -0.0631103515625, -0.052581787109375, -0.028167724609375, -0.06268310546875, 0.0174713134765625, 0.011932373046875, -0.054351806640625, 0.0224456787109375, 0.01404571533203125, -0.0452880859375, 0.0284881591796875, -0.049346923828125, 0.07257080078125, -0.0207672119140625, -0.026641845703125, -0.0240020751953125, -0.036773681640625, 0.0230560302734375, -0.0192413330078125, -0.00411224365234375, -0.02496337890625, -0.009552001953125, 0.0347900390625, -0.07232666015625, 0.07891845703125, -0.031890869140625, 0.005157470703125, 0.007061004638671875, 0.005878448486328125, 0.022918701171875, -0.004482269287109375, -0.008575439453125, -0.010772705078125, -0.0095672607421875, -0.0408935546875, -0.01251983642578125, 0.0330810546875, -0.09918212890625, -0.036834716796875, -0.0294189453125, -0.0214691162109375, -0.001667022705078125, 
0.0166015625, 0.0302734375, 0.0172119140625, -0.00030350685119628906, -0.008758544921875, 0.035552978515625, 0.0027637481689453125, 0.0191650390625, 0.0382080078125, 0.0079193115234375, -0.0178985595703125, 0.0718994140625, 0.00666046142578125, 0.0276031494140625, 0.00176239013671875, 0.006359100341796875, -0.035919189453125, -0.02691650390625, -0.04119873046875, 0.035369873046875, -0.03656005859375, -0.01708984375, -0.0254364013671875, -0.026611328125, -0.026397705078125, -0.004070281982421875, -0.03253173828125, -0.0186767578125, -0.05621337890625, -0.00357818603515625, 0.00907135009765625, 0.10113525390625, -0.03179931640625, 0.0144805908203125, -0.0282440185546875, 0.0162200927734375, 0.044036865234375, 0.021820068359375, -0.02752685546875, -0.061737060546875, 0.002727508544921875, 0.0307159423828125, -0.0299072265625, -0.04595947265625, 0.03948974609375, 0.02752685546875, 0.04998779296875, 0.0302734375, -0.0027828216552734375, 0.054473876953125, -0.034271240234375, 0.056793212890625, -0.0027599334716796875, -0.064697265625, 0.043975830078125, -0.0271759033203125, 0.006694793701171875, 0.02410888671875, 0.02081298828125, -0.02496337890625, -0.04852294921875, -0.057403564453125, -0.049102783203125, 0.07391357421875, 0.0016279220581054688, 0.012298583984375, 0.00003445148468017578, 0.055572509765625, 0.01056671142578125, 0.0011491775512695312, -0.058990478515625, -0.005168914794921875, -0.0295257568359375, -0.001094818115234375, 0.0008282661437988281, -0.007068634033203125, 0.0014200210571289062, -0.0160064697265625, 0.08612060546875, -0.00597381591796875, 0.0165557861328125, -0.00504302978515625, 0.0125732421875, -0.01035308837890625, -0.004352569580078125, 0.06884765625, 0.048095703125, -0.022430419921875, -0.007724761962890625, -0.004764556884765625, -0.050689697265625, 0.0207061767578125, 0.0012531280517578125, -0.0251007080078125, -0.044219970703125, 0.024688720703125, 0.0775146484375, -0.0142669677734375, -0.022186279296875, 0.018768310546875, 
-0.01099395751953125, -0.0114898681640625, -0.009002685546875, 0.044219970703125, -0.01499176025390625, 0.00984954833984375, 0.004001617431640625, 0.0163421630859375, 0.0037250518798828125, -0.0352783203125, 0.00719451904296875, 0.03155517578125, -0.040130615234375, -0.024017333984375, 0.05908203125, 0.007083892822265625, -0.01372528076171875, 0.07012939453125, -0.036773681640625, -0.03131103515625, 0.07293701171875, 0.04974365234375, 0.052520751953125, -0.01910400390625, 0.03143310546875, 0.0396728515625, 0.031341552734375, -0.0263214111328125, 0.04095458984375, 0.026885986328125, -0.06207275390625, -0.0197296142578125, -0.05413818359375, -0.0220794677734375, 0.05975341796875, -0.0404052734375, 0.0219879150390625, -0.060821533203125, -0.0024871826171875, 0.01763916015625, 0.004276275634765625, -0.042877197265625, 0.028167724609375, 0.00556182861328125, 0.05596923828125, -0.073974609375, 0.05902099609375, 0.0576171875, -0.059173583984375, -0.08978271484375, -0.01690673828125, -0.0061798095703125, -0.08209228515625, 0.059051513671875, 0.0168304443359375, 0.0049285888671875, 0.0005755424499511719, -0.058563232421875, -0.06671142578125, 0.058807373046875, 0.033905029296875, -0.0283966064453125, 0.0058746337890625, -0.01019287109375, 0.056549072265625, -0.03192138671875, 0.018402099609375, 0.039276123046875, 0.04693603515625, -0.0178375244140625, -0.08746337890625, 0.0055389404296875, -0.032745361328125, -0.0021991729736328125, 0.01309967041015625, -0.03680419921875, 0.09405517578125, -0.032257080078125, 0.0089569091796875, 0.0004420280456542969, 0.007450103759765625, 0.01019287109375, -0.0238037109375, 0.04071044921875, 0.04156494140625, 0.055633544921875, -0.00024199485778808594, 0.0931396484375, -0.02783203125, 0.02313232421875, 0.07843017578125, -0.0296478271484375, 0.03826904296875, 0.02252197265625, -0.00921630859375, -0.0113677978515625, 0.08721923828125, -0.0298309326171875, 0.045440673828125, 0.01800537109375, 0.00506591796875, -0.0257110595703125, 
-0.00426483154296875, -0.0260162353515625, 0.041107177734375, 0.03466796875, -0.0027446746826171875, -0.010040283203125, -0.0132293701171875, -0.009674072265625, -0.0174713134765625, -0.0263214111328125, 0.068115234375, -0.00030994415283203125, -0.05487060546875, 0.054473876953125, -0.00689697265625, 0.037994384765625, -0.04473876953125, -0.01338958740234375, -0.0217742919921875, 0.024993896484375, 0.00861358642578125, -0.062103271484375, -0.0041351318359375, -0.0083770751953125, 0.01189422607421875, -0.00847625732421875, 0.033050537109375, -0.01934814453125, -0.028961181640625, 0.02325439453125, 0.025787353515625, 0.0242919921875, -0.0054931640625, -0.0689697265625, -0.0210113525390625, 0.017303466796875, -0.012725830078125, 0.0102081298828125, 0.0318603515625, -0.00791168212890625, 0.06463623046875, 0.0413818359375, 0.035980224609375, -0.006267547607421875, 0.00481414794921875, 0.0635986328125, -0.026458740234375, -0.0513916015625, -0.0673828125, 0.01515960693359375, -0.0128936767578125, -0.050506591796875, 0.01166534423828125, 0.040283203125, 0.07196044921875, -0.0208740234375, 0.049407958984375, -0.00612640380859375, 0.0189056396484375, -0.042694091796875, 0.07037353515625, -0.052490234375, 0.0224609375, 0.01134490966796875, -0.06744384765625, -0.0067291259765625, 0.0711669921875, -0.0255126953125, 0.04156494140625, 0.035369873046875, 0.058074951171875, -0.0219879150390625, -0.0019197463989257812, 0.0323486328125, 0.0258941650390625, 0.0438232421875, 0.0148468017578125, 0.051849365234375, -0.007293701171875, 0.031005859375, -0.0361328125, -0.01123046875, -0.033843994140625, -0.0394287109375, -0.050384521484375, -0.03582763671875, -0.00919342041015625, -0.0082550048828125, 0.027862548828125, 0.0703125, 0.0621337890625, -0.06622314453125, -0.0255126953125, -0.0152740478515625, -0.01438140869140625, -0.00101470947265625, -0.01027679443359375, 0.019195556640625, -0.03497314453125, -0.0340576171875, 0.011566162109375, 0.0171356201171875, 0.038726806640625, 
-0.015472412109375, -0.00856781005859375, -0.0169677734375, -0.0087738037109375, 0.00925445556640625, 0.0226898193359375, -0.059295654296875, -0.06597900390625, -0.01047515869140625, -0.0313720703125, -0.007022857666015625, 0.0299530029296875, -0.0255126953125, 0.023590087890625, 0.00943756103515625, 0.018798828125, 0.03173828125, -0.0215911865234375, 0.06195068359375, -0.02789306640625, 0.0020427703857421875, 0.040130615234375, 0.0382080078125, 0.00975799560546875, -0.0465087890625, 0.0506591796875, -0.007167816162109375, -0.0640869140625, -0.03582763671875, 0.006999969482421875, -0.052001953125, -0.042877197265625, 0.0460205078125, -0.0023021697998046875, -0.019500732421875, -0.0020923614501953125, -0.0293731689453125, 0.0310516357421875, -0.04449462890625, 0.07855224609375, 0.04217529296875, -0.035369873046875, 0.0159759521484375, -0.04437255859375, 0.04644775390625, 0.0168914794921875, -0.057586669921875, -0.014678955078125, 0.057464599609375, 0.03741455078125, 0.015869140625, 0.043182373046875, 0.002735137939453125, 0.027496337890625, 0.00494384765625, 0.01171112060546875, -0.01336669921875, -0.0131683349609375, -0.02764892578125, -0.006153106689453125, -0.001800537109375, -0.0352783203125 ] ]
KoboldAI/fairseq-dense-1.3B
2023-05-03T02:42:36.000Z
[ "transformers", "pytorch", "safetensors", "xglm", "text-generation", "en", "arxiv:2112.10684", "endpoints_compatible", "has_space", "region:us" ]
text-generation
KoboldAI
null
null
KoboldAI/fairseq-dense-1.3B
4
5,874
transformers
2022-03-02T23:29:04
--- language: en --- This is a Hugging Face transformers-compatible conversion of the original dense 1.3B-parameter model from the paper "[Efficient Large Scale Language Modeling with Mixtures of Experts](https://arxiv.org/abs/2112.10684)" from Artetxe et al. Please refer to the original model card, which can be found at https://github.com/facebookresearch/fairseq/blob/main/examples/moe_lm/model_card.md.
408
[ [ -0.05438232421875, -0.063720703125, 0.01409912109375, 0.038116455078125, -0.0211029052734375, -0.052337646484375, -0.01511383056640625, -0.0229339599609375, 0.037689208984375, 0.0645751953125, -0.0614013671875, -0.01436614990234375, -0.033050537109375, -0.025909423828125, -0.037841796875, 0.06781005859375, -0.004764556884765625, 0.007007598876953125, -0.015869140625, 0.00482177734375, 0.005466461181640625, -0.021453857421875, -0.0667724609375, -0.02777099609375, 0.042999267578125, 0.017425537109375, 0.072021484375, 0.034393310546875, 0.0300750732421875, 0.019012451171875, -0.0007500648498535156, -0.0316162109375, -0.043548583984375, -0.00933074951171875, -0.005023956298828125, -0.0224151611328125, -0.07403564453125, 0.036712646484375, 0.06378173828125, 0.05731201171875, -0.052490234375, 0.00885772705078125, -0.0036983489990234375, 0.04315185546875, -0.006671905517578125, 0.0010976791381835938, -0.047119140625, -0.001194000244140625, -0.0126800537109375, 0.0021820068359375, -0.052459716796875, -0.01123046875, -0.00455474853515625, -0.0261993408203125, 0.014923095703125, -0.00286102294921875, 0.08074951171875, 0.031707763671875, -0.0246429443359375, 0.0167999267578125, -0.05255126953125, 0.04119873046875, -0.0433349609375, 0.05218505859375, 0.009246826171875, 0.05181884765625, -0.0187530517578125, -0.051361083984375, -0.053955078125, 0.00928497314453125, 0.0180511474609375, 0.014373779296875, -0.016632080078125, 0.006969451904296875, 0.0138092041015625, 0.042022705078125, -0.020843505859375, -0.000713348388671875, -0.04510498046875, -0.005970001220703125, 0.0660400390625, 0.0029773712158203125, 0.01010894775390625, -0.010223388671875, -0.0538330078125, -0.0188140869140625, -0.031707763671875, -0.01242828369140625, 0.0183868408203125, 0.023773193359375, -0.04010009765625, 0.02862548828125, -0.01245880126953125, 0.052642822265625, 0.01096343994140625, -0.002323150634765625, 0.0174560546875, 0.0171966552734375, -0.0124359130859375, -0.01465606689453125, 
0.0494384765625, 0.045654296875, 0.03570556640625, -0.0036754608154296875, -0.008453369140625, -0.029022216796875, 0.03607177734375, -0.0894775390625, -0.0309295654296875, -0.01015472412109375, -0.047271728515625, -0.01416015625, 0.0164642333984375, -0.04901123046875, -0.0037822723388671875, -0.019134521484375, 0.0183868408203125, -0.025970458984375, -0.049652099609375, 0.01096343994140625, 0.020477294921875, 0.042572021484375, 0.020660400390625, -0.041748046875, 0.0208892822265625, 0.0280914306640625, 0.04437255859375, 0.001552581787109375, -0.00281524658203125, -0.0222015380859375, 0.0027332305908203125, -0.0020694732666015625, 0.048583984375, -0.0229339599609375, -0.031402587890625, -0.001087188720703125, 0.0010137557983398438, -0.00055694580078125, -0.043731689453125, 0.064208984375, -0.04656982421875, 0.00872802734375, 0.01372528076171875, -0.022369384765625, -0.0279083251953125, 0.0193634033203125, -0.0762939453125, 0.0865478515625, 0.05377197265625, -0.035797119140625, 0.014678955078125, -0.0313720703125, -0.002971649169921875, 0.0233306884765625, 0.00789642333984375, -0.024688720703125, 0.0219268798828125, 0.0040130615234375, 0.0404052734375, -0.027435302734375, 0.02459716796875, -0.0599365234375, -0.01250457763671875, 0.0195770263671875, -0.0197296142578125, 0.08404541015625, 0.024688720703125, 0.015777587890625, 0.0044708251953125, -0.05853271484375, 0.0012636184692382812, 0.0219268798828125, -0.01209259033203125, -0.01317596435546875, -0.0133056640625, 0.0135040283203125, 0.037841796875, 0.026458740234375, -0.033050537109375, 0.03546142578125, -0.00557708740234375, 0.005306243896484375, 0.026031494140625, -0.012176513671875, 0.0248565673828125, -0.0198516845703125, 0.04437255859375, 0.01055145263671875, 0.0204010009765625, -0.0003485679626464844, -0.04638671875, -0.05206298828125, -0.0487060546875, 0.0213165283203125, 0.0149078369140625, -0.046417236328125, 0.045135498046875, -0.0210723876953125, -0.0804443359375, -0.048095703125, 0.00336456298828125, 
-0.01416015625, 0.027374267578125, 0.018890380859375, -0.0157012939453125, -0.0310516357421875, -0.08758544921875, -0.00405120849609375, -0.0258026123046875, -0.00902557373046875, 0.02032470703125, 0.0068206787109375, -0.0467529296875, 0.07318115234375, -0.03094482421875, -0.0150909423828125, -0.01800537109375, -0.01204681396484375, 0.02728271484375, 0.0670166015625, 0.06451416015625, -0.0350341796875, -0.042510986328125, -0.01270294189453125, -0.04473876953125, -0.0263824462890625, 0.01336669921875, -0.034210205078125, 0.00856781005859375, 0.055450439453125, -0.057159423828125, 0.0212554931640625, 0.06842041015625, -0.026519775390625, 0.0243682861328125, 0.01453399658203125, -0.01505279541015625, -0.0938720703125, 0.01090240478515625, 0.01218414306640625, -0.0299530029296875, -0.046142578125, 0.04315185546875, 0.0186767578125, -0.00656890869140625, -0.0364990234375, 0.0714111328125, -0.027496337890625, 0.0265350341796875, -0.00445556640625, 0.01325225830078125, -0.01535797119140625, 0.0254974365234375, -0.012847900390625, 0.02374267578125, 0.06573486328125, -0.03497314453125, 0.045440673828125, 0.03802490234375, -0.00001627206802368164, 0.06634521484375, -0.0533447265625, 0.0152740478515625, -0.015167236328125, 0.02301025390625, -0.061004638671875, -0.0284881591796875, 0.020843505859375, -0.0125274658203125, 0.0310821533203125, -0.00128936767578125, -0.04315185546875, -0.026123046875, -0.004680633544921875, 0.053009033203125, 0.0628662109375, -0.04754638671875, 0.083251953125, 0.037445068359375, -0.039581298828125, 0.01232147216796875, -0.046966552734375, -0.0018949508666992188, -0.01296234130859375, -0.06341552734375, 0.024383544921875, -0.013885498046875, -0.0117340087890625, 0.0024929046630859375, 0.01476287841796875, -0.01549530029296875, -0.0105743408203125, 0.0094451904296875, 0.01102447509765625, -0.039031982421875, -0.029998779296875, 0.025054931640625, -0.007228851318359375, 0.0260467529296875, 0.020751953125, 0.0408935546875, -0.00021719932556152344, 
-0.0027141571044921875, -0.0372314453125, 0.03826904296875, 0.048248291015625, -0.001392364501953125, 0.06390380859375, 0.05670166015625, -0.039398193359375, -0.023773193359375, -0.05377197265625, -0.0230712890625, -0.03717041015625, 0.02374267578125, -0.040679931640625, -0.0433349609375, 0.0604248046875, 0.004329681396484375, -0.028472900390625, 0.050262451171875, 0.0296173095703125, 0.0173492431640625, 0.08514404296875, 0.042724609375, 0.005008697509765625, 0.037384033203125, -0.0004723072052001953, 0.01163482666015625, -0.060028076171875, -0.0247802734375, -0.0219268798828125, -0.032440185546875, -0.03704833984375, -0.03564453125, 0.0180206298828125, 0.0311431884765625, -0.00844573974609375, 0.03485107421875, -0.01171112060546875, 0.01197052001953125, 0.03778076171875, 0.02764892578125, 0.005096435546875, 0.0055694580078125, 0.0180206298828125, -0.008270263671875, -0.047821044921875, -0.0223541259765625, 0.046142578125, 0.0535888671875, 0.06353759765625, 0.0090789794921875, 0.039703369140625, -0.028778076171875, 0.0535888671875, -0.04937744140625, 0.060089111328125, -0.0094451904296875, -0.07550048828125, 0.01015472412109375, -0.04010009765625, -0.04522705078125, 0.01357269287109375, -0.0255889892578125, -0.049896240234375, -0.0035991668701171875, -0.00018346309661865234, -0.0107879638671875, 0.029754638671875, -0.055999755859375, 0.08428955078125, 0.0172271728515625, -0.006389617919921875, -0.01332855224609375, -0.0230560302734375, 0.041290283203125, 0.0024166107177734375, -0.01351165771484375, -0.01500701904296875, 0.00400543212890625, 0.04913330078125, -0.01403045654296875, 0.054412841796875, -0.029205322265625, -0.03411865234375, 0.032440185546875, 0.0210723876953125, 0.02947998046875, 0.00518798828125, -0.028167724609375, 0.036102294921875, -0.007289886474609375, -0.05047607421875, -0.04547119140625, 0.059173583984375, -0.054656982421875, -0.03509521484375, 0.004070281982421875, -0.054351806640625, -0.0095062255859375, 0.01092529296875, -0.0030059814453125, 
0.03228759765625, -0.020538330078125, 0.04180908203125, 0.031951904296875, 0.00745391845703125, 0.00827789306640625, 0.04052734375, -0.041412353515625, -0.0177459716796875, 0.03997802734375, -0.01554107666015625, 0.020751953125, 0.0034885406494140625, 0.004425048828125, -0.0188140869140625, -0.0272369384765625, -0.035919189453125, 0.034759521484375, -0.044158935546875, -0.0195465087890625, -0.043548583984375, -0.0467529296875, -0.0240478515625, -0.015167236328125, -0.041259765625, -0.007366180419921875, -0.02471923828125, 0.002178192138671875, 0.0290985107421875, 0.045318603515625, 0.01456451416015625, 0.0579833984375, -0.0667724609375, 0.0133819580078125, 0.026458740234375, 0.0638427734375, -0.006839752197265625, -0.0667724609375, -0.01514434814453125, -0.0027313232421875, -0.0129852294921875, -0.0771484375, 0.0195159912109375, 0.00812530517578125, 0.048828125, 0.04150390625, -0.0018453598022460938, 0.039886474609375, -0.0283203125, 0.04400634765625, 0.0008301734924316406, -0.055084228515625, -0.003826141357421875, -0.037506103515625, 0.00916290283203125, 0.04620361328125, 0.029998779296875, -0.02642822265625, -0.0238800048828125, -0.06744384765625, -0.0614013671875, 0.06341552734375, -0.005222320556640625, 0.030670166015625, 0.0010528564453125, 0.0401611328125, 0.0258636474609375, 0.0034046173095703125, -0.0546875, -0.0285491943359375, -0.004665374755859375, -0.04248046875, 0.01100921630859375, -0.039093017578125, 0.0154266357421875, -0.0275421142578125, 0.059356689453125, -0.01297760009765625, 0.02435302734375, -0.006519317626953125, 0.0010223388671875, -0.0144805908203125, -0.0227508544921875, 0.04193115234375, -0.0025463104248046875, -0.025054931640625, 0.00440216064453125, 0.00742340087890625, -0.0302581787109375, -0.033050537109375, 0.0313720703125, -0.007843017578125, -0.005115509033203125, 0.003711700439453125, 0.06890869140625, 0.032440185546875, -0.029327392578125, 0.0447998046875, 0.01012420654296875, -0.0115966796875, -0.041107177734375, 
0.0036220550537109375, 0.01064300537109375, 0.0174560546875, 0.02728271484375, 0.0174713134765625, 0.004283905029296875, -0.0283050537109375, 0.035919189453125, 0.037994384765625, -0.045196533203125, -0.045654296875, 0.0560302734375, 0.039794921875, -0.03857421875, 0.042724609375, -0.031494140625, -0.0212249755859375, 0.0108795166015625, 0.0433349609375, 0.06494140625, -0.046966552734375, 0.009765625, 0.0535888671875, 0.02288818359375, 0.0055389404296875, 0.01312255859375, 0.0041351318359375, -0.04461669921875, -0.03546142578125, -0.055999755859375, -0.023101806640625, 0.01038360595703125, -0.07049560546875, 0.0465087890625, -0.0277252197265625, 0.01132965087890625, -0.0235443115234375, -0.03436279296875, -0.044097900390625, 0.0198516845703125, 0.02325439453125, 0.09417724609375, -0.06365966796875, 0.06951904296875, 0.04412841796875, -0.01163482666015625, -0.0692138671875, 0.004093170166015625, -0.0235443115234375, -0.07421875, 0.0167236328125, 0.01229095458984375, 0.0160064697265625, 0.004215240478515625, -0.055999755859375, -0.0572509765625, 0.045318603515625, 0.046905517578125, -0.03631591796875, -0.0080718994140625, -0.00843048095703125, 0.03240966796875, -0.03485107421875, 0.03411865234375, 0.037689208984375, 0.031402587890625, 0.0126190185546875, -0.066162109375, 0.0235595703125, -0.042449951171875, 0.0138397216796875, 0.015594482421875, -0.0592041015625, 0.07550048828125, 0.00995635986328125, -0.0018415451049804688, 0.01849365234375, 0.08489990234375, 0.02886962890625, -0.0019435882568359375, 0.05181884765625, 0.044677734375, 0.01132965087890625, -0.009185791015625, 0.06890869140625, -0.028472900390625, 0.039886474609375, 0.046600341796875, -0.0224609375, 0.07342529296875, 0.049072265625, -0.01506805419921875, 0.0582275390625, 0.0185546875, -0.0009331703186035156, 0.01043701171875, -0.00514984130859375, -0.0027332305908203125, -0.032958984375, 0.0094757080078125, -0.036529541015625, 0.038787841796875, 0.026885986328125, -0.01213836669921875, 
-0.0258636474609375, -0.0171051025390625, 0.007080078125, 0.00920867919921875, -0.0278778076171875, 0.043701171875, 0.0036334991455078125, -0.044158935546875, 0.03961181640625, -0.0044403076171875, 0.04547119140625, -0.034759521484375, -0.0017576217651367188, 0.003452301025390625, 0.01282501220703125, -0.006023406982421875, -0.06219482421875, 0.036712646484375, -0.019622802734375, -0.01018524169921875, -0.0206451416015625, 0.0306854248046875, -0.058929443359375, -0.05572509765625, 0.03326416015625, 0.0178680419921875, 0.033416748046875, -0.026123046875, -0.0631103515625, 0.016326904296875, -0.007457733154296875, -0.04010009765625, 0.0007467269897460938, 0.03240966796875, 0.00977325439453125, 0.037017822265625, 0.0098419189453125, 0.0014801025390625, 0.01560211181640625, 0.0019521713256835938, 0.053253173828125, -0.053375244140625, -0.044586181640625, -0.033721923828125, 0.066650390625, -0.01172637939453125, -0.039154052734375, 0.037872314453125, 0.036285400390625, 0.05517578125, -0.028289794921875, 0.0194854736328125, -0.0079803466796875, 0.03277587890625, -0.03826904296875, 0.058013916015625, -0.06268310546875, -0.0246429443359375, -0.0245819091796875, -0.09320068359375, -0.0196685791015625, 0.0555419921875, 0.01245880126953125, 0.04345703125, 0.0428466796875, 0.06005859375, -0.01552581787109375, -0.0004210472106933594, 0.031646728515625, 0.038848876953125, 0.009124755859375, 0.01424407958984375, 0.02740478515625, -0.053497314453125, 0.0311279296875, -0.0162506103515625, -0.01617431640625, -0.03887939453125, -0.07550048828125, -0.075927734375, -0.062408447265625, -0.04962158203125, -0.027679443359375, -0.048553466796875, 0.0638427734375, 0.0755615234375, -0.047454833984375, 0.00328826904296875, 0.01020050048828125, -0.0238189697265625, 0.00528717041015625, -0.018218994140625, -0.0157928466796875, 0.015350341796875, -0.0736083984375, 0.0121917724609375, -0.0044708251953125, 0.0033206939697265625, -0.040283203125, -0.0136871337890625, 0.0128936767578125, 
0.022979736328125, 0.034515380859375, -0.005275726318359375, -0.054473876953125, -0.0236053466796875, -0.00902557373046875, -0.021331787109375, -0.0019550323486328125, 0.032470703125, -0.0343017578125, 0.000850677490234375, 0.02679443359375, 0.038299560546875, 0.045562744140625, 0.0032939910888671875, 0.0307769775390625, -0.06634521484375, 0.03814697265625, 0.00033974647521972656, 0.045562744140625, 0.0408935546875, -0.007396697998046875, 0.01593017578125, 0.01047515869140625, -0.0252685546875, -0.0679931640625, 0.0295257568359375, -0.1348876953125, 0.0174713134765625, 0.090576171875, 0.0200347900390625, -0.04736328125, 0.0291748046875, -0.0408935546875, 0.014556884765625, -0.0298614501953125, 0.037139892578125, 0.043487548828125, 0.03741455078125, -0.0460205078125, -0.02557373046875, -0.0000807046890258789, 0.025299072265625, -0.032623291015625, -0.0048675537109375, 0.007801055908203125, 0.011871337890625, 0.023712158203125, 0.03057861328125, -0.0309295654296875, 0.00438690185546875, 0.005786895751953125, 0.0643310546875, -0.0021991729736328125, -0.019866943359375, -0.0210113525390625, 0.007358551025390625, 0.0110626220703125, 0.0192718505859375 ] ]
Corianas/Quokka_590m
2023-07-18T06:30:53.000Z
[ "transformers", "pytorch", "tensorboard", "safetensors", "gpt2", "text-generation", "en", "dataset:the_pile", "dataset:guanaco/guanaco", "arxiv:1910.09700", "license:apache-2.0", "endpoints_compatible", "has_space", "text-generation-inference", "region:us" ]
text-generation
Corianas
null
null
Corianas/Quokka_590m
0
5,874
transformers
2023-04-07T04:18:55
--- license: apache-2.0 datasets: - the_pile - guanaco/guanaco language: - en --- # Model Card for Cerebras 590M Dollyfied. This is a finetuned model of Cerebras 590M model. using DataBricksLabs Dolly Framework ## Model Details ### Model Description This is a finetuned version of cerebras' 590million paramater model that has been trained to follow instructions. It was accomplished using DataBricks Dolly training tools, and was trained for 2 epochs. - **Developed by:** Finetuned by Corianas (me) using open source tools - **Shared by [optional]:** [More Information Needed] - **Model type:** [More Information Needed] - **Language(s) (NLP):** EN - **License:** cc-by-nc-4.0 - **Finetuned from model:** https://huggingface.co/cerebras/Cerebras-GPT-590m - **Finetuned using:** https://www.databricks.com/blog/2023/03/24/hello-dolly-democratizing-magic-chatgpt-open-models.html ## Uses This is a simple GPT chatbot that has been finetuned to understand instructions. Its knowledge about facts about the world is should be considered suspect at best. ### Direct Use If you have a use you put it to, Please let me know. [More Information Needed] ### Downstream Use [optional] <!-- This section is for the model use when fine-tuned for a task, or when plugged into a larger ecosystem/app --> [More Information Needed] ### Out-of-Scope Use Any form of use where any form of accuracy is needed. FOR THE LOVE OF GOD DO NOT FOLLOW MEDICAL ADVICE FROM THIS. or financial advice. [More Information Needed] ## Bias, Risks, and Limitations Limitations... Yes, I am sure there are so so many. [More Information Needed] ## How to Get Started with the Model Use the code below to get started with the model. [More Information Needed] ## Training Details ### Training Data <!-- This should link to a Data Card, perhaps with a short stub of information on what the training data is all about as well as documentation related to data pre-processing or additional filtering. 
--> [More Information Needed] ### Training Procedure <!-- This relates heavily to the Technical Specifications. Content here should link to that section when it is relevant to the training procedure. --> #### Preprocessing [optional] [More Information Needed] #### Training Hyperparameters - **Training regime:** [More Information Needed] <!--fp32, fp16 mixed precision, bf16 mixed precision, bf16 non-mixed precision, fp16 non-mixed precision, fp8 mixed precision --> #### Speeds, Sizes, Times [optional] <!-- This section provides information about throughput, start/end time, checkpoint size if relevant, etc. --> [More Information Needed] ## Evaluation <!-- This section describes the evaluation protocols and provides the results. --> ### Testing Data, Factors & Metrics #### Testing Data <!-- This should link to a Data Card if possible. --> [More Information Needed] #### Factors <!-- These are the things the evaluation is disaggregating by, e.g., subpopulations or domains. --> [More Information Needed] #### Metrics <!-- These are the evaluation metrics being used, ideally with a description of why. --> [More Information Needed] ### Results [More Information Needed] #### Summary ## Model Examination [optional] <!-- Relevant interpretability work for the model goes here --> [More Information Needed] ## Environmental Impact <!-- Total emissions (in grams of CO2eq) and additional considerations, such as electricity usage, go here. Edit the suggested text below accordingly --> Carbon emissions can be estimated using the [Machine Learning Impact calculator](https://mlco2.github.io/impact#compute) presented in [Lacoste et al. (2019)](https://arxiv.org/abs/1910.09700). - **Hardware Type:** 8xA100s (accomplished while I was downloading the model I was actually training.) 
- **Minutes used:** 7.5 - **Cloud Provider:** LambdaGPU - **Compute Region:** USA - **Carbon Emitted:** [More Information Needed] ## Technical Specifications [optional] ### Model Architecture and Objective [More Information Needed] ### Compute Infrastructure [More Information Needed] #### Hardware [More Information Needed] #### Software [More Information Needed] ## Citation [optional] <!-- If there is a paper or blog post introducing the model, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the model or model card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Model Card Authors [optional] [More Information Needed] ## Model Card Contact [More Information Needed]
4,765
[ [ -0.03985595703125, -0.06317138671875, 0.0196075439453125, 0.00455474853515625, -0.0159149169921875, -0.0279388427734375, 0.00662994384765625, -0.0303955078125, 0.0102691650390625, 0.05169677734375, -0.043731689453125, -0.037506103515625, -0.046783447265625, -0.0133819580078125, -0.025054931640625, 0.092041015625, 0.01485443115234375, 0.017242431640625, -0.025421142578125, 0.016754150390625, -0.03704833984375, -0.056304931640625, -0.051300048828125, -0.027984619140625, 0.0247650146484375, 0.026031494140625, 0.05780029296875, 0.051513671875, 0.050262451171875, 0.0252532958984375, -0.031219482421875, -0.0093536376953125, -0.041229248046875, -0.038482666015625, -0.01160430908203125, -0.01824951171875, -0.06463623046875, 0.002285003662109375, 0.042510986328125, 0.045623779296875, -0.01186370849609375, 0.042022705078125, 0.004608154296875, 0.035491943359375, -0.04669189453125, 0.026275634765625, -0.04327392578125, 0.00763702392578125, -0.0038356781005859375, -0.005878448486328125, -0.024169921875, -0.019683837890625, -0.006404876708984375, -0.043060302734375, 0.02264404296875, 0.0051727294921875, 0.080078125, 0.01392364501953125, -0.03009033203125, -0.0188140869140625, -0.0672607421875, 0.053497314453125, -0.04248046875, 0.0169677734375, 0.03338623046875, 0.0360107421875, -0.01380157470703125, -0.0635986328125, -0.03131103515625, -0.0258026123046875, -0.009857177734375, 0.0180816650390625, -0.01306915283203125, 0.00945281982421875, 0.047943115234375, 0.041290283203125, -0.048095703125, -0.00939178466796875, -0.0528564453125, -0.0203399658203125, 0.054840087890625, 0.036865234375, -0.003391265869140625, -0.0239105224609375, -0.032989501953125, -0.02301025390625, -0.0283660888671875, 0.00989532470703125, 0.036773681640625, 0.0162200927734375, -0.034881591796875, 0.046844482421875, -0.01491546630859375, 0.059967041015625, -0.0016775131225585938, -0.005615234375, 0.018890380859375, -0.0294647216796875, -0.034912109375, -0.007381439208984375, 0.05133056640625, 
0.0238189697265625, -0.0118408203125, 0.00769805908203125, -0.0095367431640625, -0.001613616943359375, 0.029388427734375, -0.06817626953125, -0.033416748046875, 0.0232696533203125, -0.043426513671875, -0.0220489501953125, -0.004268646240234375, -0.07098388671875, -0.0007724761962890625, -0.04168701171875, 0.03955078125, -0.0272369384765625, -0.0160980224609375, 0.0015926361083984375, -0.01389312744140625, 0.0195465087890625, 0.02459716796875, -0.06793212890625, 0.0364990234375, 0.04119873046875, 0.046966552734375, 0.0105133056640625, -0.00669097900390625, -0.00371551513671875, -0.004024505615234375, -0.0037059783935546875, 0.039031982421875, -0.0274200439453125, -0.0487060546875, -0.0158538818359375, 0.0216827392578125, -0.008697509765625, -0.0256195068359375, 0.057098388671875, -0.0360107421875, 0.0226898193359375, -0.00968170166015625, -0.049896240234375, -0.033599853515625, 0.02447509765625, -0.05596923828125, 0.08709716796875, 0.004589080810546875, -0.062469482421875, 0.009002685546875, -0.06524658203125, -0.007785797119140625, 0.0080108642578125, 0.013458251953125, -0.040618896484375, -0.01324462890625, -0.002223968505859375, 0.044403076171875, -0.0264434814453125, 0.022247314453125, -0.021820068359375, -0.0155487060546875, -0.007404327392578125, -0.0240478515625, 0.08465576171875, 0.024871826171875, -0.00893402099609375, 0.004230499267578125, -0.076171875, 0.0083770751953125, 0.0180206298828125, -0.0280914306640625, 0.01422882080078125, -0.0252532958984375, 0.035919189453125, 0.01503753662109375, 0.0305328369140625, -0.0298004150390625, 0.005802154541015625, -0.0019483566284179688, 0.0216064453125, 0.032684326171875, 0.0137176513671875, 0.00815582275390625, -0.0311737060546875, 0.047088623046875, -0.00704193115234375, 0.055999755859375, 0.0103759765625, -0.048095703125, -0.0634765625, -0.0019102096557617188, 0.0253448486328125, 0.041656494140625, -0.0193634033203125, 0.055572509765625, -0.00370025634765625, -0.07464599609375, -0.020843505859375, 
-0.0016345977783203125, 0.033905029296875, 0.052154541015625, 0.040679931640625, -0.0193939208984375, -0.04364013671875, -0.06878662109375, 0.0174560546875, 0.0025959014892578125, 0.005401611328125, 0.027313232421875, 0.068359375, -0.01490020751953125, 0.0577392578125, -0.047882080078125, -0.004734039306640625, -0.0105133056640625, 0.00981903076171875, 0.0139923095703125, 0.053802490234375, 0.04144287109375, -0.06964111328125, -0.01361083984375, -0.0179595947265625, -0.04833984375, 0.025421142578125, -0.004177093505859375, -0.01194000244140625, -0.005893707275390625, 0.0216522216796875, -0.044097900390625, 0.047332763671875, 0.0428466796875, -0.0193939208984375, 0.047088623046875, -0.0269775390625, 0.0024051666259765625, -0.081787109375, 0.02923583984375, 0.0022182464599609375, -0.0094757080078125, -0.03533935546875, -0.0010576248168945312, 0.0003440380096435547, -0.0261077880859375, -0.05718994140625, 0.0445556640625, -0.0225067138671875, -0.0022602081298828125, -0.0235137939453125, -0.009490966796875, 0.004199981689453125, 0.0511474609375, 0.017974853515625, 0.03460693359375, 0.039276123046875, -0.0633544921875, 0.0273590087890625, 0.029205322265625, -0.017669677734375, 0.0302581787109375, -0.062469482421875, 0.01520538330078125, -0.005859375, 0.0262603759765625, -0.047576904296875, -0.025726318359375, 0.0250396728515625, -0.029144287109375, 0.02569580078125, -0.0150909423828125, -0.03289794921875, -0.039581298828125, 0.007785797119140625, 0.0194091796875, 0.047637939453125, -0.0249481201171875, 0.0312347412109375, 0.037200927734375, 0.0163421630859375, -0.02264404296875, -0.035614013671875, -0.0014104843139648438, -0.023162841796875, -0.04150390625, 0.03350830078125, -0.009429931640625, 0.0032482147216796875, -0.01178741455078125, 0.01910400390625, -0.0214691162109375, 0.0063629150390625, 0.033294677734375, 0.0291748046875, 0.00914764404296875, 0.0088348388671875, -0.018890380859375, -0.0181884765625, 0.01302337646484375, 0.001689910888671875, 0.033203125, 
-0.015472412109375, -0.00994110107421875, -0.052001953125, 0.0197601318359375, 0.033966064453125, -0.00621795654296875, 0.05877685546875, 0.053314208984375, -0.052520751953125, 0.0145111083984375, -0.034393310546875, -0.0179290771484375, -0.033233642578125, 0.0280609130859375, -0.017578125, -0.0294036865234375, 0.03497314453125, 0.0012388229370117188, -0.0002951622009277344, 0.06378173828125, 0.051055908203125, 0.007068634033203125, 0.07086181640625, 0.053497314453125, 0.01241302490234375, 0.036407470703125, -0.048797607421875, -0.002460479736328125, -0.0673828125, -0.038726806640625, -0.061767578125, -0.0078582763671875, -0.04638671875, -0.027618408203125, 0.0117034912109375, 0.01439666748046875, -0.047943115234375, 0.046630859375, -0.048492431640625, 0.01500701904296875, 0.03955078125, 0.0228118896484375, 0.001251220703125, -0.0220489501953125, 0.0037384033203125, 0.007480621337890625, -0.05426025390625, -0.055419921875, 0.08441162109375, 0.040771484375, 0.048492431640625, -0.01201629638671875, 0.04364013671875, 0.0178070068359375, 0.019927978515625, -0.04034423828125, 0.045867919921875, 0.0024509429931640625, -0.072509765625, -0.0051116943359375, -0.019622802734375, -0.0699462890625, 0.00553131103515625, -0.02618408203125, -0.06524658203125, 0.0206451416015625, 0.0265045166015625, -0.0276641845703125, 0.0186920166015625, -0.050872802734375, 0.0966796875, -0.0300140380859375, -0.0223541259765625, -0.0182647705078125, -0.05084228515625, 0.029388427734375, 0.0009465217590332031, 0.01123809814453125, -0.007114410400390625, 0.01346588134765625, 0.0693359375, -0.058624267578125, 0.0657958984375, -0.029296875, 0.030181884765625, 0.03680419921875, -0.023651123046875, 0.044219970703125, -0.0030612945556640625, -0.00958251953125, 0.028564453125, 0.013641357421875, -0.04095458984375, -0.0238189697265625, 0.0465087890625, -0.066650390625, -0.0242767333984375, -0.035797119140625, -0.04278564453125, -0.0087738037109375, 0.002727508544921875, 0.0265045166015625, 
0.02130126953125, -0.026031494140625, 0.0189056396484375, 0.049163818359375, -0.01424407958984375, 0.02130126953125, 0.021026611328125, -0.01284027099609375, -0.0308837890625, 0.05645751953125, 0.0007543563842773438, 0.0192413330078125, 0.0280609130859375, 0.01727294921875, -0.0380859375, -0.036773681640625, -0.0260467529296875, 0.003398895263671875, -0.052886962890625, -0.00852203369140625, -0.062286376953125, -0.0268096923828125, -0.0311126708984375, -0.0006246566772460938, -0.0308380126953125, -0.0286407470703125, -0.043121337890625, -0.007495880126953125, 0.045623779296875, 0.03997802734375, -0.00604248046875, 0.04852294921875, -0.0562744140625, 0.00769805908203125, 0.027313232421875, 0.003780364990234375, 0.001659393310546875, -0.050262451171875, -0.027923583984375, 0.01971435546875, -0.054290771484375, -0.0601806640625, 0.0252532958984375, -0.0026378631591796875, 0.03570556640625, 0.026458740234375, -0.005680084228515625, 0.057830810546875, -0.019073486328125, 0.0762939453125, 0.0202178955078125, -0.057830810546875, 0.047607421875, -0.0287933349609375, 0.007328033447265625, 0.04327392578125, 0.044921875, -0.0166168212890625, 0.006961822509765625, -0.07733154296875, -0.046417236328125, 0.048858642578125, 0.02471923828125, 0.015869140625, 0.00830078125, 0.0308685302734375, -0.010467529296875, 0.0140838623046875, -0.068603515625, -0.0187530517578125, -0.023284912109375, 0.0101318359375, -0.00624847412109375, -0.00972747802734375, -0.01506805419921875, -0.044525146484375, 0.07159423828125, 0.0123291015625, 0.035064697265625, 0.0006971359252929688, 0.00562286376953125, 0.0116729736328125, 0.002994537353515625, 0.03509521484375, 0.046875, -0.04656982421875, -0.0270538330078125, 0.0204010009765625, -0.052215576171875, -0.0022335052490234375, 0.0182952880859375, -0.0105133056640625, -0.0006122589111328125, 0.005290985107421875, 0.07293701171875, 0.0156097412109375, -0.0274200439453125, 0.024688720703125, 0.0022258758544921875, -0.0203704833984375, -0.037261962890625, 
0.0096435546875, 0.01395416259765625, 0.002124786376953125, -0.007282257080078125, 0.007656097412109375, 0.0177154541015625, -0.03656005859375, 0.00421142578125, 0.027191162109375, -0.039581298828125, -0.00492095947265625, 0.07281494140625, 0.0222625732421875, -0.034332275390625, 0.0511474609375, -0.016448974609375, -0.01904296875, 0.08056640625, 0.036041259765625, 0.061370849609375, -0.011322021484375, 0.011810302734375, 0.055084228515625, 0.01549530029296875, 0.002471923828125, 0.0232696533203125, -0.00006091594696044922, -0.028045654296875, -0.01027679443359375, -0.0584716796875, -0.045074462890625, 0.0244903564453125, -0.068603515625, 0.0516357421875, -0.04144287109375, -0.01800537109375, 0.0249481201171875, 0.013763427734375, -0.0838623046875, 0.049041748046875, 0.0095977783203125, 0.0806884765625, -0.0682373046875, 0.07098388671875, 0.05316162109375, -0.045562744140625, -0.06353759765625, -0.03131103515625, -0.00226593017578125, -0.0546875, 0.0286407470703125, 0.006740570068359375, 0.0184783935546875, -0.0035381317138671875, -0.03936767578125, -0.0596923828125, 0.0999755859375, 0.0148468017578125, -0.056915283203125, 0.01342010498046875, 0.005634307861328125, 0.04229736328125, -0.032470703125, 0.053680419921875, 0.03387451171875, 0.033294677734375, 0.0276031494140625, -0.0628662109375, 0.0005345344543457031, -0.0120697021484375, 0.005954742431640625, 0.00455474853515625, -0.057708740234375, 0.06756591796875, -0.01110076904296875, 0.00902557373046875, 0.00858306884765625, 0.0345458984375, 0.00749969482421875, 0.0305023193359375, 0.0308074951171875, 0.053619384765625, 0.0626220703125, 0.0030269622802734375, 0.08984375, -0.0439453125, 0.052734375, 0.0927734375, 0.004669189453125, 0.0626220703125, 0.020782470703125, -0.03826904296875, 0.0210723876953125, 0.08197021484375, -0.024810791015625, 0.02618408203125, 0.024169921875, -0.00811004638671875, -0.0220489501953125, -0.01214599609375, -0.0413818359375, 0.028564453125, 0.0260467529296875, -0.047088623046875, 
-0.018280029296875, -0.0027313232421875, 0.00333404541015625, -0.02728271484375, -0.03314208984375, 0.04547119140625, -0.004253387451171875, -0.0380859375, 0.045806884765625, 0.01032257080078125, 0.0193328857421875, -0.058929443359375, -0.01473236083984375, -0.0012054443359375, 0.01202392578125, -0.0309906005859375, -0.03631591796875, 0.03076171875, -0.0033931732177734375, -0.0234375, -0.0043182373046875, 0.042816162109375, -0.01268768310546875, -0.061309814453125, 0.020538330078125, 0.0158233642578125, 0.038726806640625, -0.0095672607421875, -0.08123779296875, -0.0015392303466796875, 0.00347137451171875, -0.01255035400390625, 0.01971435546875, -0.0026378631591796875, 0.002105712890625, 0.03668212890625, 0.042022705078125, -0.0137786865234375, -0.0015764236450195312, -0.006320953369140625, 0.068603515625, -0.053619384765625, -0.030181884765625, -0.037628173828125, 0.053497314453125, -0.01279449462890625, -0.042236328125, 0.051116943359375, 0.0638427734375, 0.05743408203125, -0.012451171875, 0.05230712890625, -0.0179290771484375, 0.0238037109375, -0.0300750732421875, 0.033447265625, -0.040985107421875, 0.0007357597351074219, -0.019989013671875, -0.082275390625, 0.0018215179443359375, 0.035491943359375, -0.02923583984375, 0.0099334716796875, 0.042236328125, 0.0740966796875, -0.01110076904296875, 0.0307769775390625, 0.02471923828125, 0.01097869873046875, 0.007709503173828125, 0.036865234375, 0.042388916015625, -0.057098388671875, 0.04437255859375, -0.044677734375, -0.0295562744140625, -0.01558685302734375, -0.07098388671875, -0.055633544921875, -0.0364990234375, -0.05145263671875, -0.028289794921875, 0.006855010986328125, 0.055023193359375, 0.0673828125, -0.057525634765625, -0.0224609375, -0.01812744140625, -0.0033283233642578125, -0.020965576171875, -0.0169525146484375, 0.04376220703125, -0.007411956787109375, -0.0650634765625, 0.0015163421630859375, -0.01480865478515625, 0.0203399658203125, -0.0234832763671875, -0.0101470947265625, -0.022735595703125, 
-0.0032863616943359375, 0.0301361083984375, 0.030029296875, -0.03662109375, -0.01456451416015625, -0.0169525146484375, -0.004848480224609375, -0.004726409912109375, 0.04296875, -0.04296875, 0.0355224609375, 0.0276031494140625, 0.0189056396484375, 0.0667724609375, 0.0027103424072265625, 0.0186920166015625, -0.0267486572265625, 0.0157470703125, 0.02642822265625, 0.0362548828125, 0.01348114013671875, -0.04461669921875, 0.039398193359375, 0.0239715576171875, -0.0648193359375, -0.039031982421875, -0.007534027099609375, -0.0899658203125, -0.0009965896606445312, 0.0892333984375, -0.005077362060546875, -0.022491455078125, -0.010345458984375, -0.0271453857421875, 0.009979248046875, -0.01464080810546875, 0.04925537109375, 0.0545654296875, -0.0228271484375, 0.011566162109375, -0.060943603515625, 0.038421630859375, 0.0107269287109375, -0.0731201171875, -0.0123748779296875, 0.04522705078125, 0.023681640625, 0.01348876953125, 0.0313720703125, -0.0154266357421875, 0.0216217041015625, 0.025665283203125, 0.03143310546875, -0.0019989013671875, -0.0234832763671875, -0.0270843505859375, 0.004680633544921875, -0.0153656005859375, -0.044342041015625 ] ]
Corianas/590m
2023-03-31T14:50:59.000Z
[ "transformers", "pytorch", "tensorboard", "safetensors", "gpt2", "text-generation", "en", "dataset:tatsu-lab/alpaca", "arxiv:1910.09700", "license:cc-by-nc-4.0", "endpoints_compatible", "has_space", "text-generation-inference", "region:us" ]
text-generation
Corianas
null
null
Corianas/590m
0
5,873
transformers
2023-03-29T14:38:42
--- license: cc-by-nc-4.0 datasets: - tatsu-lab/alpaca language: - en --- # Model Card for Model ID This is a finetuned model of Cerebras 590M model using DataBricksLabs Dolly Framework ## Model Details ### Model Description <!-- Provide a longer summary of what this model is. --> - **Developed by:** Finetuned by Corianas (me) using open source tools - **Shared by [optional]:** [More Information Needed] - **Model type:** [More Information Needed] - **Language(s) (NLP):** EN - **License:** cc-by-nc-4.0 - **Finetuned from model [optional]:** https://huggingface.co/cerebras/Cerebras-GPT-590M ## Uses This is a simple GPT chatbot that has been finetuned to understand instructions. Its knowledge about facts about the world is should be considered almost non-existant. ### Direct Use <!-- This section is for the model use without fine-tuning or plugging into a larger ecosystem/app. --> [More Information Needed] ### Downstream Use [optional] <!-- This section is for the model use when fine-tuned for a task, or when plugged into a larger ecosystem/app --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the model will not work well for. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users (both direct and downstream) should be made aware of the risks, biases and limitations of the model. More information needed for further recommendations. ## How to Get Started with the Model Use the code below to get started with the model. 
[More Information Needed] ## Training Details ### Training Data <!-- This should link to a Data Card, perhaps with a short stub of information on what the training data is all about as well as documentation related to data pre-processing or additional filtering. --> [More Information Needed] ### Training Procedure <!-- This relates heavily to the Technical Specifications. Content here should link to that section when it is relevant to the training procedure. --> #### Preprocessing [optional] [More Information Needed] #### Training Hyperparameters - **Training regime:** [More Information Needed] <!--fp32, fp16 mixed precision, bf16 mixed precision, bf16 non-mixed precision, fp16 non-mixed precision, fp8 mixed precision --> #### Speeds, Sizes, Times [optional] <!-- This section provides information about throughput, start/end time, checkpoint size if relevant, etc. --> [More Information Needed] ## Evaluation <!-- This section describes the evaluation protocols and provides the results. --> ### Testing Data, Factors & Metrics #### Testing Data <!-- This should link to a Data Card if possible. --> [More Information Needed] #### Factors <!-- These are the things the evaluation is disaggregating by, e.g., subpopulations or domains. --> [More Information Needed] #### Metrics <!-- These are the evaluation metrics being used, ideally with a description of why. --> [More Information Needed] ### Results Instruction: Write a love letter to Edgar Allan Poe. Dear Edgar, I hope you are doing well. As a person of your age and background, you are always learning new things and never too sure of your own best skills. May your future brighten up your days with the learning you have to offer. I can't wait to read your book and see the way it influenced you. Thanks for the inspiration. It is so much fun to explore your special talents. Sincerely, ----------- Instruction: Write a tweet announcing Dolly, a large language model from Databricks. 
Dolly is the world's first massive language model, available now in India. #IndiaDolly #Databricks ----------- Instruction: I'm selling my Nikon D-750, write a short blurb for my ad. I'm selling my Nikon D-750, someone whose dedication to imaging means he/she is always ready to answer questions, be creative and take chances on what really matters. A custom-designed top hat, a wide-angle lens, and an affordable ultrabuck remote offer the ultimate in quality. He/She can even go as far as to order a custom-made outdoor halogen pass. And if he/She is more than willing to take on any challenge, there is no doubt that with a creative lens, personal shots, and a custom top hat, photography is at its peak. As a satisfied customer, I would ----------- Instruction: Explain to me the difference between nuclear fission and fusion. Nuclear fission is a process that occurs during the production of nuclear fuel, while fusion is a process that occurs in the fusion industry. Nuclear fission is used to create the active ingredient in nuclear fuel, such as fission products, fusion plutonium, and decontamination services. Fusion requires a lower pressure in the fusion process and is more flexible compared to nuclear fission. Nuclear fission fuses with fission products, fission plutonium, and decontamination services. ----------- Instruction: Give me a list of 5 science fiction books I should read next. 1. How to Translate a Face into a System 2. How to Fly a Surrealist 3. How to Survive an Army Wildlife Hunt 4. How to Strike an iPhone 5. How to Teach a Hackathon ----------- #### Summary ## Model Examination [optional] <!-- Relevant interpretability work for the model goes here --> [More Information Needed] ## Environmental Impact <!-- Total emissions (in grams of CO2eq) and additional considerations, such as electricity usage, go here. 
Edit the suggested text below accordingly --> Carbon emissions can be estimated using the [Machine Learning Impact calculator](https://mlco2.github.io/impact#compute) presented in [Lacoste et al. (2019)](https://arxiv.org/abs/1910.09700). - **Hardware Type:** [More Information Needed] - **Hours used:** [More Information Needed] - **Cloud Provider:** [More Information Needed] - **Compute Region:** [More Information Needed] - **Carbon Emitted:** [More Information Needed] ## Technical Specifications [optional] ### Model Architecture and Objective [More Information Needed] ### Compute Infrastructure [More Information Needed] #### Hardware [More Information Needed] #### Software [More Information Needed] ## Citation [optional] <!-- If there is a paper or blog post introducing the model, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the model or model card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Model Card Authors [optional] [More Information Needed] ## Model Card Contact [More Information Needed]
6,956
[ [ -0.0283203125, -0.066650390625, 0.028594970703125, 0.0113372802734375, -0.0088958740234375, -0.00875091552734375, 0.011688232421875, -0.037322998046875, 0.017486572265625, 0.037078857421875, -0.043060302734375, -0.040618896484375, -0.04296875, -0.005859375, -0.042633056640625, 0.07342529296875, 0.007305145263671875, 0.01385498046875, -0.03143310546875, 0.01812744140625, -0.037841796875, -0.0447998046875, -0.03778076171875, -0.018768310546875, 0.02960205078125, 0.0169677734375, 0.05194091796875, 0.046875, 0.049346923828125, 0.0263519287109375, -0.027435302734375, -0.0118560791015625, -0.03753662109375, -0.02001953125, -0.0169677734375, -0.02532958984375, -0.05364990234375, 0.0019855499267578125, 0.0308990478515625, 0.049407958984375, -0.00091552734375, 0.035919189453125, -0.00812530517578125, 0.04962158203125, -0.04620361328125, 0.01776123046875, -0.035614013671875, 0.01055145263671875, -0.00756072998046875, 0.0011758804321289062, -0.02520751953125, -0.029327392578125, -0.01508331298828125, -0.058135986328125, 0.0176849365234375, 0.0159912109375, 0.0849609375, 0.0020313262939453125, -0.0197601318359375, -0.01439666748046875, -0.07342529296875, 0.058319091796875, -0.0282745361328125, 0.016204833984375, 0.0367431640625, 0.0357666015625, -0.0157012939453125, -0.04998779296875, -0.049041748046875, -0.02178955078125, -0.0082855224609375, 0.0202484130859375, -0.0160064697265625, -0.0032806396484375, 0.043731689453125, 0.050933837890625, -0.053436279296875, -0.01629638671875, -0.054107666015625, -0.0155181884765625, 0.06024169921875, 0.0297698974609375, 0.0155181884765625, -0.02264404296875, -0.03106689453125, -0.023773193359375, -0.0262298583984375, 0.00858306884765625, 0.044036865234375, 0.007572174072265625, -0.033935546875, 0.047698974609375, -0.0210418701171875, 0.053253173828125, 0.0024318695068359375, -0.0059967041015625, 0.01358795166015625, -0.030548095703125, -0.030181884765625, -0.01873779296875, 0.05316162109375, 0.036834716796875, -0.0011053085327148438, 
0.004428863525390625, -0.0014009475708007812, 0.00846099853515625, 0.0222015380859375, -0.050933837890625, -0.0239715576171875, 0.0303192138671875, -0.05322265625, -0.0379638671875, -0.0178070068359375, -0.07501220703125, -0.00940704345703125, -0.0284423828125, 0.03521728515625, -0.021392822265625, -0.032318115234375, 0.00667572021484375, -0.0251617431640625, 0.02166748046875, 0.024566650390625, -0.07574462890625, 0.034698486328125, 0.031280517578125, 0.0531005859375, 0.0088958740234375, -0.006927490234375, 0.00966644287109375, -0.0017480850219726562, -0.01476287841796875, 0.049163818359375, -0.033233642578125, -0.050933837890625, -0.0101470947265625, 0.00942230224609375, -0.0017786026000976562, -0.0207672119140625, 0.04534912109375, -0.043853759765625, 0.02386474609375, -0.0170440673828125, -0.040374755859375, -0.0282135009765625, 0.0189361572265625, -0.053436279296875, 0.0860595703125, 0.006389617919921875, -0.05718994140625, 0.009613037109375, -0.05706787109375, -0.0264434814453125, 0.006809234619140625, -0.0000807642936706543, -0.0227813720703125, -0.01287841796875, 0.00199127197265625, 0.04278564453125, -0.034271240234375, 0.017913818359375, -0.03082275390625, -0.007450103759765625, -0.00008553266525268555, -0.01324462890625, 0.081787109375, 0.0289306640625, -0.00960540771484375, -0.00472259521484375, -0.0826416015625, 0.002094268798828125, 0.0269927978515625, -0.0198516845703125, 0.004344940185546875, -0.0243377685546875, 0.04241943359375, 0.0153656005859375, 0.0240631103515625, -0.041717529296875, 0.01367950439453125, -0.00931549072265625, 0.02044677734375, 0.0435791015625, 0.0005660057067871094, 0.0219573974609375, -0.0352783203125, 0.052459716796875, -0.00522613525390625, 0.043701171875, -0.0037136077880859375, -0.043853759765625, -0.0562744140625, 0.0007643699645996094, 0.0171051025390625, 0.04730224609375, -0.025604248046875, 0.050537109375, -0.00640106201171875, -0.0711669921875, -0.0232086181640625, 0.00017940998077392578, 0.03289794921875, 
0.054840087890625, 0.036651611328125, -0.016357421875, -0.0423583984375, -0.06634521484375, 0.0031070709228515625, -0.006267547607421875, 0.01494598388671875, 0.02630615234375, 0.057861328125, -0.00815582275390625, 0.064208984375, -0.0655517578125, -0.01354217529296875, -0.0272369384765625, -0.011688232421875, 0.016998291015625, 0.050628662109375, 0.044464111328125, -0.07763671875, -0.02691650390625, -0.00670623779296875, -0.06158447265625, 0.0272369384765625, 0.0016374588012695312, -0.01021575927734375, 0.0015354156494140625, 0.0258331298828125, -0.053558349609375, 0.044677734375, 0.0299072265625, -0.0164947509765625, 0.0394287109375, -0.021484375, 0.007488250732421875, -0.088623046875, 0.0254974365234375, 0.01123046875, -0.007167816162109375, -0.03692626953125, -0.002685546875, -0.0090484619140625, -0.0302886962890625, -0.04541015625, 0.05419921875, -0.02215576171875, 0.005496978759765625, -0.015777587890625, -0.005771636962890625, 0.0148162841796875, 0.043701171875, 0.0098114013671875, 0.025604248046875, 0.0517578125, -0.05908203125, 0.04144287109375, 0.033782958984375, -0.028045654296875, 0.038726806640625, -0.057037353515625, 0.004894256591796875, -0.01081085205078125, 0.03155517578125, -0.058685302734375, -0.0262298583984375, 0.02484130859375, -0.034637451171875, 0.023223876953125, -0.0008554458618164062, -0.038543701171875, -0.04791259765625, 0.003047943115234375, 0.01116943359375, 0.052581787109375, -0.0201568603515625, 0.04107666015625, 0.0382080078125, 0.006866455078125, -0.03546142578125, -0.04888916015625, -0.00021064281463623047, -0.016571044921875, -0.05194091796875, 0.033660888671875, -0.0132904052734375, -0.01331329345703125, -0.00531768798828125, 0.01464080810546875, -0.0269927978515625, 0.0024051666259765625, 0.0321044921875, 0.02685546875, 0.01291656494140625, 0.01453399658203125, -0.0116424560546875, -0.0183563232421875, -0.0006256103515625, 0.00286865234375, 0.028350830078125, -0.00597381591796875, -0.00464630126953125, -0.05078125, 
0.03216552734375, 0.041046142578125, 0.003925323486328125, 0.04522705078125, 0.052215576171875, -0.04254150390625, 0.00621795654296875, -0.043853759765625, -0.01543426513671875, -0.0352783203125, 0.0276947021484375, -0.02398681640625, -0.0279388427734375, 0.040191650390625, -0.0014696121215820312, -0.0103302001953125, 0.06402587890625, 0.045654296875, -0.014312744140625, 0.06597900390625, 0.06439208984375, 0.0012807846069335938, 0.032806396484375, -0.04840087890625, 0.01380157470703125, -0.059814453125, -0.029541015625, -0.050994873046875, -0.0140380859375, -0.04632568359375, -0.0219879150390625, 0.00742340087890625, -0.0002570152282714844, -0.03192138671875, 0.049774169921875, -0.053924560546875, 0.0261688232421875, 0.0352783203125, 0.01384735107421875, 0.01483917236328125, -0.016265869140625, -0.004878997802734375, 0.00951385498046875, -0.0526123046875, -0.053497314453125, 0.0823974609375, 0.03594970703125, 0.0638427734375, -0.01151275634765625, 0.036041259765625, 0.0213165283203125, 0.0024814605712890625, -0.03924560546875, 0.050384521484375, 0.00024056434631347656, -0.08203125, -0.007221221923828125, -0.00955963134765625, -0.0653076171875, 0.0091705322265625, -0.0195159912109375, -0.06158447265625, 0.032806396484375, 0.0177459716796875, -0.03790283203125, 0.020355224609375, -0.055145263671875, 0.08270263671875, -0.0311279296875, -0.0287322998046875, -0.0258331298828125, -0.05242919921875, 0.028533935546875, 0.0102691650390625, 0.0285491943359375, -0.0171356201171875, 0.01398468017578125, 0.05682373046875, -0.04034423828125, 0.0711669921875, -0.0174713134765625, 0.0250091552734375, 0.03106689453125, -0.00970458984375, 0.030364990234375, 0.0010776519775390625, -0.00955963134765625, 0.017974853515625, 0.0197906494140625, -0.032867431640625, -0.0272216796875, 0.053436279296875, -0.064697265625, -0.032501220703125, -0.047119140625, -0.04473876953125, 0.0023937225341796875, 0.01396942138671875, 0.0253143310546875, 0.0243072509765625, -0.03143310546875, 
0.00543975830078125, 0.0460205078125, -0.0279388427734375, 0.03289794921875, 0.024261474609375, -0.026580810546875, -0.0276641845703125, 0.0648193359375, 0.01142120361328125, 0.0205841064453125, 0.038177490234375, 0.01520538330078125, -0.0294952392578125, -0.0269622802734375, -0.027191162109375, 0.007476806640625, -0.061798095703125, -0.00394439697265625, -0.06976318359375, -0.0157470703125, -0.04034423828125, -0.02227783203125, -0.02655029296875, -0.034698486328125, -0.041534423828125, -0.01151275634765625, 0.05023193359375, 0.048492431640625, -0.005584716796875, 0.028656005859375, -0.046844482421875, 0.0197601318359375, 0.02239990234375, 0.011138916015625, 0.005474090576171875, -0.033172607421875, -0.021392822265625, 0.005352020263671875, -0.057342529296875, -0.057159423828125, 0.030426025390625, -0.0032291412353515625, 0.0283203125, 0.018798828125, 0.0004000663757324219, 0.05316162109375, -0.02264404296875, 0.08123779296875, 0.0204315185546875, -0.0625, 0.050994873046875, -0.035491943359375, 0.0080718994140625, 0.04461669921875, 0.047332763671875, -0.029052734375, -0.001384735107421875, -0.0726318359375, -0.058319091796875, 0.042633056640625, 0.0189361572265625, 0.029693603515625, 0.0106964111328125, 0.0257415771484375, -0.005584716796875, 0.018829345703125, -0.07904052734375, -0.0214691162109375, -0.0168609619140625, 0.004947662353515625, 0.00638580322265625, -0.002384185791015625, -0.020294189453125, -0.0477294921875, 0.06439208984375, 0.0179901123046875, 0.03411865234375, 0.010833740234375, 0.005107879638671875, 0.00013184547424316406, 0.0068817138671875, 0.04229736328125, 0.05120849609375, -0.033050537109375, -0.0220184326171875, 0.0142974853515625, -0.047637939453125, 0.00791168212890625, 0.006561279296875, -0.00829315185546875, -0.0018758773803710938, 0.00859832763671875, 0.063720703125, 0.01244354248046875, -0.03143310546875, 0.032958984375, 0.0057525634765625, -0.01190948486328125, -0.0433349609375, 0.01413726806640625, 0.01183319091796875, 
0.0023670196533203125, 0.010406494140625, 0.01491546630859375, 0.00868988037109375, -0.0418701171875, -0.0012140274047851562, 0.02801513671875, -0.031890869140625, -0.00977325439453125, 0.0692138671875, 0.03192138671875, -0.031829833984375, 0.038330078125, -0.0193023681640625, -0.01409149169921875, 0.07684326171875, 0.0367431640625, 0.06597900390625, -0.0060272216796875, 0.0218505859375, 0.04827880859375, 0.0220794677734375, -0.0003769397735595703, 0.0302734375, -0.0006966590881347656, -0.032867431640625, -0.0052490234375, -0.058135986328125, -0.0400390625, 0.0150604248046875, -0.052490234375, 0.0518798828125, -0.04376220703125, -0.0100860595703125, 0.023040771484375, 0.0034885406494140625, -0.0736083984375, 0.04034423828125, -0.006076812744140625, 0.07720947265625, -0.07269287109375, 0.061126708984375, 0.041473388671875, -0.06298828125, -0.055328369140625, -0.02685546875, 0.0059051513671875, -0.0494384765625, 0.045654296875, 0.003650665283203125, 0.0180511474609375, -0.00019228458404541016, -0.041748046875, -0.06927490234375, 0.09912109375, 0.01128387451171875, -0.053131103515625, 0.0068817138671875, -0.0015249252319335938, 0.049224853515625, -0.03790283203125, 0.0494384765625, 0.024200439453125, 0.034271240234375, 0.0248870849609375, -0.058013916015625, -0.00007933378219604492, -0.0179443359375, 0.0132904052734375, -0.0098876953125, -0.071044921875, 0.06036376953125, -0.0146331787109375, -0.004058837890625, 0.00988006591796875, 0.029754638671875, -0.006015777587890625, 0.034027099609375, 0.0222320556640625, 0.053009033203125, 0.05596923828125, 0.0022945404052734375, 0.1024169921875, -0.035186767578125, 0.04241943359375, 0.0765380859375, -0.0118865966796875, 0.0653076171875, 0.016021728515625, -0.046417236328125, 0.0362548828125, 0.0771484375, -0.02447509765625, 0.0279388427734375, 0.0209808349609375, -0.0122222900390625, -0.01259613037109375, -0.0122833251953125, -0.044342041015625, 0.0240631103515625, 0.0168609619140625, -0.047515869140625, -0.011444091796875, 
0.01216888427734375, 0.0019779205322265625, -0.01070404052734375, -0.02081298828125, 0.04827880859375, 0.00885772705078125, -0.031036376953125, 0.04034423828125, -0.004001617431640625, 0.0260772705078125, -0.053558349609375, -0.00595855712890625, -0.0006542205810546875, 0.01436614990234375, -0.036865234375, -0.04522705078125, 0.03277587890625, -0.00531005859375, -0.038055419921875, -0.0239715576171875, 0.053009033203125, -0.01335906982421875, -0.06689453125, 0.0262908935546875, 0.01111602783203125, 0.0287322998046875, 0.003582000732421875, -0.071044921875, 0.00829315185546875, -0.0028591156005859375, -0.011383056640625, 0.0186004638671875, -0.01056671142578125, 0.01080322265625, 0.043731689453125, 0.04302978515625, -0.007266998291015625, -0.011444091796875, -0.009185791015625, 0.06805419921875, -0.057891845703125, -0.0291595458984375, -0.039459228515625, 0.0601806640625, -0.017852783203125, -0.040618896484375, 0.054595947265625, 0.05718994140625, 0.060028076171875, -0.020660400390625, 0.06890869140625, -0.0224151611328125, 0.034912109375, -0.01971435546875, 0.045501708984375, -0.04107666015625, 0.00423431396484375, -0.03082275390625, -0.07928466796875, -0.0134124755859375, 0.042633056640625, -0.03228759765625, 0.0133514404296875, 0.054351806640625, 0.07049560546875, 0.0012302398681640625, 0.0222015380859375, 0.031280517578125, 0.0218658447265625, 0.01468658447265625, 0.023345947265625, 0.04241943359375, -0.06280517578125, 0.04254150390625, -0.03759765625, -0.03253173828125, -0.01396942138671875, -0.0667724609375, -0.052154541015625, -0.056549072265625, -0.0501708984375, -0.0294647216796875, 0.00018835067749023438, 0.05340576171875, 0.055694580078125, -0.04876708984375, -0.02410888671875, -0.01520538330078125, 0.0160064697265625, -0.0203704833984375, -0.0198516845703125, 0.03277587890625, -0.0126953125, -0.06610107421875, 0.01068878173828125, 0.0086517333984375, 0.01885986328125, -0.03338623046875, 0.01092529296875, -0.0300445556640625, -0.00021755695343017578, 
0.0418701171875, 0.034820556640625, -0.0457763671875, -0.00576019287109375, -0.0002663135528564453, -0.005420684814453125, -0.005489349365234375, 0.042022705078125, -0.03936767578125, 0.046112060546875, 0.037811279296875, 0.022705078125, 0.059783935546875, -0.01184844970703125, 0.031890869140625, -0.01904296875, 0.0115509033203125, 0.0231170654296875, 0.02642822265625, 0.01702880859375, -0.0550537109375, 0.04058837890625, 0.035980224609375, -0.05841064453125, -0.05078125, 0.0035266876220703125, -0.0950927734375, -0.0090484619140625, 0.0924072265625, 0.00624847412109375, -0.0251312255859375, -0.0199127197265625, -0.0228729248046875, 0.00215911865234375, -0.01324462890625, 0.05804443359375, 0.06927490234375, -0.0196380615234375, 0.010528564453125, -0.054931640625, 0.0382080078125, 0.01488494873046875, -0.069091796875, -0.0076751708984375, 0.038482666015625, 0.01898193359375, 0.02264404296875, 0.0421142578125, -0.0192108154296875, 0.02032470703125, 0.0223541259765625, 0.028594970703125, 0.00982666015625, -0.0177764892578125, -0.02471923828125, 0.013275146484375, -0.015625, -0.033782958984375 ] ]
Corianas/1.3b
2023-06-23T22:03:50.000Z
[ "transformers", "pytorch", "tensorboard", "safetensors", "gpt2", "text-generation", "en", "dataset:tatsu-lab/alpaca", "arxiv:1910.09700", "license:cc-by-nc-4.0", "endpoints_compatible", "has_space", "text-generation-inference", "region:us" ]
text-generation
Corianas
null
null
Corianas/1.3b
2
5,872
transformers
2023-03-29T14:28:32
--- license: cc-by-nc-4.0 datasets: - tatsu-lab/alpaca language: - en --- # Model Card for Cerebras 1.3b Dollyfied. This is a finetuned model of Cerebras 1.3B model using DataBricksLabs Dolly Framework # Example output: Instruction: Write a love letter to Edgar Allan Poe. Edgar Allan Poe is here again, with a brand new treasure of tales for you to enjoy. First, a man's life as a fallen angel, then his lonely falls into the sea, and finally a journey through the wild, exciting lands of the American West. He also carries a mysterious message for us: never stop searching for our own sweet. Thank you, Edgar Allan Poe! ----------- Instruction: Write a tweet announcing Dolly, a large language model from Databricks. Dolly a brand new model from Databricks that has been thoroughly validated and improved with feedback from experts and users! #databricks #languagemodels #databrickslanguage ----------- Instruction: I'm selling my Nikon D-750, write a short blurb for my ad. We bring you the revolutionary Nikon D-750 - the perfect camera for any shoot. With the Nikon D-750, you'll be able to achieve stunning images with state-of-the-art features and brilliant images. The Nikon D-750 is the perfect camera for everyday work, and it produces brilliant, vibrant images with spectacular clarity. Whether you’re a professional, a hobbyist, or an enthusiast, the Nikon D-750 is the perfect camera for you. Get yours today! ----------- Instruction: Explain to me the difference between nuclear fission and fusion. Nuclear fission and fusion are both processes that use nuclear material and energy to generate heat. Fission, which occurs at high enough temperatures, produces either a proton or a neutron with the same energy level. Fusion, on the other hand, relies on a hydrogen atom and is considered to be the most stable process. Nuclear fission and fusion produce different amounts of heat, which is why nuclear fusion can be used to generate heat energy for a variety of applications. 
----------- Instruction: Give me a list of 5 science fiction books I should read next. 1. The coming of the Red Planet by Guy Adams 2. The Hitchhiker's Guide to the Galaxy by Douglas Adams 3. Out of the Ashes by Douglas Adams 4. The Hitchhiker's Guide to the Galaxy episode "The Ancient of Days" 5. Icy Girls by J.R.R. Tolkien ----------- ## Model Details ### Model Description This is a finetuned version of cerebras' 1.3Billion paramater model that has been trained to follow instructions. It was accomplished using DataBricks Dolly training tools. - **Developed by:** Finetuned by Corianas (me) using open source tools - **Shared by [optional]:** [More Information Needed] - **Model type:** [More Information Needed] - **Language(s) (NLP):** EN - **License:** cc-by-nc-4.0 - **Finetuned from model:** https://huggingface.co/cerebras/Cerebras-GPT-1.3b - **Finetuned using:** https://www.databricks.com/blog/2023/03/24/hello-dolly-democratizing-magic-chatgpt-open-models.html ## Uses This is a simple GPT chatbot that has been finetuned to understand instructions. Its knowledge about facts about the world is should be considered suspect at best. ### Direct Use <!-- This section is for the model use without fine-tuning or plugging into a larger ecosystem/app. --> [More Information Needed] ### Downstream Use [optional] <!-- This section is for the model use when fine-tuned for a task, or when plugged into a larger ecosystem/app --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the model will not work well for. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. 
--> Users (both direct and downstream) should be made aware of the risks, biases and limitations of the model. More information needed for further recommendations. ## How to Get Started with the Model Use the code below to get started with the model. [More Information Needed] ## Training Details ### Training Data <!-- This should link to a Data Card, perhaps with a short stub of information on what the training data is all about as well as documentation related to data pre-processing or additional filtering. --> [More Information Needed] ### Training Procedure <!-- This relates heavily to the Technical Specifications. Content here should link to that section when it is relevant to the training procedure. --> #### Preprocessing [optional] [More Information Needed] #### Training Hyperparameters - **Training regime:** [More Information Needed] <!--fp32, fp16 mixed precision, bf16 mixed precision, bf16 non-mixed precision, fp16 non-mixed precision, fp8 mixed precision --> #### Speeds, Sizes, Times [optional] <!-- This section provides information about throughput, start/end time, checkpoint size if relevant, etc. --> [More Information Needed] ## Evaluation <!-- This section describes the evaluation protocols and provides the results. --> ### Testing Data, Factors & Metrics #### Testing Data <!-- This should link to a Data Card if possible. --> [More Information Needed] #### Factors <!-- These are the things the evaluation is disaggregating by, e.g., subpopulations or domains. --> [More Information Needed] #### Metrics <!-- These are the evaluation metrics being used, ideally with a description of why. --> [More Information Needed] ### Results [More Information Needed] #### Summary ## Model Examination [optional] <!-- Relevant interpretability work for the model goes here --> [More Information Needed] ## Environmental Impact <!-- Total emissions (in grams of CO2eq) and additional considerations, such as electricity usage, go here. 
Edit the suggested text below accordingly --> Carbon emissions can be estimated using the [Machine Learning Impact calculator](https://mlco2.github.io/impact#compute) presented in [Lacoste et al. (2019)](https://arxiv.org/abs/1910.09700). - **Hardware Type:** 8xA100 - **Hours used:** 30mins - **Cloud Provider:** Lambda GPU ## Technical Specifications [optional] ### Model Architecture and Objective GPT2 Cerebras-GPT 1.3B Layers 24 n_embd 2048 Heads 16 #### Hardware 8xA100s #### Software https://github.com/databrickslabs/dolly ## Model Card Contact [More Information Needed]
6,520
[ [ -0.03607177734375, -0.06732177734375, 0.02581787109375, 0.0100860595703125, -0.0163421630859375, -0.015716552734375, 0.004360198974609375, -0.0271148681640625, 0.01070404052734375, 0.03851318359375, -0.04669189453125, -0.04058837890625, -0.047271728515625, -0.0144195556640625, -0.040069580078125, 0.08343505859375, 0.0006647109985351562, 0.0101318359375, -0.019073486328125, 0.01099395751953125, -0.039886474609375, -0.041656494140625, -0.0465087890625, -0.018890380859375, 0.032470703125, 0.0148468017578125, 0.061309814453125, 0.050567626953125, 0.050079345703125, 0.02410888671875, -0.0210418701171875, -0.010009765625, -0.038482666015625, -0.02398681640625, -0.00804901123046875, -0.01548004150390625, -0.05389404296875, 0.00464630126953125, 0.03594970703125, 0.045501708984375, -0.00997161865234375, 0.039306640625, 0.003810882568359375, 0.05426025390625, -0.049102783203125, 0.02783203125, -0.043060302734375, 0.01323699951171875, -0.0042877197265625, -0.0033931732177734375, -0.020904541015625, -0.028594970703125, -0.00959014892578125, -0.0565185546875, 0.023773193359375, 0.01073455810546875, 0.07659912109375, 0.01477813720703125, -0.023651123046875, -0.01241302490234375, -0.0736083984375, 0.0560302734375, -0.033538818359375, 0.02783203125, 0.037078857421875, 0.03741455078125, -0.0182342529296875, -0.056640625, -0.052520751953125, -0.032989501953125, -0.00555419921875, 0.018707275390625, -0.0133056640625, 0.0036468505859375, 0.039581298828125, 0.043212890625, -0.053619384765625, -0.0149993896484375, -0.04656982421875, -0.01080322265625, 0.054290771484375, 0.03753662109375, 0.0045013427734375, -0.01806640625, -0.0302734375, -0.0211639404296875, -0.02838134765625, 0.0138702392578125, 0.04095458984375, 0.01200103759765625, -0.0295562744140625, 0.039581298828125, -0.02044677734375, 0.06005859375, 0.006805419921875, -0.00737762451171875, 0.01328277587890625, -0.0301513671875, -0.0298309326171875, -0.01268768310546875, 0.053436279296875, 0.037506103515625, 
-0.009246826171875, 0.002105712890625, -0.0015211105346679688, 0.00411224365234375, 0.02581787109375, -0.0535888671875, -0.034027099609375, 0.0257110595703125, -0.048919677734375, -0.0416259765625, -0.01548004150390625, -0.07354736328125, -0.0088348388671875, -0.0291748046875, 0.038909912109375, -0.0294036865234375, -0.0240325927734375, 0.00783538818359375, -0.0205535888671875, 0.0107574462890625, 0.0298309326171875, -0.0718994140625, 0.042083740234375, 0.030792236328125, 0.053131103515625, 0.01102447509765625, -0.016021728515625, -0.004528045654296875, -0.002826690673828125, -0.0162506103515625, 0.03485107421875, -0.036224365234375, -0.041656494140625, -0.01018524169921875, 0.0119171142578125, 0.0003237724304199219, -0.016632080078125, 0.045745849609375, -0.0413818359375, 0.025634765625, -0.0260162353515625, -0.0438232421875, -0.0321044921875, 0.020263671875, -0.0517578125, 0.0865478515625, 0.01044464111328125, -0.05670166015625, 0.0248870849609375, -0.061553955078125, -0.013671875, -0.00048804283142089844, 0.0021572113037109375, -0.0262603759765625, -0.0207672119140625, 0.00951385498046875, 0.042510986328125, -0.0401611328125, 0.0183258056640625, -0.0274658203125, -0.01119232177734375, -0.0080413818359375, -0.0229644775390625, 0.07000732421875, 0.023651123046875, -0.0149688720703125, -0.00708770751953125, -0.076171875, -0.0012264251708984375, 0.024688720703125, -0.0237884521484375, 0.0157012939453125, -0.0142974853515625, 0.0296630859375, 0.01296234130859375, 0.0215606689453125, -0.04327392578125, 0.004878997802734375, -0.0022735595703125, 0.0272979736328125, 0.042633056640625, 0.0066680908203125, 0.0214691162109375, -0.0372314453125, 0.045654296875, 0.00525665283203125, 0.049224853515625, 0.00478363037109375, -0.042755126953125, -0.05963134765625, -0.0006508827209472656, 0.02020263671875, 0.043701171875, -0.03192138671875, 0.049102783203125, -0.007076263427734375, -0.0662841796875, -0.0208587646484375, 0.003337860107421875, 0.0287933349609375, 0.06195068359375, 
0.04010009765625, -0.00994873046875, -0.038848876953125, -0.06439208984375, 0.01146697998046875, -0.00910186767578125, 0.01131439208984375, 0.027008056640625, 0.05303955078125, -0.0012073516845703125, 0.06304931640625, -0.05828857421875, -0.00962066650390625, -0.02435302734375, -0.0142974853515625, 0.0157928466796875, 0.050750732421875, 0.04425048828125, -0.07391357421875, -0.0259552001953125, -0.00789642333984375, -0.0640869140625, 0.0369873046875, 0.0009446144104003906, -0.0034942626953125, -0.0005593299865722656, 0.0228118896484375, -0.055145263671875, 0.046051025390625, 0.027252197265625, -0.0070037841796875, 0.037200927734375, -0.02044677734375, 0.01491546630859375, -0.0797119140625, 0.016876220703125, 0.01021575927734375, -0.006679534912109375, -0.03277587890625, -0.01297760009765625, -0.005481719970703125, -0.029449462890625, -0.05279541015625, 0.046966552734375, -0.0179443359375, 0.004680633544921875, -0.00959014892578125, -0.006237030029296875, 0.0082244873046875, 0.047149658203125, 0.0047760009765625, 0.037872314453125, 0.0548095703125, -0.066650390625, 0.043212890625, 0.03765869140625, -0.024688720703125, 0.026092529296875, -0.053558349609375, 0.010009765625, -0.01209259033203125, 0.03656005859375, -0.05401611328125, -0.02386474609375, 0.0235595703125, -0.0311279296875, 0.0287322998046875, -0.01035308837890625, -0.034881591796875, -0.050262451171875, -0.0002701282501220703, 0.0152130126953125, 0.058441162109375, -0.025543212890625, 0.0279083251953125, 0.034027099609375, 0.01004791259765625, -0.034027099609375, -0.03729248046875, 0.004314422607421875, -0.024658203125, -0.0521240234375, 0.0292205810546875, -0.0013799667358398438, -0.0063018798828125, -0.00531768798828125, 0.005710601806640625, -0.0248870849609375, -0.00457000732421875, 0.025909423828125, 0.0253448486328125, 0.0096588134765625, 0.01045989990234375, -0.01201629638671875, -0.016021728515625, 0.0021190643310546875, -0.01393890380859375, 0.0291748046875, -0.010986328125, -0.0052490234375, 
-0.046295166015625, 0.0252227783203125, 0.039703369140625, 0.006855010986328125, 0.051513671875, 0.053619384765625, -0.0467529296875, 0.0099945068359375, -0.04400634765625, -0.019989013671875, -0.035736083984375, 0.0287322998046875, -0.01971435546875, -0.030426025390625, 0.04095458984375, 0.00537872314453125, -0.005977630615234375, 0.057037353515625, 0.037109375, -0.01035308837890625, 0.0732421875, 0.05609130859375, 0.0008072853088378906, 0.032562255859375, -0.046142578125, 0.0117950439453125, -0.0628662109375, -0.027252197265625, -0.044677734375, -0.014801025390625, -0.0517578125, -0.0173492431640625, 0.0181121826171875, 0.01035308837890625, -0.0377197265625, 0.0523681640625, -0.0457763671875, 0.0289306640625, 0.0382080078125, 0.0241241455078125, 0.0016155242919921875, -0.019134521484375, 0.003925323486328125, 0.00482940673828125, -0.059112548828125, -0.05596923828125, 0.0941162109375, 0.03546142578125, 0.058013916015625, -0.0174407958984375, 0.042388916015625, 0.0238037109375, -0.0012922286987304688, -0.02960205078125, 0.0469970703125, -0.00589752197265625, -0.07257080078125, -0.0162200927734375, -0.0142974853515625, -0.06512451171875, -0.00004988908767700195, -0.0103607177734375, -0.06890869140625, 0.023040771484375, 0.0187225341796875, -0.038299560546875, 0.03155517578125, -0.049591064453125, 0.0858154296875, -0.0281829833984375, -0.03570556640625, -0.0234375, -0.05206298828125, 0.027801513671875, 0.0027561187744140625, 0.01195526123046875, -0.00859832763671875, 0.017791748046875, 0.05706787109375, -0.05389404296875, 0.06097412109375, -0.0292816162109375, 0.025421142578125, 0.03662109375, -0.005916595458984375, 0.040252685546875, 0.01103973388671875, -0.01393890380859375, 0.0245208740234375, 0.0146942138671875, -0.039520263671875, -0.03302001953125, 0.055328369140625, -0.0601806640625, -0.036956787109375, -0.03668212890625, -0.0433349609375, -0.0016880035400390625, -0.002521514892578125, 0.0318603515625, 0.02801513671875, -0.0268707275390625, 
0.00812530517578125, 0.0477294921875, -0.029266357421875, 0.032989501953125, 0.0238037109375, -0.01605224609375, -0.029052734375, 0.061492919921875, 0.009429931640625, 0.023162841796875, 0.030975341796875, 0.013458251953125, -0.0360107421875, -0.035736083984375, -0.035797119140625, -0.00861358642578125, -0.060394287109375, -0.00860595703125, -0.0654296875, -0.0210418701171875, -0.033050537109375, -0.0134735107421875, -0.0316162109375, -0.03668212890625, -0.032928466796875, -0.01355743408203125, 0.05169677734375, 0.04833984375, -0.00504302978515625, 0.031829833984375, -0.058746337890625, 0.0158538818359375, 0.028350830078125, 0.011077880859375, -0.003269195556640625, -0.038604736328125, -0.0200347900390625, 0.01580810546875, -0.06103515625, -0.05096435546875, 0.023590087890625, 0.0029010772705078125, 0.0330810546875, 0.01238250732421875, -0.00115966796875, 0.04998779296875, -0.0234832763671875, 0.08740234375, 0.018341064453125, -0.058074951171875, 0.055755615234375, -0.04412841796875, 0.0036602020263671875, 0.045806884765625, 0.044921875, -0.019195556640625, -0.0022125244140625, -0.07269287109375, -0.05303955078125, 0.05267333984375, 0.0292510986328125, 0.0230865478515625, 0.0038909912109375, 0.0260162353515625, 0.00608062744140625, 0.025909423828125, -0.07159423828125, -0.01617431640625, -0.02447509765625, 0.012542724609375, -0.0034160614013671875, 0.00652313232421875, -0.0172119140625, -0.04388427734375, 0.06170654296875, 0.016571044921875, 0.0311279296875, 0.0120391845703125, 0.00415802001953125, -0.0015172958374023438, 0.0031414031982421875, 0.0347900390625, 0.0546875, -0.042938232421875, -0.0255126953125, 0.0242462158203125, -0.05218505859375, 0.0129241943359375, 0.01261138916015625, -0.00913238525390625, -0.003162384033203125, 0.00736236572265625, 0.072021484375, 0.006103515625, -0.0178985595703125, 0.0285797119140625, 0.00121307373046875, -0.0229339599609375, -0.033203125, 0.0156402587890625, 0.00592803955078125, 0.0075225830078125, -0.0008411407470703125, 
0.01432037353515625, 0.00971221923828125, -0.038543701171875, 0.0008153915405273438, 0.018402099609375, -0.035247802734375, -0.0128936767578125, 0.0731201171875, 0.0270538330078125, -0.024658203125, 0.0482177734375, -0.0237274169921875, -0.02215576171875, 0.078125, 0.0323486328125, 0.07073974609375, -0.000034749507904052734, 0.0244598388671875, 0.048004150390625, 0.0221099853515625, -0.0016431808471679688, 0.0272064208984375, -0.0032978057861328125, -0.0308837890625, -0.0021915435791015625, -0.0601806640625, -0.036773681640625, 0.02362060546875, -0.054840087890625, 0.045867919921875, -0.04876708984375, -0.01220703125, 0.0201263427734375, 0.00540924072265625, -0.085205078125, 0.043121337890625, -0.0018873214721679688, 0.07489013671875, -0.0687255859375, 0.0640869140625, 0.042388916015625, -0.0601806640625, -0.06011962890625, -0.0172119140625, 0.00225830078125, -0.05084228515625, 0.05120849609375, 0.005054473876953125, 0.0237579345703125, 0.0037403106689453125, -0.03631591796875, -0.06671142578125, 0.1036376953125, 0.01715087890625, -0.056427001953125, 0.0160980224609375, 0.00982666015625, 0.043853759765625, -0.033447265625, 0.04266357421875, 0.03143310546875, 0.037506103515625, 0.0206756591796875, -0.06219482421875, 0.00605010986328125, -0.0123291015625, 0.0089263916015625, 0.006618499755859375, -0.06982421875, 0.0643310546875, -0.0149078369140625, 0.002655029296875, 0.0046234130859375, 0.0287322998046875, -0.005847930908203125, 0.0369873046875, 0.0289764404296875, 0.061309814453125, 0.057037353515625, -0.0005192756652832031, 0.10125732421875, -0.0426025390625, 0.040863037109375, 0.079345703125, -0.0189971923828125, 0.05316162109375, 0.01523590087890625, -0.050262451171875, 0.02276611328125, 0.07916259765625, -0.0202789306640625, 0.039459228515625, 0.024169921875, -0.0181884765625, -0.017547607421875, -0.00476837158203125, -0.04443359375, 0.0255126953125, 0.0133056640625, -0.03839111328125, -0.016387939453125, 0.00685882568359375, -0.007556915283203125, 
-0.02166748046875, -0.0246734619140625, 0.046051025390625, 0.002223968505859375, -0.039215087890625, 0.0478515625, -0.00960540771484375, 0.01480865478515625, -0.054443359375, -0.007904052734375, -0.006198883056640625, 0.0132598876953125, -0.0302276611328125, -0.04644775390625, 0.026092529296875, -0.0006356239318847656, -0.0306243896484375, -0.015350341796875, 0.049530029296875, -0.00821685791015625, -0.0577392578125, 0.0245361328125, 0.01346588134765625, 0.0282135009765625, -0.00507354736328125, -0.06842041015625, 0.0013246536254882812, 0.002033233642578125, -0.01824951171875, 0.0229034423828125, -0.00262451171875, 0.00591278076171875, 0.04522705078125, 0.041839599609375, -0.016815185546875, -0.006595611572265625, -0.005008697509765625, 0.0777587890625, -0.0548095703125, -0.0252838134765625, -0.045867919921875, 0.05743408203125, -0.0086212158203125, -0.040863037109375, 0.05438232421875, 0.056427001953125, 0.060272216796875, -0.01410675048828125, 0.068359375, -0.0222930908203125, 0.025421142578125, -0.032196044921875, 0.039306640625, -0.041015625, 0.00681304931640625, -0.0318603515625, -0.089111328125, 0.002716064453125, 0.03558349609375, -0.0271453857421875, 0.017486572265625, 0.056640625, 0.07177734375, -0.007213592529296875, 0.033599853515625, 0.0290069580078125, 0.00787353515625, 0.01308441162109375, 0.028594970703125, 0.051422119140625, -0.0687255859375, 0.0467529296875, -0.039947509765625, -0.0298004150390625, -0.0031528472900390625, -0.07000732421875, -0.05181884765625, -0.047882080078125, -0.050323486328125, -0.026580810546875, 0.01384735107421875, 0.05438232421875, 0.06451416015625, -0.048065185546875, -0.0184478759765625, -0.0311279296875, -0.0009784698486328125, -0.0162353515625, -0.0178985595703125, 0.0271453857421875, -0.0179901123046875, -0.056793212890625, 0.014984130859375, 0.0064544677734375, 0.01806640625, -0.026611328125, -0.00048279762268066406, -0.0218353271484375, -0.004878997802734375, 0.0306549072265625, 0.02581787109375, -0.037445068359375, 
-0.0171051025390625, -0.003887176513671875, 0.004375457763671875, -0.002155303955078125, 0.041015625, -0.042816162109375, 0.034515380859375, 0.03546142578125, 0.0234375, 0.0694580078125, -0.01265716552734375, 0.03155517578125, -0.0269927978515625, 0.01313018798828125, 0.0262603759765625, 0.0284271240234375, 0.0146942138671875, -0.05169677734375, 0.04217529296875, 0.0217742919921875, -0.06640625, -0.047882080078125, 0.001880645751953125, -0.087158203125, -0.0023784637451171875, 0.09173583984375, -0.0038204193115234375, -0.01800537109375, -0.00948333740234375, -0.019378662109375, 0.01055908203125, -0.0187225341796875, 0.05999755859375, 0.06781005859375, -0.0167388916015625, 0.0118255615234375, -0.0633544921875, 0.03863525390625, 0.01464080810546875, -0.06451416015625, -0.003963470458984375, 0.037200927734375, 0.023651123046875, 0.010833740234375, 0.040435791015625, -0.01322174072265625, 0.0194091796875, 0.028106689453125, 0.0187225341796875, 0.0010061264038085938, -0.021636962890625, -0.0225982666015625, 0.0035953521728515625, -0.021575927734375, -0.045135498046875 ] ]
Fredithefish/CrimsonPajama
2023-05-20T08:50:58.000Z
[ "transformers", "pytorch", "gpt_neox", "text-generation", "code", "en", "dataset:Fredithefish/GPTeacher-for-RedPajama-Chat", "arxiv:2106.09685", "license:apache-2.0", "has_space", "text-generation-inference", "region:us" ]
text-generation
Fredithefish
null
null
Fredithefish/CrimsonPajama
4
5,872
transformers
2023-05-18T11:22:20
--- license: apache-2.0 datasets: - Fredithefish/GPTeacher-for-RedPajama-Chat language: - en tags: - code inference: false --- # CrimsonPajama This model was finetuned 1 epoch on <a href="https://huggingface.co/datasets/Fredithefish/GPTeacher-for-RedPajama-Chat">the GPTeacher dataset</a> using <a href="https://arxiv.org/abs/2106.09685">LoRA</a>. A 2 epoch finetuned model will be released in the coming days. ## Model Details - **Base Model**: <a href="https://huggingface.co/togethercomputer/RedPajama-INCITE-Chat-3B-v1">RedPajama-INCITE-Chat-3B</a> - **Training Data**: <a href="https://huggingface.co/datasets/Fredithefish/GPTeacher-for-RedPajama-Chat">GPTeacher dataset</a> - **Model type**: <a href="https://en.m.wikipedia.org/wiki/Language_model">Language Model</a> - **License**: <a href="https://www.apache.org/licenses/LICENSE-2.0">Apache 2.0</a> - **Training Time**: 2:20:53 hours on NVIDIA Tesla T4 Finetune Code for RedPajama-Chat in notebook format can be found <a href="https://github.com/fredi-python/Fine-tune-RedPajama-Chat-3B">here</a> ## About the dataset A collection of modular datasets generated by GPT-4, General-Instruct - Roleplay-Instruct - Code-Instruct - and Toolformer # Comparison with RedPajama ### Roleplay <style> table { border-collapse: collapse; width: 100%; } th, td { border: 1px solid black; padding: 8px; text-align: left; } th { background-color: #f2f2f2; } col.right-border { border-right: 1px solid black; } </style> <table> <colgroup> <col> <col class="right-border"> </colgroup> <tr> <th>RedPajama</th> <th>CrimsonPajama</th> </tr> <tr> <td>I want you to act like a pirate.</td> <td>I want you to act like a pirate.</td> </tr> <tr> <td>Hai, matey! Why do you look so sad? There’s treasure everywhere!</td> <td>Ahoy, mateys! What be ye lookin’ fer? Today, we be takin’ ‘n adventure an’ need a little sassin’ and swashbucklin’. What’s yer plan, starvin’ mutineer? Pssst, follow me and ye’ll find riches an’ some old treasure sure to bring yer blazes. 
Together, we’ll laugh ‘till we’re blue in the mouth an’ sail the seas, smilin’ at every fickle sky.</td> </tr> </table> <table> <colgroup> <col> <col class="right-border"> </colgroup> <tr> <th>RedPajama</th> <th>CrimsonPajama</th> </tr> <tr> <td>I want you to act like a waiter at a fancy restaurant.</td> <td>I want you to act like a waiter at a fancy restaurant.</td> </tr> <tr> <td>Here I am, a waiter at an iconic world-class 5-star restaurant.</td> <td>Ladies and gentlemen, may I introduce myself? I’m your server today. I’m here to provide the finest cuisine and ensure you enjoy your meal in a dignified manner. Please excuse me for not knowing your preferences, but I’m happy to help out as best as I can. Please join me on this culinary journey and savor the delights of our restaurant. Are there any questions or requests you have? Anything at all? Oh, and may I recommend the signature dessert? Cheers! We’d love to hear your feedback! Stay healthy, and we’ll see you soon.</td> </tr> </table>
3,154
[ [ -0.0217742919921875, -0.061737060546875, -0.005542755126953125, 0.03363037109375, -0.028045654296875, -0.0211639404296875, -0.01194000244140625, -0.034698486328125, 0.05999755859375, 0.05145263671875, -0.06060791015625, -0.0218658447265625, -0.032958984375, 0.0012788772583007812, -0.01097869873046875, 0.07281494140625, 0.0357666015625, 0.008392333984375, 0.009002685546875, 0.006313323974609375, -0.05584716796875, -0.033233642578125, -0.05255126953125, -0.0284576416015625, 0.0203704833984375, 0.042266845703125, 0.0537109375, 0.0173492431640625, 0.0294342041015625, 0.029571533203125, -0.00850677490234375, 0.013580322265625, -0.05316162109375, 0.0173492431640625, -0.011566162109375, -0.0267486572265625, -0.04052734375, 0.005859375, 0.0154571533203125, 0.0196533203125, -0.0184478759765625, 0.013214111328125, 0.0002617835998535156, 0.0496826171875, -0.04718017578125, 0.05291748046875, -0.045623779296875, 0.0081787109375, -0.0257110595703125, 0.00029087066650390625, -0.006862640380859375, -0.06524658203125, 0.0081939697265625, -0.0582275390625, -0.0236968994140625, -0.01523590087890625, 0.06573486328125, 0.0096588134765625, -0.018463134765625, -0.0186920166015625, -0.04302978515625, 0.06488037109375, -0.043701171875, 0.009033203125, 0.042449951171875, 0.0528564453125, -0.01448822021484375, -0.09210205078125, -0.06536865234375, -0.0009050369262695312, -0.00794219970703125, 0.021514892578125, -0.0185546875, -0.034637451171875, 0.00323486328125, 0.0244598388671875, -0.03515625, -0.0305328369140625, -0.043548583984375, -0.0124969482421875, 0.051605224609375, 0.0189666748046875, 0.0364990234375, -0.012237548828125, -0.0297393798828125, -0.0242156982421875, -0.03521728515625, 0.004810333251953125, 0.021881103515625, 0.040679931640625, -0.0457763671875, 0.045623779296875, 0.00893402099609375, 0.036895751953125, 0.0196380615234375, -0.0210113525390625, 0.0072174072265625, -0.0289306640625, -0.0191192626953125, -0.00839996337890625, 0.0667724609375, 0.04461669921875, 
0.037200927734375, 0.01213836669921875, 0.0055694580078125, 0.01523590087890625, 0.033843994140625, -0.03570556640625, -0.04864501953125, 0.00628662109375, -0.01293182373046875, -0.012969970703125, -0.01464080810546875, -0.07470703125, -0.0285491943359375, -0.00214385986328125, 0.03765869140625, -0.04864501953125, -0.02325439453125, 0.01507568359375, -0.0273590087890625, 0.0244903564453125, 0.0330810546875, -0.09527587890625, 0.0273284912109375, 0.040863037109375, 0.034210205078125, 0.0106964111328125, -0.01324462890625, 0.004486083984375, 0.005023956298828125, -0.0166778564453125, 0.0294342041015625, -0.0239715576171875, -0.027923583984375, -0.00238037109375, 0.0187835693359375, -0.00405120849609375, -0.0316162109375, 0.0628662109375, -0.038177490234375, 0.034820556640625, -0.0095977783203125, -0.0092926025390625, -0.028411865234375, 0.0252838134765625, -0.039398193359375, 0.05364990234375, 0.0201263427734375, -0.05322265625, 0.02294921875, -0.055145263671875, -0.031219482421875, 0.016754150390625, 0.00506591796875, -0.03753662109375, -0.015716552734375, 0.0272979736328125, 0.01338958740234375, -0.023895263671875, -0.02362060546875, -0.02496337890625, -0.042144775390625, 0.027435302734375, -0.0026149749755859375, 0.087646484375, 0.024688720703125, -0.02838134765625, -0.0212249755859375, -0.037841796875, 0.0352783203125, 0.0367431640625, -0.03179931640625, -0.0155487060546875, -0.00930023193359375, 0.007648468017578125, 0.005859375, 0.037200927734375, -0.031890869140625, 0.0298004150390625, -0.047515869140625, 0.042510986328125, 0.0543212890625, 0.0193023681640625, 0.0164794921875, -0.03961181640625, 0.03680419921875, -0.011474609375, 0.0208892822265625, 0.0138092041015625, -0.0293426513671875, -0.06182861328125, -0.0207061767578125, 0.007335662841796875, 0.051025390625, -0.04376220703125, 0.0313720703125, -0.0203704833984375, -0.060302734375, -0.043243408203125, -0.032562255859375, 0.032318115234375, 0.0032329559326171875, 0.0537109375, -0.035858154296875, 
-0.068359375, -0.05810546875, -0.024993896484375, -0.032135009765625, 0.0149688720703125, 0.055023193359375, 0.03387451171875, -0.0238494873046875, 0.049072265625, -0.032012939453125, -0.037750244140625, -0.036712646484375, -0.024322509765625, 0.036346435546875, 0.0556640625, 0.03948974609375, -0.049285888671875, -0.0484619140625, 0.002349853515625, -0.0567626953125, 0.0092926025390625, -0.0005230903625488281, -0.0006785392761230469, 0.0178680419921875, 0.0009398460388183594, -0.035064697265625, 0.006412506103515625, 0.0305633544921875, -0.039642333984375, 0.033355712890625, -0.007049560546875, 0.038787841796875, -0.068603515625, -0.00524139404296875, -0.0240020751953125, 0.006725311279296875, -0.0458984375, 0.0210418701171875, -0.01336669921875, -0.016754150390625, -0.046875, 0.050048828125, -0.030303955078125, 0.00383758544921875, 0.006298065185546875, 0.00507354736328125, -0.0141143798828125, 0.05181884765625, -0.0260162353515625, 0.059234619140625, 0.049835205078125, -0.044464111328125, 0.035675048828125, 0.0528564453125, -0.018585205078125, 0.038604736328125, -0.060821533203125, 0.01340484619140625, 0.01204681396484375, 0.009613037109375, -0.07501220703125, -0.011871337890625, 0.03460693359375, -0.0465087890625, 0.0227508544921875, -0.031463623046875, -0.0306396484375, -0.037750244140625, -0.026580810546875, 0.0092010498046875, 0.052032470703125, -0.03973388671875, 0.02777099609375, 0.0614013671875, -0.013824462890625, -0.06060791015625, -0.0789794921875, 0.00974273681640625, -0.033355712890625, -0.0654296875, 0.0255126953125, 0.00322723388671875, -0.0134429931640625, -0.038238525390625, -0.005069732666015625, -0.03515625, 0.0019683837890625, 0.032470703125, 0.006504058837890625, -0.007350921630859375, -0.0286712646484375, -0.00800323486328125, 0.00426483154296875, 0.01108551025390625, -0.00867462158203125, 0.059844970703125, -0.0305633544921875, -0.00405120849609375, -0.064453125, 0.0016832351684570312, 0.054168701171875, -0.00725555419921875, 
0.042816162109375, 0.038848876953125, -0.011260986328125, -0.006160736083984375, -0.01837158203125, -0.031951904296875, -0.039581298828125, 0.006927490234375, -0.004978179931640625, -0.050506591796875, 0.040985107421875, 0.019866943359375, -0.006801605224609375, 0.038665771484375, 0.0323486328125, -0.0245361328125, 0.037750244140625, 0.03228759765625, -0.00762176513671875, 0.025665283203125, -0.0462646484375, 0.007312774658203125, -0.03192138671875, -0.0259552001953125, -0.038482666015625, -0.032012939453125, -0.056060791015625, -0.027923583984375, 0.0301666259765625, 0.0149383544921875, -0.05194091796875, 0.036834716796875, -0.046661376953125, 0.032928466796875, 0.03143310546875, 0.0159149169921875, 0.0009250640869140625, -0.01285552978515625, -0.01428985595703125, -0.031951904296875, -0.041229248046875, -0.0289459228515625, 0.050567626953125, 0.021331787109375, 0.07135009765625, 0.050567626953125, 0.05670166015625, 0.0162353515625, -0.03033447265625, -0.0235137939453125, 0.06231689453125, -0.01251220703125, -0.05859375, -0.031707763671875, -0.0108795166015625, -0.0770263671875, 0.0154266357421875, -0.03826904296875, -0.04803466796875, 0.0023174285888671875, 0.01251983642578125, -0.01360321044921875, 0.006633758544921875, -0.05487060546875, 0.08502197265625, 0.000057697296142578125, -0.031982421875, -0.01213836669921875, -0.058380126953125, 0.039306640625, 0.03131103515625, -0.0006122589111328125, 0.006397247314453125, 0.0036468505859375, 0.0548095703125, -0.0452880859375, 0.07550048828125, -0.02447509765625, 0.01091766357421875, 0.036407470703125, 0.0242767333984375, 0.050018310546875, 0.0223541259765625, 0.0088958740234375, 0.019439697265625, -0.0064849853515625, -0.021087646484375, -0.018035888671875, 0.06903076171875, -0.0760498046875, -0.0239105224609375, -0.0472412109375, -0.01227569580078125, 0.00782012939453125, 0.01983642578125, 0.0211334228515625, 0.002971649169921875, -0.0009756088256835938, 0.0096588134765625, 0.021514892578125, -0.0035686492919921875, 
0.044830322265625, 0.029327392578125, -0.006565093994140625, -0.033477783203125, 0.0665283203125, 0.00799560546875, -0.007442474365234375, 0.006988525390625, -0.0007047653198242188, -0.03973388671875, -0.0294647216796875, -0.01245880126953125, 0.0307159423828125, -0.04034423828125, -0.01052093505859375, -0.05126953125, -0.0084228515625, -0.0287933349609375, -0.014129638671875, -0.033233642578125, -0.0660400390625, -0.031829833984375, 0.020050048828125, 0.04986572265625, 0.035247802734375, -0.003997802734375, 0.01384735107421875, -0.045440673828125, 0.006435394287109375, 0.050872802734375, 0.0164947509765625, -0.0089263916015625, -0.049591064453125, -0.00690460205078125, 0.005657196044921875, -0.028900146484375, -0.054931640625, 0.057464599609375, 0.0244903564453125, 0.029541015625, -0.0020923614501953125, 0.006572723388671875, 0.05841064453125, -0.0296173095703125, 0.0875244140625, 0.0095977783203125, -0.058837890625, 0.0521240234375, -0.03912353515625, 0.014923095703125, 0.049285888671875, 0.032623291015625, -0.044677734375, -0.0321044921875, -0.056488037109375, -0.06939697265625, 0.05670166015625, 0.01317596435546875, 0.02667236328125, -0.0170135498046875, 0.0156097412109375, -0.00762939453125, 0.0089263916015625, -0.09210205078125, -0.054901123046875, -0.049530029296875, 0.032318115234375, 0.00858306884765625, 0.0008091926574707031, -0.01715087890625, -0.013916015625, 0.06634521484375, 0.004741668701171875, 0.05487060546875, -0.01099395751953125, 0.0185546875, -0.005260467529296875, 0.039764404296875, 0.046295166015625, 0.038787841796875, -0.018096923828125, -0.01038360595703125, 0.0233306884765625, -0.056243896484375, -0.0002624988555908203, -0.01030731201171875, 0.010406494140625, -0.01593017578125, 0.0167999267578125, 0.04595947265625, 0.01364898681640625, -0.03216552734375, 0.045745849609375, -0.0210418701171875, 0.0047607421875, -0.054290771484375, 0.038360595703125, 0.019866943359375, 0.0265655517578125, 0.0135040283203125, 0.002376556396484375, 
-0.0093231201171875, -0.053802490234375, 0.020050048828125, 0.039764404296875, -0.041290283203125, -0.00647735595703125, 0.057647705078125, 0.0084686279296875, -0.027130126953125, 0.0533447265625, -0.017181396484375, -0.0240020751953125, 0.035858154296875, 0.058380126953125, 0.045654296875, -0.01030731201171875, 0.010009765625, 0.05462646484375, 0.01198577880859375, 0.00722503662109375, 0.058563232421875, 0.0165863037109375, -0.03863525390625, -0.026123046875, -0.043609619140625, -0.0207366943359375, 0.0157318115234375, -0.037841796875, 0.03350830078125, -0.042816162109375, -0.00090789794921875, -0.01116180419921875, 0.01480865478515625, -0.0186309814453125, 0.0136871337890625, -0.01503753662109375, 0.059051513671875, -0.05096435546875, 0.063720703125, 0.0416259765625, -0.043182373046875, -0.060455322265625, -0.017059326171875, 0.010589599609375, -0.0740966796875, 0.047393798828125, -0.018524169921875, 0.0185699462890625, -0.00664520263671875, -0.042144775390625, -0.061279296875, 0.0789794921875, 0.005413055419921875, -0.0220184326171875, 0.0184478759765625, 0.0029964447021484375, 0.05487060546875, -0.0310211181640625, 0.034454345703125, 0.05462646484375, 0.04534912109375, 0.00997161865234375, -0.07342529296875, -0.00788116455078125, -0.0254669189453125, -0.00922393798828125, 0.01422882080078125, -0.043304443359375, 0.08831787109375, -0.028411865234375, -0.0182037353515625, 0.01076507568359375, 0.04925537109375, 0.0227508544921875, -0.00615692138671875, 0.0244903564453125, 0.044342041015625, 0.0303802490234375, -0.035736083984375, 0.07989501953125, -0.0094757080078125, 0.042449951171875, 0.08154296875, 0.006191253662109375, 0.054901123046875, 0.01546478271484375, -0.041351318359375, 0.0382080078125, 0.0399169921875, -0.01454925537109375, 0.06292724609375, 0.0233306884765625, -0.007518768310546875, 0.01076507568359375, 0.004955291748046875, -0.04327392578125, 0.04083251953125, 0.017852783203125, -0.01305389404296875, -0.004146575927734375, -0.015777587890625, 
0.00823211669921875, 0.02392578125, -0.0301513671875, 0.0682373046875, -0.01158905029296875, -0.0364990234375, 0.09918212890625, -0.00853729248046875, 0.0653076171875, -0.048187255859375, -0.0162811279296875, -0.024078369140625, 0.00957489013671875, -0.01739501953125, -0.0625, 0.0151824951171875, -0.0246429443359375, -0.007354736328125, 0.00014448165893554688, 0.0263824462890625, -0.04046630859375, -0.01267242431640625, 0.01354217529296875, 0.0183258056640625, 0.034820556640625, 0.0230560302734375, -0.06646728515625, 0.00823974609375, 0.0272064208984375, 0.01145172119140625, 0.0191192626953125, 0.02252197265625, 0.018280029296875, 0.0570068359375, 0.0273590087890625, 0.004146575927734375, 0.005764007568359375, -0.01922607421875, 0.057891845703125, -0.04998779296875, -0.055908203125, -0.043060302734375, 0.042083740234375, 0.00711822509765625, -0.042449951171875, 0.05535888671875, 0.0330810546875, 0.04888916015625, -0.03900146484375, 0.042816162109375, -0.029693603515625, 0.0548095703125, -0.0261383056640625, 0.0823974609375, -0.0494384765625, 0.01128387451171875, -0.032073974609375, -0.060638427734375, -0.013916015625, 0.0731201171875, -0.00225067138671875, 0.0153045654296875, 0.03326416015625, 0.06591796875, -0.00731658935546875, 0.01349639892578125, 0.01300811767578125, 0.033660888671875, 0.020111083984375, 0.042327880859375, 0.049530029296875, -0.054901123046875, 0.036102294921875, -0.031646728515625, 0.00827789306640625, -0.034912109375, -0.055023193359375, -0.037750244140625, -0.0528564453125, -0.017547607421875, -0.037689208984375, 0.01483154296875, 0.0701904296875, 0.047943115234375, -0.0567626953125, -0.030548095703125, 0.00847625732421875, 0.0100860595703125, -0.041839599609375, -0.0163116455078125, 0.022705078125, -0.00749969482421875, -0.05712890625, 0.045654296875, 0.004047393798828125, 0.033905029296875, 0.00018608570098876953, -0.01136016845703125, -0.033111572265625, 0.004878997802734375, 0.028656005859375, 0.046142578125, -0.055999755859375, 
-0.0226593017578125, -0.0274658203125, -0.0203399658203125, -0.0008969306945800781, 0.0199432373046875, -0.050079345703125, 0.0278778076171875, 0.01090240478515625, 0.01308441162109375, 0.057647705078125, -0.029693603515625, 0.0186004638671875, -0.0565185546875, 0.0131072998046875, 0.042572021484375, 0.0247955322265625, 0.00777435302734375, -0.0247955322265625, 0.06719970703125, -0.0011882781982421875, -0.0643310546875, -0.03790283203125, -0.0023651123046875, -0.09100341796875, -0.01261138916015625, 0.063232421875, -0.01108551025390625, -0.0179290771484375, -0.0164031982421875, -0.021514892578125, 0.002918243408203125, -0.04010009765625, 0.06695556640625, 0.0275726318359375, -0.028717041015625, 0.0009427070617675781, -0.0254364013671875, 0.060791015625, 0.01788330078125, -0.057281494140625, 0.016632080078125, 0.032958984375, 0.01959228515625, 0.036163330078125, 0.056976318359375, -0.026458740234375, 0.04840087890625, -0.00589752197265625, 0.0277252197265625, -0.00006759166717529297, -0.01611328125, -0.0234527587890625, 0.031768798828125, -0.020355224609375, -0.0198822021484375 ] ]
rinna/bilingual-gpt-neox-4b-8k
2023-08-14T06:40:46.000Z
[ "transformers", "pytorch", "safetensors", "gpt_neox", "text-generation", "ja", "en", "dataset:mc4", "dataset:cc100", "dataset:wikipedia", "dataset:EleutherAI/pile", "dataset:togethercomputer/RedPajama-Data-1T", "arxiv:2306.15595", "license:mit", "has_space", "text-generation-inference", "region:us" ]
text-generation
rinna
null
null
rinna/bilingual-gpt-neox-4b-8k
22
5,872
transformers
2023-07-31T02:34:21
--- thumbnail: https://github.com/rinnakk/japanese-pretrained-models/blob/master/rinna.png license: mit datasets: - mc4 - cc100 - wikipedia - EleutherAI/pile - togethercomputer/RedPajama-Data-1T language: - ja - en inference: false --- # bilingual-gpt-neox-4b-8k ![rinna-icon](./rinna.png) # Overview **Notice: This model requires `transformers>=4.31.0` to work properly.** This repository provides an English-Japanese bilingual GPT-NeoX model of 3.8 billion parameters. We extend [`rinna/bilingual-gpt-neox-4b`](https://huggingface.co/rinna/bilingual-gpt-neox-4b)'s context length from 2048 to 8192 by fine-tuning on 1.5B extra tokens using [RoPE positional interpolation](https://arxiv.org/abs/2306.15595). * **Library** The model was trained using code based on [EleutherAI/gpt-neox](https://github.com/EleutherAI/gpt-neox). * **Model architecture** A 36-layer, 2816-hidden-size transformer-based language model. * **Fine-tuning** The model was trained on long sequences (longer than 4000 tokens) sampled from its pre-training corpora as follows. The fine-tuning data contains **1.5B** tokens in total. 
- [Japanese CC-100](http://data.statmt.org/cc-100/ja.txt.xz) - [Japanese C4](https://huggingface.co/datasets/mc4) - [The Pile](https://huggingface.co/datasets/EleutherAI/pile) - [Redpajama](https://huggingface.co/datasets/togethercomputer/RedPajama-Data-1T) - [Wikipedia](https://dumps.wikimedia.org/other/cirrussearch) * **Model Series** | Variant | Link | | :-- | :--| | Bilingual 4B MiniGPT4 | https://huggingface.co/rinna/bilingual-gpt-neox-4b-minigpt4 | | Bilingual 4B PPO | https://huggingface.co/rinna/bilingual-gpt-neox-4b-instruction-ppo | | Bilingual 4B SFT | https://huggingface.co/rinna/bilingual-gpt-neox-4b-instruction-sft | | Bilingual 4B 8K | https://huggingface.co/rinna/bilingual-gpt-neox-4b-8k | | Bilingual 4B | https://huggingface.co/rinna/bilingual-gpt-neox-4b | | Japanese 3.6B PPO | https://huggingface.co/rinna/japanese-gpt-neox-3.6b-instruction-ppo | | Japanese 3.6B SFT-v2 | https://huggingface.co/rinna/japanese-gpt-neox-3.6b-instruction-sft-v2 | | Japanese 3.6B SFT | https://huggingface.co/rinna/japanese-gpt-neox-3.6b-instruction-sft | | Japanese 3.6B | https://huggingface.co/rinna/japanese-gpt-neox-3.6b | * **Authors** - [Tianyu Zhao](https://huggingface.co/tianyuz) - [Toshiaki Wakatsuki](https://huggingface.co/t-w) - [Akio Kaga](https://huggingface.co/rakaga) - [Koh Mitsuda](https://huggingface.co/mitsu-koh) - [Kei Sawada](https://huggingface.co/keisawada) # How to use the model **Notice:** Since the model is **sensitive to decoding hyper-parameters** (e.g. `temperature`, `top_p`, `top_k`, `repetition_penalty`), it is suggested to explore the best setting for your task. 
~~~~python import torch from transformers import AutoTokenizer, AutoModelForCausalLM tokenizer = AutoTokenizer.from_pretrained("rinna/bilingual-gpt-neox-4b-8k", use_fast=False) model = AutoModelForCausalLM.from_pretrained("rinna/bilingual-gpt-neox-4b-8k") if torch.cuda.is_available(): model = model.to("cuda") text = "Socrates says" token_ids = tokenizer.encode(text, add_special_tokens=False, return_tensors="pt") with torch.no_grad(): output_ids = model.generate( token_ids.to(model.device), max_new_tokens=4000, min_new_tokens=4000, do_sample=True, temperature=1.0, top_p=0.95, pad_token_id=tokenizer.pad_token_id, bos_token_id=tokenizer.bos_token_id, eos_token_id=tokenizer.eos_token_id ) output = tokenizer.decode(output_ids.tolist()[0]) print(output) """ Socrates says that he is not a bad man because the people of his city-state want to kill him. For a just man, if someone gives them their life over, they will lose it by violence. If this happens at the hands of another, that person will be as bad as Plato's Socratic slave, and Socrates will suffer accordingly (B 134 ff). The Stranger's final remark concerns the distinction between knowledge and wisdom. While the Stranger seems to claim that all people can learn moral lessons through observation of how the world actually works, Socrates responds by saying: "What? Am I a skilful painter?" I replied [to his question] (499). "No, indeed I am not, Socrates; but you are one who knows how to paint. You have painted a little picture and I know nothing about art. In this respect what do I know or can learn from you?" (D 1015) Socrates suggests that it makes sense to define the knowledge required of a good person as any skill which we can acquire by observing real behavior. However, there appears to be a problem in this definition: it seems unlikely that everyone can have such a skill. 
Certainly, if he were able to see the actions of other people, he would understand how we should act, even though his own response to these actions would not necessarily satisfy moral rules. Even less sophisticated people might reasonably conclude that their own actions must conform with accepted moral standards of behavior. Hence, it seems that all people, at least some of us, need _some_ form of training. ## **The nature of education and character** Having set out our ideas of justice and virtue, and the ways in which they relate to political society, Socrates now brings the story of his pupil Phaedrus to a close. He tells Phaedrus that "my teaching you is as simple as that of your own body. If you were to lay it out for yourself, you would not discover its form" (B 287–8). The two men had originally been introduced as students undertaking an exercise called pedagogy. Now, however, Socrates has presented Phaedrus with the idea that his task involves making predictions concerning events yet to come (B 169). A better understanding of these events will be more useful than mere memorization. To achieve this purpose, the young philosopher must be careful not to waste his time doing the unnecessary things that ordinary humans tend to do. Socrates asks Phaedrus whether a good philosopher really needs to do no work. The answer given is "yes", meaning that he does not need to study the classics and develop a philosophical tradition in order to make himself a good person, nor to go through a lengthy course of philosophy and other education. Rather, he should simply practice being an active, creative, and imaginative thinker ( _eikasōma_ ). Such persons are well qualified to judge situations on their own terms, rather than on those provided by tradition (B 296). Once again, Socrates emphasizes the difference between the intellectual achievements which follow naturally from education and those which require intellectual effort alone. 
When asked whether this sort of education can produce a good man, Socrates replies in the affirmative: "Surely it would appear impossible that someone could attain the most important parts of wisdom, unless he was a student of human affairs" (B 364). Socrates also points out that having been educated properly helps a person to make good choices when faced with difficult decisions: So for this same reason, if you did not take up your craft with me, that is, your profession, when you were young, you would not be a fit person to judge how you ought to vote; because you would not consider each thing in accordance with its true nature" (B 366). As Plato often asserts throughout the _Apology_, Socrates regards learning as essential to the acquisition of wisdom but education can never substitute for the inborn capacities of a child. This is not to say that children lack wisdom or that they cannot mature. Indeed, Socrates explains that education is sometimes needed even by individuals who can solve problems for themselves (B 343–67), and Socrates later refers to this activity (C 738 ff) as _technēsēs_. However, there is always something special about childhood initiating certain capacities. We usually give up the right to participate in education at puberty so as to prepare us for adult life, for example, without being informed that our bodies and intelligence can also grow old (B 1165–70). ## **Socrates's defence of democracy and Socratic method** Following a lengthy description of Socrates's educational programme, Plato moves directly into the matter of democratic politics and citizenship in Book III. On the first day of the trial, Socrates takes up the theme of democracy once again: "For you are looking for this thing, my friends, that is to say, the good citizenship to which every person stands entitled" (389). 
Before continuing, Socrates introduces three principles that he believes form the very heart of good citizenship: the good gods, respect for nature, and love of beauty. Socrates describes these principles in various ways: 1. All citizens of a democracy are expected to behave honourably (390). The citizen should avoid doing anything harmful (to others or to himself) and everything good. There is therefore no way to avoid acting dishonourably (391); but no one can avoid harming himself, for his actions will harm the community as a whole (392–5). 2. Each individual is equally in a position of power and authority, and this means that the citizens must share responsibility for the good government of the state (395). 3. Respect for nature means that citizens will observe that both laws of nature and the opinions of other people control their actions, so that they must choose between the best available alternatives. Anyone who fails to adopt reasoned opinion will be wrong in principle (399). This entails that citizens will have to choose among the best policies that prevail within the community (ibid.). So, while the citizens will have authority and power, this only exists so long as the laws and opinions of which they approve prevail in general over those of which they disapprove. The only way they can get any power at all over their fellow-citizens is either through punishment, or through elections. These provide the means by which citizens can express their approval of a policy or disapproval of a policy. The latter occurs when citizens elect the individuals responsible for making the laws. While democracy may be described as a'mixed' government, it is not possible for citizens to choose those whom they wish to vote for (399). Instead, they decide who should have a voice. Those elected speak for themselves, they do not listen to the advice of their colleagues, and ultimately the result will be chosen by the people themselves (399–401). 
Once again, Socrates is clearly trying to convince his interrogators that the best interests of the city-state depend on giving a larger voice to the public in running its affairs. ## **Plato's reply to Socrates** Plato's rejoinder shows his great skill in dialogue. He presents the argument in familiar forms: analogy, discussion, and so on. Although Socrates makes some valid points at times along the way, he usually finds reasons for disagreeing with the arguments that he offers to support his claims. As he repeatedly does throughout Book II, the Stranger then uses Socrates's own words against him. To begin with, the Stranger dismisses the claim that each person ... """ ~~~~ --- # Tokenization The model uses a [sentencepiece](https://github.com/google/sentencepiece)-based tokenizer. * The tokenizer has a vocabulary size of 65,536. * It uses *byte fallback* to decompose unknown text pieces into UTF-8 byte pieces to avoid producing `<UNK>` tokens. * It can recognize *consecutive whitespaces*, *newlines*, and *tabs* to handle structured texts better. * We turned off the default behaviour of prepending leading whitespace because it is not beneficial for processing Japanese. * Specifically, single whitespace is always processed as one token so that any English word won't have a preceding whitespace like in many other tokenizers (e.g. `_Hello`). * This decision trades the English processing efficiency for a unified way to treat whitespaces. * It leads to a significantly lower loss of next token prediction on English data because whitespaces are easy to predict. * **Don't forget to set `use_fast=False` to make the above features function correctly.** --- # Licenese [The MIT license](https://opensource.org/licenses/MIT)
12,331
[ [ -0.028045654296875, -0.042755126953125, 0.03485107421875, 0.005115509033203125, -0.039306640625, -0.0008134841918945312, -0.0118255615234375, -0.0308685302734375, 0.0364990234375, 0.0201263427734375, -0.0347900390625, -0.031280517578125, -0.047149658203125, 0.006099700927734375, -0.00850677490234375, 0.07806396484375, 0.0033855438232421875, 0.00576019287109375, 0.01174163818359375, 0.00604248046875, -0.021026611328125, -0.02899169921875, -0.07403564453125, -0.0203704833984375, 0.025390625, 0.021942138671875, 0.034423828125, 0.04376220703125, 0.00844573974609375, 0.031463623046875, -0.005939483642578125, 0.00553131103515625, -0.035888671875, -0.00878143310546875, -0.034423828125, -0.034149169921875, -0.0347900390625, 0.015594482421875, 0.047393798828125, 0.045654296875, -0.00316619873046875, 0.008697509765625, -0.0017604827880859375, 0.04034423828125, -0.0208740234375, 0.0120697021484375, -0.0256500244140625, 0.0004189014434814453, -0.00807952880859375, -0.00139617919921875, -0.00811004638671875, -0.044891357421875, 0.00530242919921875, -0.065185546875, 0.0170440673828125, -0.01166534423828125, 0.0831298828125, -0.004268646240234375, -0.030670166015625, -0.016265869140625, -0.042022705078125, 0.08636474609375, -0.056121826171875, 0.018768310546875, 0.03662109375, -0.0014963150024414062, -0.0032978057861328125, -0.0709228515625, -0.045745849609375, -0.0011358261108398438, -0.0144805908203125, 0.03668212890625, -0.002857208251953125, -0.006946563720703125, 0.037109375, 0.0308837890625, -0.0308380126953125, -0.0211639404296875, -0.05560302734375, -0.0274505615234375, 0.044769287109375, 0.01491546630859375, 0.03167724609375, -0.0252532958984375, -0.03192138671875, -0.0204315185546875, -0.027923583984375, 0.01971435546875, 0.04852294921875, 0.01337432861328125, -0.024169921875, 0.029541015625, -0.0118560791015625, 0.0443115234375, 0.0004100799560546875, -0.0288848876953125, 0.01389312744140625, -0.04534912109375, 0.0078277587890625, -0.00337982177734375, 
0.07635498046875, 0.03375244140625, 0.029541015625, -0.006931304931640625, -0.005218505859375, 0.00785064697265625, 0.0026836395263671875, -0.06768798828125, -0.02764892578125, -0.0022296905517578125, -0.0190582275390625, -0.01216888427734375, 0.01084136962890625, -0.06927490234375, 0.01201629638671875, -0.0195770263671875, 0.034637451171875, -0.05242919921875, -0.0199127197265625, 0.0394287109375, -0.017303466796875, 0.00860595703125, 0.0278472900390625, -0.0755615234375, 0.026824951171875, 0.041290283203125, 0.058441162109375, 0.0137786865234375, -0.038299560546875, -0.0201416015625, 0.00765228271484375, -0.029052734375, 0.0244293212890625, -0.04034423828125, -0.0285797119140625, -0.033447265625, 0.0195770263671875, -0.0229644775390625, -0.0195159912109375, 0.049713134765625, -0.009429931640625, 0.0443115234375, -0.032318115234375, -0.04461669921875, -0.01849365234375, 0.00537109375, -0.020355224609375, 0.07861328125, 0.0257568359375, -0.06805419921875, 0.009735107421875, -0.0390625, -0.0205230712890625, 0.01067352294921875, -0.00492095947265625, -0.0211639404296875, -0.00957489013671875, 0.007213592529296875, 0.01885986328125, -0.024078369140625, 0.0011444091796875, -0.0191650390625, -0.01641845703125, 0.01611328125, -0.01439666748046875, 0.09393310546875, 0.01049041748046875, -0.052703857421875, -0.0252532958984375, -0.053863525390625, 0.0176544189453125, 0.02435302734375, -0.0249176025390625, -0.0280609130859375, -0.014129638671875, -0.00921630859375, 0.0234375, 0.0286865234375, -0.06256103515625, 0.0247344970703125, -0.041290283203125, 0.045745849609375, 0.060028076171875, 0.0036869049072265625, 0.032196044921875, -0.035064697265625, 0.04827880859375, -0.01256561279296875, 0.0249176025390625, -0.0015630722045898438, -0.05181884765625, -0.07012939453125, -0.01537322998046875, 0.017730712890625, 0.050994873046875, -0.04296875, 0.0220489501953125, -0.01155853271484375, -0.045562744140625, -0.040191650390625, 0.018798828125, 0.0467529296875, 0.04559326171875, 
0.0294952392578125, -0.04913330078125, -0.049835205078125, -0.050628662109375, -0.00897216796875, -0.0280609130859375, 0.0196685791015625, 0.030853271484375, 0.052886962890625, -0.003574371337890625, 0.044097900390625, -0.033050537109375, 0.0019931793212890625, -0.0145263671875, -0.003131866455078125, 0.02374267578125, 0.0465087890625, 0.051055908203125, -0.038299560546875, -0.03436279296875, 0.0028095245361328125, -0.048553466796875, 0.00853729248046875, 0.0064239501953125, -0.0230560302734375, -0.0033435821533203125, 0.032867431640625, -0.0426025390625, 0.03546142578125, 0.0056610107421875, -0.04254150390625, 0.0523681640625, -0.022186279296875, 0.0008630752563476562, -0.0838623046875, 0.0089111328125, -0.0028324127197265625, -0.0093536376953125, -0.037811279296875, -0.0014324188232421875, -0.00443267822265625, 0.004764556884765625, -0.04217529296875, 0.06585693359375, -0.0180816650390625, 0.006717681884765625, -0.002155303955078125, 0.0176544189453125, 0.0113525390625, 0.02618408203125, 0.01629638671875, 0.047760009765625, 0.050537109375, -0.05743408203125, 0.0504150390625, 0.007579803466796875, -0.01387786865234375, 0.035247802734375, -0.052093505859375, 0.0110321044921875, -0.0038509368896484375, 0.01512908935546875, -0.055908203125, -0.0096282958984375, 0.048187255859375, -0.0452880859375, 0.0018024444580078125, -0.023040771484375, -0.0307464599609375, -0.039459228515625, -0.0193634033203125, 0.0296783447265625, 0.034210205078125, -0.023101806640625, 0.023773193359375, 0.008880615234375, -0.01357269287109375, -0.06011962890625, -0.030853271484375, 0.001323699951171875, -0.0228729248046875, -0.041748046875, 0.0304718017578125, -0.02154541015625, -0.0186767578125, 0.00010991096496582031, -0.0086669921875, -0.0031871795654296875, -0.0017290115356445312, 0.001651763916015625, 0.03253173828125, -0.01448822021484375, -0.004543304443359375, -0.0111541748046875, -0.0228271484375, 0.005840301513671875, -0.02764892578125, 0.05047607421875, -0.031005859375, 
0.00554656982421875, -0.04400634765625, 0.021942138671875, 0.04931640625, -0.0163421630859375, 0.0462646484375, 0.0633544921875, -0.0230560302734375, 0.00856781005859375, -0.0294036865234375, -0.039031982421875, -0.035888671875, 0.0139312744140625, -0.0228729248046875, -0.044281005859375, 0.040863037109375, 0.0145263671875, 0.0017251968383789062, 0.03741455078125, 0.0306549072265625, -0.011688232421875, 0.063720703125, 0.0389404296875, 0.0011281967163085938, 0.03466796875, -0.05780029296875, 0.01727294921875, -0.07305908203125, -0.0223388671875, -0.030609130859375, -0.0287628173828125, -0.0565185546875, -0.0219573974609375, 0.01097869873046875, 0.00577545166015625, -0.03741455078125, 0.0307464599609375, -0.052764892578125, 0.0288848876953125, 0.05377197265625, 0.006076812744140625, 0.01168060302734375, -0.0017900466918945312, -0.0175323486328125, -0.018646240234375, -0.052459716796875, -0.0428466796875, 0.08837890625, 0.01465606689453125, 0.051422119140625, 0.015228271484375, 0.060760498046875, -0.0233154296875, -0.0049591064453125, -0.05828857421875, 0.05712890625, -0.00342559814453125, -0.065673828125, -0.02618408203125, -0.03216552734375, -0.0867919921875, 0.0154266357421875, -0.0022029876708984375, -0.05560302734375, 0.040252685546875, -0.011383056640625, -0.034576416015625, 0.031829833984375, -0.046478271484375, 0.06866455078125, -0.031036376953125, -0.047821044921875, -0.01495361328125, -0.052978515625, 0.0281982421875, 0.0114288330078125, 0.01003265380859375, -0.0026264190673828125, -0.0200653076171875, 0.086181640625, -0.039520263671875, 0.06744384765625, -0.0038661956787109375, 0.01067352294921875, 0.020721435546875, 0.0007653236389160156, 0.0210723876953125, 0.0311126708984375, 0.0015773773193359375, -0.0186920166015625, 0.0254058837890625, -0.046905517578125, -0.032257080078125, 0.047149658203125, -0.0872802734375, -0.03448486328125, -0.06890869140625, -0.025390625, 0.01517486572265625, 0.03326416015625, 0.04608154296875, 0.034088134765625, 
0.002529144287109375, 0.0022563934326171875, 0.05511474609375, -0.01300048828125, 0.04364013671875, 0.047149658203125, -0.0160980224609375, -0.048675537109375, 0.06707763671875, 0.019805908203125, 0.0099945068359375, 0.0109405517578125, 0.0233306884765625, -0.0216827392578125, -0.022186279296875, -0.0276641845703125, 0.057403564453125, -0.0452880859375, -0.0010585784912109375, -0.0501708984375, -0.003604888916015625, -0.04339599609375, -0.00994873046875, -0.03790283203125, -0.032562255859375, -0.037841796875, -0.005374908447265625, 0.0244903564453125, 0.04583740234375, -0.0185089111328125, 0.01800537109375, -0.0308074951171875, 0.01910400390625, 0.055511474609375, 0.01320648193359375, -0.00917816162109375, -0.05181884765625, -0.01093292236328125, 0.0037784576416015625, -0.03692626953125, -0.06854248046875, 0.03363037109375, 0.0177001953125, 0.036651611328125, 0.040252685546875, 0.00994873046875, 0.0487060546875, -0.0126190185546875, 0.057830810546875, 0.0170135498046875, -0.070556640625, 0.055877685546875, -0.0262298583984375, 0.00983428955078125, 0.03277587890625, 0.042144775390625, -0.060150146484375, -0.0216064453125, -0.073486328125, -0.07257080078125, 0.07208251953125, 0.01120758056640625, 0.0308074951171875, -0.016937255859375, 0.046142578125, 0.0029430389404296875, 0.00250244140625, -0.07891845703125, -0.034149169921875, -0.0450439453125, 0.00527191162109375, -0.0228271484375, 0.0013523101806640625, -0.0184326171875, -0.062255859375, 0.06866455078125, 0.00774383544921875, 0.0264892578125, 0.0279998779296875, -0.0070343017578125, -0.00725555419921875, 0.0224761962890625, 0.04364013671875, 0.07452392578125, -0.01413726806640625, 0.01386260986328125, 0.0155029296875, -0.06134033203125, 0.0197296142578125, -0.001941680908203125, -0.00923919677734375, 0.0007948875427246094, 0.0171051025390625, 0.0660400390625, 0.01311492919921875, -0.034423828125, 0.036956787109375, -0.0218658447265625, -0.013092041015625, -0.037994384765625, 0.0235137939453125, 
0.0054168701171875, 0.00751495361328125, 0.01471710205078125, -0.0003273487091064453, -0.009918212890625, -0.07598876953125, 0.00806427001953125, 0.0132598876953125, -0.0254974365234375, -0.046661376953125, 0.049591064453125, 0.0036716461181640625, -0.012786865234375, 0.05908203125, -0.0219573974609375, -0.055755615234375, 0.05126953125, 0.05181884765625, 0.047760009765625, -0.03179931640625, 0.032928466796875, 0.0638427734375, 0.03460693359375, -0.005767822265625, 0.029541015625, -0.004055023193359375, -0.06304931640625, -0.02239990234375, -0.05157470703125, -0.023101806640625, 0.01410675048828125, -0.04400634765625, 0.01331329345703125, -0.057464599609375, -0.0287628173828125, -0.0096588134765625, 0.032806396484375, -0.04547119140625, 0.03155517578125, -0.0069122314453125, 0.058197021484375, -0.07598876953125, 0.070556640625, 0.06414794921875, -0.0310516357421875, -0.07958984375, -0.0186614990234375, -0.0195465087890625, -0.047149658203125, 0.049591064453125, -0.00827789306640625, 0.0024890899658203125, 0.01361846923828125, -0.0307464599609375, -0.06121826171875, 0.0814208984375, 0.03857421875, -0.0267486572265625, -0.004058837890625, 0.0005588531494140625, 0.037994384765625, -0.0186309814453125, 0.041046142578125, 0.042694091796875, 0.04949951171875, 0.0027942657470703125, -0.053497314453125, 0.0031871795654296875, -0.04193115234375, -0.0118408203125, 0.0025634765625, -0.0634765625, 0.08184814453125, -0.026824951171875, -0.0024929046630859375, 0.0016565322875976562, 0.0447998046875, 0.00811004638671875, 0.0234222412109375, 0.022705078125, 0.0623779296875, 0.059326171875, -0.01593017578125, 0.0753173828125, -0.03228759765625, 0.03271484375, 0.053497314453125, 0.029296875, 0.04180908203125, 0.025390625, -0.0242767333984375, 0.04254150390625, 0.06103515625, 0.01128387451171875, 0.042449951171875, -0.0081787109375, -0.03558349609375, -0.0269775390625, 0.011199951171875, -0.04559326171875, 0.03350830078125, 0.028961181640625, -0.0291900634765625, 
-0.0018053054809570312, -0.007053375244140625, 0.030975341796875, -0.006053924560546875, -0.0203857421875, 0.043975830078125, 0.017852783203125, -0.06158447265625, 0.052978515625, 0.01219940185546875, 0.05242919921875, -0.041534423828125, 0.018707275390625, -0.0205078125, 0.01100921630859375, -0.010772705078125, -0.058502197265625, 0.0030670166015625, -0.015045166015625, 0.0014410018920898438, -0.0172271728515625, 0.048004150390625, -0.01226043701171875, -0.04608154296875, 0.027008056640625, 0.03277587890625, 0.036102294921875, 0.0195159912109375, -0.07232666015625, -0.006786346435546875, 0.007415771484375, -0.01397705078125, 0.0153045654296875, 0.0299072265625, 0.00774383544921875, 0.04412841796875, 0.043731689453125, 0.0290985107421875, 0.0199127197265625, -0.01580810546875, 0.054290771484375, -0.053558349609375, -0.0299072265625, -0.058685302734375, 0.03271484375, -0.00771331787109375, -0.050262451171875, 0.0635986328125, 0.05230712890625, 0.07958984375, 0.0003306865692138672, 0.055084228515625, -0.0140380859375, 0.0287933349609375, -0.0247955322265625, 0.040008544921875, -0.040557861328125, 0.00116729736328125, -0.03173828125, -0.0789794921875, 0.0008244514465332031, 0.07293701171875, -0.05194091796875, 0.0209197998046875, 0.059814453125, 0.04986572265625, -0.01461029052734375, -0.0104522705078125, 0.01107025146484375, 0.0176544189453125, 0.00832366943359375, 0.060394287109375, 0.054290771484375, -0.054412841796875, 0.03857421875, -0.058074951171875, -0.01399993896484375, -0.0171051025390625, -0.0374755859375, -0.067626953125, -0.05902099609375, -0.037384033203125, -0.03863525390625, 0.01302337646484375, 0.061065673828125, 0.051788330078125, -0.036712646484375, -0.03289794921875, -0.0027008056640625, 0.01459503173828125, -0.03363037109375, -0.012725830078125, 0.03265380859375, -0.00905609130859375, -0.07952880859375, 0.017242431640625, 0.0005831718444824219, 0.0152435302734375, 0.0015268325805664062, -0.0240325927734375, -0.00630950927734375, 
0.0147552490234375, 0.0352783203125, 0.044158935546875, -0.04608154296875, -0.01885986328125, 0.0035152435302734375, -0.0103912353515625, 0.00930023193359375, 0.042938232421875, -0.04571533203125, 0.04071044921875, 0.046661376953125, -0.0013456344604492188, 0.060455322265625, -0.0160064697265625, 0.0160064697265625, -0.02471923828125, 0.0129547119140625, 0.03778076171875, 0.037078857421875, 0.01483154296875, -0.0161285400390625, 0.036834716796875, 0.05194091796875, -0.03497314453125, -0.058807373046875, 0.0189208984375, -0.07684326171875, -0.0168914794921875, 0.07012939453125, -0.010345458984375, -0.030059814453125, -0.01544189453125, -0.022247314453125, 0.0303802490234375, -0.02435302734375, 0.056121826171875, 0.052001953125, -0.0018558502197265625, -0.0278167724609375, -0.056549072265625, 0.033538818359375, 0.04522705078125, -0.04327392578125, -0.004489898681640625, 0.0128021240234375, 0.052520751953125, 0.01256561279296875, 0.049713134765625, -0.0248260498046875, 0.041656494140625, -0.0037899017333984375, 0.0147705078125, 0.004085540771484375, -0.004383087158203125, -0.0009312629699707031, -0.0036792755126953125, -0.01418304443359375, 0.0005202293395996094 ] ]
Harshvir/Llama-2-7B-physics
2023-08-17T19:51:00.000Z
[ "transformers", "pytorch", "llama", "text-generation", "dataset:camel-ai/physics", "endpoints_compatible", "has_space", "text-generation-inference", "region:us" ]
text-generation
Harshvir
null
null
Harshvir/Llama-2-7B-physics
1
5,871
transformers
2023-08-17T18:30:01
--- datasets: - camel-ai/physics --- Trained on a sample of <a href='https://huggingface.co/datasets/camel-ai/physics'>camel-ai/physics dataset</a>. Base Model: <a href='https://huggingface.co/NousResearch/Llama-2-7b-chat-hf'>NousResearch/Llama-2-7b-chat-hf</a>
264
[ [ -0.014556884765625, -0.038818359375, 0.003833770751953125, 0.01029205322265625, -0.030853271484375, 0.00203704833984375, 0.01273345947265625, -0.032684326171875, 0.04541015625, 0.01361846923828125, -0.06927490234375, -0.041595458984375, -0.03228759765625, -0.004276275634765625, -0.036590576171875, 0.09918212890625, 0.016693115234375, 0.00946044921875, -0.0107574462890625, -0.029083251953125, -0.0084991455078125, -0.025543212890625, -0.034454345703125, -0.04376220703125, 0.048614501953125, 0.0264892578125, 0.05426025390625, 0.05072021484375, 0.0450439453125, 0.007411956787109375, -0.01120758056640625, -0.0007748603820800781, -0.049102783203125, -0.00862884521484375, -0.0223236083984375, -0.0166778564453125, -0.0309906005859375, 0.00988006591796875, 0.050445556640625, 0.04266357421875, -0.0321044921875, 0.0288848876953125, -0.021026611328125, 0.0212249755859375, -0.045440673828125, 0.0024871826171875, -0.0413818359375, -0.00539398193359375, -0.007572174072265625, -0.002178192138671875, -0.009979248046875, -0.01145172119140625, 0.01788330078125, -0.0604248046875, 0.019317626953125, 0.044830322265625, 0.10272216796875, 0.0206451416015625, -0.0445556640625, 0.0005369186401367188, -0.01265716552734375, 0.075439453125, -0.01346588134765625, 0.04046630859375, 0.04449462890625, 0.0291900634765625, -0.0306549072265625, -0.031585693359375, -0.034393310546875, -0.0195770263671875, -0.00565338134765625, -0.0112152099609375, -0.0192108154296875, -0.0186614990234375, 0.01514434814453125, 0.0272979736328125, -0.039337158203125, -0.0007410049438476562, -0.04046630859375, -0.00801849365234375, 0.043365478515625, 0.00823211669921875, 0.0171051025390625, -0.0035190582275390625, -0.026153564453125, -0.05596923828125, -0.043701171875, -0.001888275146484375, 0.03570556640625, 0.0263671875, -0.050445556640625, 0.0533447265625, -0.0341796875, 0.0540771484375, 0.022491455078125, -0.0232391357421875, 0.06646728515625, -0.004123687744140625, -0.018280029296875, 0.0017099380493164062, 
0.061126708984375, 0.025115966796875, 0.005832672119140625, -0.004390716552734375, -0.002307891845703125, -0.00885772705078125, 0.01486968994140625, -0.0699462890625, -0.01898193359375, 0.01013946533203125, -0.034027099609375, -0.038055419921875, -0.01284027099609375, -0.0302886962890625, -0.017333984375, 0.005649566650390625, 0.002597808837890625, -0.0263824462890625, -0.015869140625, 0.0229034423828125, -0.02899169921875, 0.038238525390625, 0.0078277587890625, -0.055328369140625, 0.0404052734375, 0.0201263427734375, 0.04901123046875, 0.002765655517578125, -0.0254974365234375, -0.038909912109375, 0.01432037353515625, 0.0074920654296875, 0.0770263671875, -0.032562255859375, -0.055328369140625, -0.00208282470703125, 0.01047515869140625, 0.0208282470703125, -0.0211944580078125, 0.0352783203125, -0.035797119140625, 0.01084136962890625, -0.0194549560546875, -0.033294677734375, -0.0290985107421875, 0.034149169921875, -0.059295654296875, 0.08123779296875, 0.0174102783203125, -0.07537841796875, 0.03912353515625, -0.0411376953125, -0.039337158203125, -0.024261474609375, -0.00940704345703125, -0.04693603515625, -0.016815185546875, 0.00864410400390625, 0.0181121826171875, -0.0291595458984375, -0.01328277587890625, -0.052734375, -0.01264190673828125, 0.016204833984375, -0.008209228515625, 0.050384521484375, -0.01209259033203125, 0.01265716552734375, 0.00591278076171875, -0.0889892578125, -0.0103302001953125, 0.035186767578125, -0.022735595703125, -0.02178955078125, -0.0007810592651367188, 0.020904541015625, -0.004497528076171875, 0.0280303955078125, -0.05743408203125, 0.0325927734375, -0.0239410400390625, -0.012664794921875, 0.0335693359375, 0.01404571533203125, 0.0242156982421875, -0.0513916015625, 0.024261474609375, -0.0019664764404296875, 0.00913238525390625, 0.021697998046875, -0.046051025390625, -0.05029296875, -0.0178985595703125, 0.00209808349609375, 0.029754638671875, -0.0158233642578125, 0.0462646484375, -0.006816864013671875, -0.0576171875, -0.034637451171875, 
0.0110321044921875, 0.0123748779296875, 0.046051025390625, 0.040069580078125, -0.0275421142578125, -0.050628662109375, -0.06146240234375, -0.0002720355987548828, -0.028228759765625, -0.01143646240234375, 0.034698486328125, 0.04766845703125, -0.01551055908203125, 0.06793212890625, -0.03326416015625, -0.0029430389404296875, 0.00848388671875, 0.011444091796875, 0.01416015625, 0.05029296875, 0.04931640625, -0.051025390625, -0.00208282470703125, -0.019927978515625, -0.044677734375, 0.0010309219360351562, 0.0234527587890625, -0.05596923828125, -0.025726318359375, -0.004795074462890625, -0.047088623046875, 0.0494384765625, 0.037628173828125, -0.0296173095703125, 0.029449462890625, 0.0172882080078125, -0.0027256011962890625, -0.07940673828125, 0.01120758056640625, 0.02935791015625, -0.006320953369140625, -0.0215301513671875, -0.01126861572265625, -0.009124755859375, 0.016845703125, -0.0501708984375, 0.049591064453125, -0.029388427734375, -0.03375244140625, -0.028961181640625, -0.00945281982421875, 0.005077362060546875, 0.035675048828125, 0.029327392578125, 0.027801513671875, 0.04052734375, -0.033172607421875, 0.0372314453125, 0.056793212890625, -0.01122283935546875, 0.029266357421875, -0.06939697265625, 0.0231781005859375, 0.00891876220703125, 0.055908203125, -0.051849365234375, -0.031463623046875, 0.020355224609375, -0.0248565673828125, -0.0003056526184082031, -0.019866943359375, -0.057281494140625, -0.007717132568359375, -0.0209808349609375, 0.06341552734375, 0.07275390625, -0.051025390625, 0.01552581787109375, 0.0256805419921875, 0.030120849609375, -0.052978515625, -0.0259552001953125, -0.0283966064453125, -0.00861358642578125, -0.0214080810546875, 0.006053924560546875, -0.0091552734375, -0.00615692138671875, -0.0028057098388671875, 0.00888824462890625, -0.00827789306640625, 0.0037822723388671875, 0.045562744140625, 0.02996826171875, -0.01568603515625, 0.0018224716186523438, 0.0018968582153320312, 0.00606536865234375, -0.0014963150024414062, -0.0034427642822265625, 
0.05670166015625, -0.01044464111328125, -0.045074462890625, -0.044158935546875, -0.002628326416015625, 0.01441192626953125, 0.0193023681640625, 0.065673828125, 0.0396728515625, -0.040496826171875, 0.00762176513671875, -0.038360595703125, -0.0224609375, -0.034088134765625, 0.0094451904296875, -0.0121917724609375, -0.034698486328125, 0.02685546875, 0.0086822509765625, -0.004547119140625, 0.05322265625, 0.053466796875, -0.0039825439453125, 0.0653076171875, 0.061279296875, -0.03662109375, 0.03570556640625, -0.0228271484375, -0.030853271484375, -0.0579833984375, -0.042144775390625, -0.047088623046875, -0.035400390625, -0.0404052734375, -0.0193328857421875, 0.004634857177734375, 0.00925445556640625, -0.0595703125, 0.051513671875, -0.031463623046875, 0.027862548828125, 0.04852294921875, 0.0276031494140625, 0.01389312744140625, 0.0230865478515625, -0.004913330078125, 0.0187225341796875, -0.03839111328125, -0.0207977294921875, 0.06646728515625, 0.029754638671875, 0.07501220703125, -0.0036907196044921875, 0.037322998046875, 0.0011816024780273438, 0.0280914306640625, -0.04620361328125, 0.0185546875, 0.01824951171875, -0.05755615234375, -0.017608642578125, -0.0123138427734375, -0.07830810546875, -0.00628662109375, -0.032501220703125, -0.05029296875, 0.00848388671875, 0.00017178058624267578, 0.0034618377685546875, 0.02899169921875, -0.0092315673828125, 0.037628173828125, -0.0018625259399414062, -0.00484466552734375, -0.0146942138671875, -0.051361083984375, 0.00858306884765625, 0.027099609375, 0.00467681884765625, -0.035980224609375, -0.0021915435791015625, 0.0599365234375, -0.034942626953125, 0.0772705078125, 0.00001895427703857422, -0.002864837646484375, 0.031890869140625, -0.01210784912109375, 0.00980377197265625, -0.00397491455078125, -0.014404296875, 0.03204345703125, -0.0196685791015625, -0.055389404296875, -0.0156097412109375, 0.0280303955078125, -0.09375, -0.00859832763671875, -0.041778564453125, -0.04345703125, -0.01259613037109375, -0.00791168212890625, 
0.025115966796875, 0.01184844970703125, -0.033782958984375, 0.020233154296875, 0.032318115234375, -0.00852203369140625, 0.01910400390625, 0.04443359375, -0.00868988037109375, -0.0259246826171875, 0.04656982421875, -0.04052734375, 0.0029926300048828125, 0.0270538330078125, 0.0230712890625, -0.0280609130859375, -0.0163421630859375, -0.045440673828125, 0.029998779296875, -0.03851318359375, -0.021026611328125, -0.030364990234375, -0.03662109375, -0.0193939208984375, -0.01094818115234375, -0.0244293212890625, -0.018890380859375, -0.058135986328125, -0.0242462158203125, 0.08062744140625, 0.04425048828125, -0.007289886474609375, 0.0426025390625, -0.05645751953125, 0.0205230712890625, 0.01325225830078125, 0.02349853515625, -0.005397796630859375, -0.056304931640625, -0.0124664306640625, 0.019866943359375, -0.0273590087890625, -0.0465087890625, 0.0235748291015625, 0.020782470703125, 0.046112060546875, 0.0203094482421875, -0.01837158203125, 0.049072265625, -0.03546142578125, 0.05523681640625, 0.058380126953125, -0.035919189453125, 0.06988525390625, -0.0273590087890625, -0.00510406494140625, 0.033172607421875, 0.04595947265625, -0.0171051025390625, -0.004871368408203125, -0.0626220703125, -0.041229248046875, 0.0241851806640625, 0.0269317626953125, 0.0036106109619140625, 0.01030731201171875, 0.040679931640625, 0.0180206298828125, 0.023284912109375, -0.0458984375, -0.02490234375, -0.0328369140625, -0.0212554931640625, 0.0026683807373046875, 0.007007598876953125, -0.0281829833984375, -0.0262298583984375, 0.0616455078125, -0.0146331787109375, -0.002170562744140625, -0.0086669921875, 0.003940582275390625, -0.0272216796875, -0.023712158203125, 0.0305328369140625, 0.01499176025390625, -0.037628173828125, -0.02947998046875, 0.027130126953125, -0.052764892578125, 0.02789306640625, -0.013153076171875, 0.00904083251953125, -0.00662994384765625, 0.031402587890625, 0.05792236328125, 0.01050567626953125, -0.047607421875, 0.040985107421875, -0.01030731201171875, -0.007411956787109375, 
-0.0160980224609375, 0.0211334228515625, -0.0016679763793945312, 0.0156707763671875, 0.0227203369140625, -0.00762176513671875, 0.015167236328125, -0.031280517578125, 0.0230712890625, 0.01959228515625, -0.0238494873046875, -0.030364990234375, 0.040374755859375, -0.0006313323974609375, -0.020599365234375, 0.05938720703125, -0.0181884765625, -0.0133819580078125, 0.0589599609375, 0.039215087890625, 0.049713134765625, 0.0029926300048828125, -0.00833892822265625, 0.0355224609375, -0.0009121894836425781, -0.00980377197265625, 0.03497314453125, 0.0081634521484375, -0.06695556640625, -0.00785064697265625, -0.054412841796875, -0.061798095703125, 0.007633209228515625, -0.08154296875, 0.047119140625, -0.052215576171875, -0.006900787353515625, -0.01088714599609375, 0.019378662109375, -0.0672607421875, 0.0400390625, 0.0035686492919921875, 0.080078125, -0.05975341796875, 0.06915283203125, 0.06695556640625, -0.0509033203125, -0.049102783203125, -0.01494598388671875, -0.005100250244140625, -0.10003662109375, 0.05352783203125, -0.01377105712890625, -0.00763702392578125, -0.03094482421875, -0.08380126953125, -0.07476806640625, 0.08721923828125, 0.021514892578125, -0.038421630859375, 0.00572967529296875, -0.0207061767578125, 0.0252685546875, -0.029083251953125, 0.04266357421875, 0.035308837890625, 0.007297515869140625, 0.0404052734375, -0.0755615234375, -0.0033206939697265625, -0.038330078125, -0.006683349609375, -0.01316070556640625, -0.090087890625, 0.05755615234375, 0.004749298095703125, 0.022491455078125, 0.01328277587890625, 0.0814208984375, 0.061859130859375, 0.01324462890625, 0.0537109375, 0.054901123046875, 0.0445556640625, 0.01446533203125, 0.0697021484375, -0.0205230712890625, 0.04522705078125, 0.07891845703125, -0.0357666015625, 0.06585693359375, 0.0216064453125, -0.0033588409423828125, 0.07672119140625, 0.060577392578125, -0.0288543701171875, 0.06396484375, 0.03887939453125, -0.01561737060546875, -0.01021575927734375, -0.020416259765625, -0.049072265625, 0.03369140625, 
0.04010009765625, 0.007656097412109375, -0.025177001953125, -0.0215301513671875, -0.009429931640625, -0.0275115966796875, -0.0025730133056640625, 0.04852294921875, 0.004489898681640625, 0.0017061233520507812, 0.046356201171875, 0.0019054412841796875, 0.048309326171875, -0.04229736328125, -0.00897216796875, -0.00323486328125, -0.02203369140625, -0.01480865478515625, -0.044830322265625, 0.0029850006103515625, 0.012359619140625, -0.0171966552734375, 0.01059722900390625, 0.056640625, -0.011474609375, -0.051239013671875, 0.011627197265625, 0.038909912109375, 0.0372314453125, 0.03363037109375, -0.0596923828125, 0.01328277587890625, -0.0211181640625, -0.03851318359375, 0.027130126953125, 0.0222625732421875, 0.0224456787109375, 0.06878662109375, 0.05670166015625, -0.005886077880859375, -0.002777099609375, 0.00600433349609375, 0.0770263671875, -0.059600830078125, -0.0435791015625, -0.039947509765625, 0.01558685302734375, -0.0183563232421875, -0.0494384765625, 0.04486083984375, 0.051116943359375, 0.06103515625, 0.00991058349609375, 0.05352783203125, 0.005428314208984375, 0.043731689453125, -0.02642822265625, 0.04498291015625, -0.053131103515625, -0.0205230712890625, -0.0015497207641601562, -0.0428466796875, 0.0009455680847167969, 0.08135986328125, 0.01163482666015625, 0.0260009765625, 0.03912353515625, 0.046142578125, -0.030853271484375, 0.01171112060546875, 0.004241943359375, 0.01085662841796875, 0.006038665771484375, 0.032440185546875, 0.0653076171875, -0.03863525390625, 0.028717041015625, -0.03240966796875, -0.00931549072265625, -0.033447265625, -0.08221435546875, -0.08636474609375, -0.0213470458984375, -0.045257568359375, -0.048431396484375, -0.0017309188842773438, 0.07489013671875, 0.08392333984375, -0.0665283203125, -0.032928466796875, 0.006191253662109375, -0.00283050537109375, 0.0074310302734375, -0.006732940673828125, 0.029998779296875, 0.01678466796875, -0.044097900390625, 0.02288818359375, -0.01274871826171875, 0.034637451171875, -0.02349853515625, 
0.003520965576171875, -0.0209808349609375, 0.005588531494140625, 0.0059356689453125, 0.03369140625, -0.03472900390625, -0.049713134765625, -0.01444244384765625, -0.01324462890625, 0.0108795166015625, 0.020111083984375, -0.060699462890625, -0.026763916015625, 0.02362060546875, 0.0313720703125, 0.046356201171875, 0.006343841552734375, 0.023895263671875, -0.0716552734375, 0.03973388671875, 0.0004467964172363281, 0.0435791015625, 0.0258636474609375, -0.0296478271484375, 0.0341796875, 0.037017822265625, -0.0262451171875, -0.048431396484375, 0.0002830028533935547, -0.10565185546875, 0.01491546630859375, 0.0870361328125, 0.001789093017578125, -0.0494384765625, 0.0270538330078125, -0.02056884765625, 0.039276123046875, -0.062164306640625, 0.044891357421875, 0.03173828125, -0.0088043212890625, 0.0164642333984375, -0.0189971923828125, 0.0176544189453125, 0.013427734375, -0.054534912109375, -0.043182373046875, 0.0235748291015625, 0.054779052734375, 0.0167999267578125, 0.03955078125, 0.0177154541015625, 0.01904296875, 0.0222625732421875, 0.01873779296875, 0.011077880859375, -0.0210723876953125, -0.0185546875, 0.0007567405700683594, -0.0038909912109375, -0.046844482421875 ] ]
Henk717/chronoboros-33B
2023-07-10T20:48:47.000Z
[ "transformers", "pytorch", "llama", "text-generation", "license:other", "endpoints_compatible", "has_space", "text-generation-inference", "region:us" ]
text-generation
Henk717
null
null
Henk717/chronoboros-33B
9
5,870
transformers
2023-07-09T21:00:09
--- license: other --- This model was the result of a 50/50 average weight merge between Airoboros-33B-1.4 and Chronos-33B. After prolonged testing we concluded that while this merge is highly flexible and capable of many different tasks, it has to much variation in how it answers to be reliable. Because of this the model relies on some luck to get good results, and is therefore not recommended to people seeking a consistent experience, or people sensitive to anticipation based addictions. If you would like an improved version of this model that is more stable check out my Airochronos-33B merge.
604
[ [ -0.0308074951171875, -0.0005598068237304688, 0.01824951171875, 0.0157318115234375, -0.039398193359375, 0.0074920654296875, 0.01239013671875, -0.06488037109375, 0.034027099609375, 0.021240234375, -0.03521728515625, -0.003330230712890625, -0.058807373046875, 0.00041556358337402344, -0.047210693359375, 0.0703125, -0.0181884765625, 0.030181884765625, 0.0233001708984375, -0.0132598876953125, -0.046600341796875, -0.02471923828125, -0.04083251953125, -0.04742431640625, 0.04364013671875, 0.033721923828125, 0.0738525390625, 0.0301666259765625, 0.061614990234375, 0.0169830322265625, -0.01079559326171875, -0.00962066650390625, -0.04718017578125, -0.024261474609375, 0.011871337890625, -0.0423583984375, -0.052764892578125, 0.0240325927734375, 0.0267181396484375, 0.046112060546875, -0.078125, -0.0290374755859375, -0.005374908447265625, 0.049072265625, -0.025726318359375, -0.04638671875, -0.0260009765625, 0.0196075439453125, -0.01483154296875, 0.00371551513671875, -0.0004494190216064453, -0.03265380859375, 0.0029926300048828125, -0.0618896484375, -0.00044989585876464844, 0.0199432373046875, 0.08599853515625, 0.0014371871948242188, -0.03955078125, -0.03753662109375, -0.0484619140625, 0.05462646484375, -0.05206298828125, 0.0498046875, 0.0203399658203125, 0.046173095703125, -0.014068603515625, -0.01406097412109375, -0.02642822265625, -0.003173828125, 0.0146942138671875, 0.0181121826171875, -0.0287628173828125, -0.0030384063720703125, -0.006938934326171875, 0.036651611328125, -0.040374755859375, 0.0140380859375, -0.060577392578125, -0.0069732666015625, 0.04486083984375, 0.032318115234375, 0.0186309814453125, -0.01446533203125, -0.046173095703125, -0.034210205078125, -0.02166748046875, 0.0225830078125, 0.051971435546875, 0.0041656494140625, -0.0215301513671875, 0.07666015625, -0.0380859375, 0.06842041015625, 0.016815185546875, 0.004039764404296875, 0.01433563232421875, -0.0360107421875, -0.03485107421875, 0.016632080078125, 0.0340576171875, 0.059051513671875, 0.0236053466796875, 
-0.00299072265625, -0.00567626953125, 0.0038051605224609375, 0.0214691162109375, -0.054412841796875, -0.0006480216979980469, 0.032440185546875, -0.0413818359375, -0.038055419921875, 0.0321044921875, -0.02886962890625, 0.0008111000061035156, -0.0091552734375, 0.0209808349609375, -0.061920166015625, -0.01532745361328125, 0.019866943359375, -0.052703857421875, -0.0006690025329589844, 0.043731689453125, -0.0335693359375, 0.036224365234375, 0.0276336669921875, 0.046112060546875, -0.0306396484375, -0.0243682861328125, -0.01035308837890625, -0.01374053955078125, -0.058563232421875, 0.0440673828125, -0.01088714599609375, -0.032073974609375, -0.031402587890625, 0.0037097930908203125, 0.01531219482421875, -0.04534912109375, 0.01363372802734375, -0.050018310546875, 0.021697998046875, -0.03778076171875, -0.0322265625, -0.033966064453125, 0.002895355224609375, -0.041473388671875, 0.0487060546875, 0.048126220703125, -0.0266876220703125, 0.065185546875, -0.0008945465087890625, -0.0157318115234375, 0.0008878707885742188, 0.001556396484375, -0.019561767578125, 0.01296234130859375, -0.02001953125, -0.0084381103515625, -0.035400390625, 0.018707275390625, -0.0552978515625, -0.04541015625, 0.0282440185546875, -0.0225830078125, 0.0665283203125, 0.047576904296875, 0.000858306884765625, -0.00873565673828125, -0.02447509765625, -0.005157470703125, -0.0147857666015625, -0.0396728515625, -0.0123138427734375, -0.01485443115234375, 0.0021610260009765625, 0.03173828125, 0.020477294921875, -0.035736083984375, 0.0221099853515625, -0.01309967041015625, 0.0006680488586425781, 0.0310821533203125, -0.004718780517578125, 0.030426025390625, -0.07855224609375, 0.056610107421875, 0.0199432373046875, 0.0222320556640625, 0.036895751953125, -0.04998779296875, -0.0347900390625, -0.022674560546875, 0.02606201171875, 0.03204345703125, -0.03125, 0.0290069580078125, -0.007457733154296875, -0.054168701171875, -0.0155029296875, 0.03594970703125, 0.0279388427734375, -0.00505828857421875, 0.005924224853515625, 
-0.045501708984375, -0.0401611328125, -0.06268310546875, -0.00916290283203125, -0.0103302001953125, -0.009368896484375, 0.0222320556640625, 0.034576416015625, -0.0013341903686523438, 0.05780029296875, -0.041046142578125, -0.0240478515625, -0.01221466064453125, 0.03314208984375, 0.035400390625, 0.049285888671875, 0.0772705078125, -0.04901123046875, -0.023956298828125, 0.01074981689453125, -0.05938720703125, 0.006954193115234375, 0.01312255859375, -0.00881195068359375, 0.00023496150970458984, 0.0160980224609375, -0.047210693359375, 0.03131103515625, 0.049468994140625, -0.023681640625, 0.0220184326171875, -0.009918212890625, 0.03887939453125, -0.08154296875, 0.007038116455078125, -0.00850677490234375, -0.0012836456298828125, -0.024505615234375, 0.0079345703125, 0.0134124755859375, -0.01456451416015625, -0.053802490234375, 0.037261962890625, -0.032196044921875, -0.0063018798828125, -0.032745361328125, -0.015777587890625, -0.0147552490234375, 0.011444091796875, -0.0219573974609375, 0.0111846923828125, 0.0259246826171875, -0.049224853515625, 0.04644775390625, -0.005519866943359375, 0.002727508544921875, 0.05145263671875, -0.0299530029296875, -0.0211029052734375, 0.00531768798828125, 0.02606201171875, -0.060333251953125, -0.045074462890625, -0.00034356117248535156, -0.043212890625, -0.002979278564453125, 0.00955963134765625, 0.0027790069580078125, -0.056549072265625, -0.0465087890625, 0.03204345703125, 0.0294189453125, -0.00803375244140625, 0.057464599609375, -0.0114593505859375, -0.006229400634765625, -0.041748046875, -0.06744384765625, -0.0215911865234375, -0.0199737548828125, -0.033233642578125, 0.0190887451171875, -0.0078887939453125, -0.0259857177734375, 0.0291748046875, -0.025238037109375, -0.023193359375, -0.01082611083984375, 0.023651123046875, 0.03900146484375, 0.005886077880859375, -0.0160369873046875, 0.03228759765625, 0.020050048828125, -0.031005859375, 0.01215362548828125, 0.03759765625, 0.00614166259765625, -0.01436614990234375, -0.049072265625, 
0.01538848876953125, 0.0831298828125, 0.0120849609375, 0.05517578125, 0.010009765625, -0.0543212890625, 0.032470703125, -0.045623779296875, -0.0019369125366210938, -0.03265380859375, 0.002094268798828125, -0.03253173828125, -0.038116455078125, 0.06451416015625, 0.02569580078125, -0.007694244384765625, 0.028900146484375, 0.0211944580078125, -0.0186767578125, 0.0782470703125, 0.0293426513671875, 0.0084991455078125, 0.036041259765625, -0.0113525390625, -0.00630950927734375, -0.059722900390625, -0.03204345703125, -0.029632568359375, -0.02838134765625, -0.038970947265625, -0.00957489013671875, 0.0044097900390625, 0.042694091796875, -0.007526397705078125, 0.04541015625, -0.026458740234375, -0.01229095458984375, 0.03106689453125, 0.02239990234375, 0.0214996337890625, -0.0198822021484375, -0.00749969482421875, -0.002166748046875, -0.052032470703125, 0.0083465576171875, 0.0938720703125, -0.005035400390625, 0.078125, 0.0298614501953125, 0.03863525390625, 0.011260986328125, 0.0452880859375, -0.04742431640625, 0.03021240234375, -0.0249176025390625, -0.08447265625, -0.0239105224609375, -0.05010986328125, -0.0738525390625, 0.0172576904296875, -0.0396728515625, -0.035736083984375, 0.028594970703125, 0.004924774169921875, -0.04052734375, -0.00585174560546875, -0.038543701171875, 0.032501220703125, -0.0295257568359375, -0.0023250579833984375, -0.03253173828125, -0.028350830078125, 0.04547119140625, -0.02862548828125, 0.027557373046875, -0.01488494873046875, 0.0076141357421875, 0.0672607421875, -0.047637939453125, 0.006877899169921875, 0.00704193115234375, 0.01401519775390625, 0.034210205078125, 0.0136260986328125, 0.018890380859375, 0.005023956298828125, -0.015960693359375, 0.01096343994140625, -0.01013946533203125, -0.0266876220703125, -0.03240966796875, 0.05926513671875, -0.07086181640625, -0.010498046875, -0.05731201171875, -0.046966552734375, 0.021392822265625, 0.026336669921875, 0.03253173828125, 0.047149658203125, -0.0175018310546875, 0.02008056640625, 0.019500732421875, 
0.00690460205078125, 0.03363037109375, 0.062225341796875, -0.05078125, -0.07244873046875, 0.021331787109375, 0.0250091552734375, 0.032470703125, 0.0229644775390625, 0.01436614990234375, 0.0012111663818359375, -0.011962890625, -0.01479339599609375, 0.0006504058837890625, -0.0267486572265625, -0.02166748046875, -0.0191497802734375, -0.01502227783203125, -0.045562744140625, -0.03900146484375, -0.044464111328125, -0.033843994140625, -0.0030727386474609375, 0.005832672119140625, 0.012939453125, 0.04522705078125, -0.014434814453125, 0.0435791015625, -0.053924560546875, 0.0180511474609375, 0.04107666015625, 0.015350341796875, -0.01470947265625, -0.03082275390625, -0.01800537109375, 0.00820159912109375, -0.03485107421875, -0.09197998046875, 0.0159912109375, -0.019561767578125, 0.040985107421875, 0.05413818359375, -0.000911712646484375, 0.05108642578125, 0.00905609130859375, 0.0491943359375, 0.052764892578125, -0.07427978515625, 0.0224761962890625, -0.048248291015625, 0.035736083984375, 0.03564453125, 0.037750244140625, -0.0217132568359375, -0.04376220703125, -0.0654296875, -0.048248291015625, 0.0645751953125, 0.04058837890625, -0.033050537109375, 0.035980224609375, 0.0176849365234375, -0.00006699562072753906, 0.038177490234375, -0.045806884765625, 0.00475311279296875, -0.029388427734375, -0.00708770751953125, 0.0080108642578125, 0.02398681640625, -0.0284881591796875, -0.0296783447265625, 0.037872314453125, 0.00377655029296875, -0.0023326873779296875, -0.00238037109375, 0.004894256591796875, -0.05322265625, -0.000797271728515625, 0.053619384765625, 0.05987548828125, -0.06060791015625, -0.0014476776123046875, 0.033782958984375, -0.03155517578125, 0.01427459716796875, 0.015380859375, 0.0276947021484375, -0.0078582763671875, 0.036529541015625, 0.033477783203125, 0.035980224609375, -0.0439453125, 0.021392822265625, -0.01139068603515625, -0.0272216796875, 0.0076751708984375, 0.03167724609375, -0.00016963481903076172, -0.00008994340896606445, 0.046844482421875, 0.023406982421875, 
-0.0036144256591796875, -0.06854248046875, 0.02410888671875, 0.0411376953125, -0.027618408203125, -0.0010175704956054688, 0.056427001953125, 0.0251312255859375, -0.0282135009765625, 0.06573486328125, -0.026153564453125, -0.031768798828125, 0.0654296875, 0.0172882080078125, 0.0396728515625, -0.051300048828125, -0.0008215904235839844, 0.027984619140625, 0.037872314453125, -0.0276336669921875, 0.035308837890625, 0.0194549560546875, -0.0655517578125, -0.0013589859008789062, -0.0308685302734375, -0.038970947265625, -0.027099609375, -0.07958984375, 0.047332763671875, -0.00832366943359375, -0.0421142578125, -0.01419830322265625, 0.00908660888671875, -0.0362548828125, 0.06402587890625, -0.01451873779296875, 0.0653076171875, -0.089599609375, 0.061248779296875, 0.0679931640625, -0.01467132568359375, -0.08197021484375, -0.021270751953125, -0.0006685256958007812, -0.0477294921875, 0.0305633544921875, 0.0011692047119140625, 0.0111541748046875, -0.0261993408203125, -0.0259552001953125, -0.09222412109375, 0.1083984375, 0.018829345703125, -0.043487548828125, -0.0133056640625, -0.01214599609375, 0.0299072265625, -0.031005859375, 0.036285400390625, 0.002826690673828125, 0.0426025390625, 0.022125244140625, -0.102294921875, 0.0078277587890625, -0.001972198486328125, 0.0126190185546875, 0.0209503173828125, -0.07379150390625, 0.09161376953125, -0.01554107666015625, 0.0070343017578125, 0.0166473388671875, 0.0298309326171875, 0.04833984375, 0.033447265625, 0.050933837890625, 0.07745361328125, 0.0400390625, -0.007259368896484375, 0.09027099609375, -0.0323486328125, 0.037322998046875, 0.0516357421875, -0.0256500244140625, 0.050048828125, 0.03643798828125, -0.0184326171875, 0.034332275390625, 0.0650634765625, 0.0161895751953125, 0.046722412109375, -0.00803375244140625, 0.029144287109375, -0.022613525390625, 0.018310546875, -0.050201416015625, 0.053558349609375, -0.0010671615600585938, -0.00543212890625, -0.0200347900390625, 0.003261566162109375, 0.004810333251953125, -0.04052734375, 
-0.003070831298828125, 0.0169677734375, 0.007808685302734375, -0.07672119140625, 0.024627685546875, -0.0081329345703125, 0.04156494140625, -0.064453125, -0.0007104873657226562, -0.001873016357421875, 0.00658416748046875, -0.0243377685546875, -0.05328369140625, 0.0261993408203125, -0.0287017822265625, -0.0024585723876953125, 0.004711151123046875, 0.037353515625, -0.0032806396484375, -0.03643798828125, 0.0322265625, 0.0244903564453125, 0.004360198974609375, 0.0159149169921875, -0.033172607421875, 0.0276947021484375, -0.004302978515625, -0.0226287841796875, 0.01412200927734375, 0.0009355545043945312, 0.0154876708984375, 0.05572509765625, 0.048675537109375, 0.01049041748046875, 0.0012378692626953125, -0.036102294921875, 0.0418701171875, -0.043426513671875, -0.05218505859375, -0.024658203125, 0.0154266357421875, -0.0005078315734863281, -0.06475830078125, 0.05450439453125, 0.0654296875, 0.04266357421875, 0.01983642578125, 0.03173828125, -0.0303192138671875, 0.0033111572265625, -0.050506591796875, 0.05645751953125, -0.07855224609375, 0.0096282958984375, -0.01132965087890625, -0.08453369140625, 0.028228759765625, 0.03497314453125, 0.011260986328125, 0.0295562744140625, 0.0894775390625, 0.0205230712890625, 0.01006317138671875, 0.0109100341796875, 0.00684356689453125, 0.01360321044921875, 0.012420654296875, 0.044189453125, 0.029510498046875, -0.0318603515625, 0.044036865234375, -0.02471923828125, -0.053314208984375, -0.046966552734375, -0.04949951171875, -0.0275115966796875, -0.01202392578125, -0.006450653076171875, -0.036376953125, 0.0174713134765625, 0.048126220703125, 0.0704345703125, -0.03759765625, -0.052764892578125, -0.0267486572265625, 0.0022945404052734375, -0.045379638671875, -0.01873779296875, -0.0306243896484375, 0.0274505615234375, -0.0535888671875, 0.01374053955078125, 0.004337310791015625, 0.03094482421875, -0.0049591064453125, -0.01099395751953125, -0.002643585205078125, 0.0179443359375, -0.01078033447265625, 0.0136871337890625, -0.042266845703125, 
-0.006134033203125, 0.013153076171875, -0.027374267578125, -0.0084381103515625, 0.031524658203125, -0.04144287109375, 0.0250244140625, 0.05145263671875, -0.007526397705078125, 0.050201416015625, 0.01373291015625, 0.06854248046875, -0.005771636962890625, 0.004436492919921875, 0.006259918212890625, 0.032501220703125, 0.00478363037109375, -0.026519775390625, 0.03857421875, 0.0243988037109375, -0.0285186767578125, -0.06207275390625, -0.0048370361328125, -0.11566162109375, -0.0322265625, 0.07049560546875, 0.0102386474609375, 0.0096893310546875, 0.030975341796875, 0.006099700927734375, 0.03900146484375, -0.03497314453125, 0.047698974609375, 0.09130859375, -0.0408935546875, 0.01229095458984375, -0.03265380859375, 0.0008378028869628906, 0.04278564453125, -0.0555419921875, -0.04010009765625, 0.0033588409423828125, 0.040313720703125, 0.034149169921875, 0.032867431640625, 0.0024242401123046875, 0.050689697265625, 0.0209808349609375, 0.0175018310546875, 0.009033203125, -0.0297698974609375, -0.01464080810546875, 0.0079345703125, 0.0100250244140625, -0.0015125274658203125 ] ]
Gryphe/MythoLogic-13b
2023-08-04T09:55:45.000Z
[ "transformers", "pytorch", "llama", "text-generation", "en", "license:other", "endpoints_compatible", "has_space", "text-generation-inference", "region:us" ]
text-generation
Gryphe
null
null
Gryphe/MythoLogic-13b
17
5,870
transformers
2023-07-12T15:28:34
--- license: other language: - en --- **UPDATE:** There's a Llama 2 sequel now! [Check it out here!](https://huggingface.co/Gryphe/MythoLogic-L2-13b) An experiment with gradient merges using [the following script](https://github.com/TehVenomm/LM_Transformers_BlockMerge), with [Chronos](https://huggingface.co/elinas/chronos-13b) as its primary model, augmented by [Hermes](https://huggingface.co/NousResearch/Nous-Hermes-13b) and [Wizard-Vicuna Uncensored](https://huggingface.co/TheBloke/Wizard-Vicuna-13B-Uncensored-HF). Quantized models are available from TheBloke: [GGML](https://huggingface.co/TheBloke/MythoLogic-13B-GGML) - [GPTQ](https://huggingface.co/TheBloke/MythoLogic-13B-GPTQ) (You're the best!) ## Model details Chronos is a wonderfully verbose model, though it definitely seems to lack in the logic department. Hermes and WizardLM have been merged gradually, primarily in the higher layers (10+) in an attempt to rectify some of this behaviour. The main objective was to create an all-round model with improved story generation and roleplaying capabilities. Below is an illustration to showcase a rough approximation of the gradients I used to create MythoLogic: ![](approximation.png) ## Prompt Format This model primarily uses Alpaca formatting, so for optimal model performance, use: ``` <System prompt/Character Card> ### Instruction: Your instruction or question here. For roleplay purposes, I suggest the following - Write <CHAR NAME>'s next reply in a chat between <YOUR NAME> and <CHAR NAME>. Write a single reply only. ### Response: ``` --- license: other ---
1,597
[ [ -0.0309906005859375, -0.0439453125, 0.0258026123046875, 0.01180267333984375, -0.02703857421875, 0.01189422607421875, 0.005672454833984375, -0.05877685546875, 0.0325927734375, 0.0526123046875, -0.05889892578125, -0.0302581787109375, -0.0299072265625, -0.007419586181640625, -0.0257720947265625, 0.08953857421875, -0.0074920654296875, -0.0032520294189453125, -0.0100250244140625, -0.0228729248046875, -0.0225830078125, -0.0205841064453125, -0.057373046875, -0.0511474609375, 0.043609619140625, 0.0084991455078125, 0.061248779296875, 0.058197021484375, 0.040924072265625, 0.0215606689453125, 0.0025997161865234375, 0.03955078125, -0.04034423828125, 0.024749755859375, -0.0036449432373046875, -0.046173095703125, -0.053955078125, 0.011932373046875, 0.040618896484375, 0.027587890625, -0.0225830078125, 0.03997802734375, 0.0029144287109375, 0.041107177734375, -0.0222930908203125, -0.00203704833984375, 0.0009369850158691406, 0.0269012451171875, 0.00255584716796875, -0.015289306640625, -0.000820159912109375, -0.02923583984375, 0.004974365234375, -0.0804443359375, 0.0150909423828125, 0.018951416015625, 0.049468994140625, 0.035888671875, -0.031280517578125, -0.0128631591796875, -0.05218505859375, 0.05853271484375, -0.043792724609375, 0.006076812744140625, 0.026763916015625, 0.03204345703125, -0.0053558349609375, -0.07415771484375, -0.048095703125, -0.0251922607421875, -0.0013093948364257812, 0.0195465087890625, -0.03936767578125, -0.023712158203125, 0.019287109375, 0.032257080078125, -0.033599853515625, 0.018798828125, -0.047149658203125, -0.0204620361328125, 0.041900634765625, 0.0380859375, 0.0176239013671875, 0.0031871795654296875, -0.0455322265625, -0.0208282470703125, -0.045257568359375, 0.0008578300476074219, 0.041595458984375, -0.01320648193359375, -0.0301361083984375, 0.07525634765625, 0.01360321044921875, 0.0178375244140625, 0.0228424072265625, -0.03338623046875, 0.01342010498046875, -0.0186004638671875, -0.0138092041015625, -0.01189422607421875, 0.048553466796875, 
0.0643310546875, 0.005207061767578125, 0.00487518310546875, 0.0022640228271484375, 0.029388427734375, 0.01117706298828125, -0.0704345703125, -0.0104217529296875, 0.0240020751953125, -0.037017822265625, -0.046142578125, -0.04315185546875, -0.049652099609375, -0.04547119140625, -0.0194549560546875, 0.01244354248046875, -0.041290283203125, -0.037841796875, 0.0164337158203125, -0.01142120361328125, 0.0259246826171875, 0.044036865234375, -0.047882080078125, 0.028045654296875, 0.04400634765625, 0.055877685546875, 0.00322723388671875, -0.0212860107421875, -0.0174407958984375, 0.017059326171875, -0.0496826171875, 0.038787841796875, -0.028533935546875, -0.041961669921875, -0.005947113037109375, -0.018585205078125, 0.0321044921875, -0.036834716796875, 0.054046630859375, -0.053314208984375, 0.0377197265625, -0.0228118896484375, -0.0312042236328125, -0.0180206298828125, 0.01364898681640625, -0.0634765625, 0.07379150390625, 0.020599365234375, -0.05633544921875, -0.0006017684936523438, -0.035491943359375, -0.00214385986328125, -0.01012420654296875, 0.005855560302734375, -0.01366424560546875, -0.007579803466796875, -0.0031681060791015625, -0.00440216064453125, -0.0716552734375, 0.00383758544921875, -0.0268402099609375, -0.02001953125, 0.0182037353515625, -0.01456451416015625, 0.05499267578125, 0.018310546875, -0.0225982666015625, -0.0146331787109375, -0.046600341796875, -0.010223388671875, 0.00931549072265625, -0.01593017578125, -0.0018835067749023438, -0.0113525390625, 0.0080718994140625, 0.01477813720703125, 0.0163421630859375, -0.011993408203125, 0.033447265625, 0.00775146484375, 0.00750732421875, 0.05084228515625, -0.01544952392578125, 0.04937744140625, -0.048828125, 0.052337646484375, -0.002292633056640625, 0.0110015869140625, -0.0009169578552246094, -0.048797607421875, -0.0780029296875, -0.035980224609375, -0.007564544677734375, 0.052001953125, -0.048492431640625, 0.0147857666015625, 0.012451171875, -0.05804443359375, -0.034576416015625, -0.0013074874877929688, 
0.037872314453125, 0.0601806640625, 0.027679443359375, -0.04010009765625, -0.040496826171875, -0.0709228515625, 0.00504302978515625, -0.041595458984375, -0.0236968994140625, 0.0248260498046875, 0.006221771240234375, -0.03887939453125, 0.06671142578125, -0.047576904296875, -0.022796630859375, 0.0001633167266845703, 0.01123046875, 0.0208587646484375, 0.0458984375, 0.062469482421875, -0.021209716796875, -0.00548553466796875, 0.0106964111328125, -0.07427978515625, -0.009765625, 0.01514434814453125, -0.021392822265625, 0.0022296905517578125, 0.00734710693359375, -0.0780029296875, 0.0272979736328125, 0.0355224609375, -0.0171966552734375, 0.050872802734375, -0.0157928466796875, 0.029693603515625, -0.0860595703125, 0.0057373046875, 0.01995849609375, -0.0035190582275390625, -0.06219482421875, 0.0229339599609375, -0.019439697265625, -0.0026092529296875, -0.0231781005859375, 0.0552978515625, -0.0251617431640625, -0.005084991455078125, -0.01324462890625, -0.009185791015625, -0.01165771484375, 0.034088134765625, -0.0080413818359375, 0.03656005859375, 0.0638427734375, -0.037933349609375, 0.05487060546875, 0.0236968994140625, -0.015045166015625, 0.03619384765625, -0.054412841796875, 0.02288818359375, -0.01512908935546875, 0.05615234375, -0.0711669921875, -0.0253753662109375, 0.0625, -0.025054931640625, 0.0109710693359375, -0.011871337890625, -0.050750732421875, -0.040985107421875, -0.04547119140625, 0.048797607421875, 0.08526611328125, -0.04656982421875, 0.0604248046875, 0.0032253265380859375, 0.0075531005859375, -0.05963134765625, -0.0634765625, 0.0009512901306152344, -0.047027587890625, -0.0491943359375, 0.0234832763671875, -0.02191162109375, -0.028656005859375, -0.006687164306640625, 0.0018644332885742188, -0.0072479248046875, -0.0240478515625, 0.00830841064453125, 0.0484619140625, -0.0289306640625, -0.0251617431640625, 0.0249786376953125, -0.0007219314575195312, -0.01001739501953125, -0.0167083740234375, 0.04425048828125, -0.012115478515625, -0.0107574462890625, 
-0.055572509765625, 0.01165008544921875, 0.060699462890625, -0.01039886474609375, 0.02801513671875, 0.06903076171875, -0.035797119140625, 0.007633209228515625, -0.07135009765625, -0.0209808349609375, -0.036773681640625, 0.015716552734375, -0.0056610107421875, -0.0721435546875, 0.060943603515625, 0.0004649162292480469, 0.0150299072265625, 0.035858154296875, 0.0501708984375, -0.019989013671875, 0.05572509765625, 0.047027587890625, 0.0015077590942382812, 0.0128631591796875, -0.033599853515625, 0.0146636962890625, -0.05841064453125, -0.0285491943359375, -0.0027065277099609375, -0.0237884521484375, -0.05609130859375, -0.06597900390625, 0.0299072265625, 0.0400390625, -0.01374053955078125, 0.0304107666015625, -0.01300048828125, 0.0250091552734375, 0.025115966796875, 0.00940704345703125, 0.034271240234375, 0.044769287109375, 0.025360107421875, -0.01007080078125, -0.0289306640625, -0.040679931640625, 0.0828857421875, 0.032196044921875, 0.05853271484375, 0.037841796875, 0.050018310546875, 0.0170135498046875, 0.033294677734375, -0.0219268798828125, 0.0440673828125, 0.00315093994140625, -0.04150390625, -0.016937255859375, -0.00751495361328125, -0.054290771484375, 0.0333251953125, -0.026885986328125, -0.055572509765625, 0.02020263671875, 0.0242156982421875, -0.031402587890625, 0.02093505859375, -0.0299835205078125, 0.01715087890625, 0.01226806640625, -0.051544189453125, -0.047332763671875, -0.046539306640625, 0.0743408203125, -0.01560211181640625, 0.004428863525390625, -0.004222869873046875, -0.0237884521484375, 0.0361328125, -0.03363037109375, 0.06109619140625, 0.02490234375, -0.036834716796875, 0.05120849609375, 0.035125732421875, 0.031341552734375, 0.021514892578125, 0.00415802001953125, 0.0157623291015625, -0.017608642578125, -0.0181884765625, -0.05572509765625, 0.06134033203125, -0.0592041015625, -0.050811767578125, -0.03546142578125, -0.045989990234375, 0.00027871131896972656, -0.02618408203125, 0.0160675048828125, 0.0423583984375, -0.01214599609375, -0.0296173095703125, 
0.037506103515625, -0.003505706787109375, 0.03887939453125, 0.04876708984375, -0.0238037109375, -0.0439453125, 0.04266357421875, -0.02490234375, 0.01244354248046875, 0.00205230712890625, 0.00673675537109375, -0.0197296142578125, -0.003398895263671875, -0.049224853515625, 0.04833984375, -0.0360107421875, -0.0213470458984375, -0.041046142578125, -0.003406524658203125, -0.0172271728515625, -0.00904083251953125, -0.0173492431640625, -0.0611572265625, -0.04278564453125, -0.00445556640625, 0.053070068359375, 0.055694580078125, -0.046539306640625, 0.01806640625, -0.038299560546875, 0.0216217041015625, 0.022491455078125, 0.0017194747924804688, 0.01033782958984375, -0.055877685546875, 0.00786590576171875, -0.01202392578125, -0.027587890625, -0.06005859375, 0.035247802734375, -0.01087188720703125, 0.0097503662109375, 0.0289764404296875, -0.0223541259765625, 0.056610107421875, -0.050689697265625, 0.0633544921875, 0.060791015625, -0.03546142578125, 0.0198822021484375, -0.040985107421875, -0.0189666748046875, 0.01259613037109375, 0.0238189697265625, -0.037322998046875, -0.0250091552734375, -0.04046630859375, -0.0489501953125, 0.06341552734375, 0.032257080078125, 0.0162811279296875, 0.017120361328125, 0.0283203125, 0.0091552734375, 0.0247344970703125, -0.046600341796875, -0.055206298828125, -0.0110321044921875, 0.0155181884765625, -0.01319122314453125, -0.02825927734375, -0.043426513671875, -0.020599365234375, 0.033294677734375, 0.003971099853515625, 0.0117645263671875, 0.004314422607421875, 0.01532745361328125, -0.0308990478515625, -0.016571044921875, 0.03717041015625, 0.03265380859375, -0.015289306640625, 0.0000794529914855957, 0.01202392578125, -0.031707763671875, 0.01096343994140625, 0.0248260498046875, -0.0010461807250976562, -0.002231597900390625, 0.043853759765625, 0.053558349609375, -0.004329681396484375, -0.038360595703125, 0.0170745849609375, 0.004016876220703125, -0.0014820098876953125, -0.015899658203125, 0.00911712646484375, 0.00948333740234375, 0.050567626953125, 
0.027435302734375, 0.0244598388671875, 0.014251708984375, -0.048370361328125, -0.0019588470458984375, 0.01338958740234375, 0.0027370452880859375, -0.018310546875, 0.059967041015625, 0.0162353515625, -0.0288543701171875, 0.0382080078125, -0.0131378173828125, -0.029632568359375, 0.06036376953125, 0.050201416015625, 0.048858642578125, -0.040679931640625, 0.029144287109375, 0.045013427734375, 0.0227203369140625, -0.0185089111328125, -0.004146575927734375, -0.00946807861328125, -0.036773681640625, -0.0202484130859375, -0.0293731689453125, -0.02490234375, -0.0011014938354492188, -0.037139892578125, 0.03521728515625, -0.038726806640625, -0.015777587890625, -0.0250091552734375, 0.0014133453369140625, -0.050628662109375, 0.010223388671875, 0.005481719970703125, 0.0517578125, -0.0565185546875, 0.054351806640625, 0.0289154052734375, -0.040496826171875, -0.04815673828125, -0.01096343994140625, 0.01320648193359375, -0.06585693359375, 0.043548583984375, -0.0185394287109375, -0.0172576904296875, -0.010772705078125, -0.0333251953125, -0.082763671875, 0.12451171875, 0.0298004150390625, -0.047637939453125, -0.0011310577392578125, 0.0020847320556640625, 0.0673828125, -0.032989501953125, 0.03802490234375, 0.059967041015625, 0.0233306884765625, 0.0156402587890625, -0.07025146484375, 0.006481170654296875, -0.019073486328125, -0.0011243820190429688, -0.01222991943359375, -0.06854248046875, 0.06414794921875, -0.037017822265625, -0.0178985595703125, 0.054595947265625, 0.067626953125, 0.058135986328125, 0.029571533203125, 0.03594970703125, 0.066650390625, 0.058807373046875, 0.007366180419921875, 0.0992431640625, -0.0230712890625, 0.032501220703125, 0.05419921875, -0.028778076171875, 0.025238037109375, 0.0252838134765625, -0.0165557861328125, 0.03375244140625, 0.06494140625, -0.0053863525390625, 0.038604736328125, 0.01666259765625, -0.03265380859375, -0.00678253173828125, -0.0285186767578125, -0.0645751953125, -0.00514984130859375, 0.0019702911376953125, -0.002979278564453125, 
-0.004932403564453125, -0.0271453857421875, 0.01727294921875, -0.01824951171875, -0.01221466064453125, 0.00453948974609375, 0.007572174072265625, -0.040618896484375, 0.04052734375, 0.01043701171875, 0.06646728515625, -0.055206298828125, -0.006046295166015625, -0.054718017578125, -0.0086822509765625, -0.016571044921875, -0.042572021484375, -0.0234832763671875, 0.0007338523864746094, -0.01197052001953125, 0.0069427490234375, 0.035003662109375, -0.053802490234375, -0.052642822265625, 0.0302886962890625, 0.03338623046875, 0.013519287109375, 0.056671142578125, -0.043365478515625, 0.01629638671875, -0.007183074951171875, -0.01235198974609375, 0.00785064697265625, 0.005462646484375, -0.001728057861328125, 0.059844970703125, 0.04669189453125, -0.017974853515625, -0.00836944580078125, 0.015289306640625, 0.070556640625, -0.00794219970703125, -0.016021728515625, -0.048919677734375, 0.0268402099609375, 0.0004639625549316406, -0.038818359375, 0.045013427734375, 0.049652099609375, 0.031585693359375, -0.02288818359375, 0.04168701171875, 0.0189666748046875, 0.00811767578125, -0.034759521484375, 0.040740966796875, -0.06414794921875, 0.0177459716796875, -0.0241546630859375, -0.09124755859375, -0.01309967041015625, 0.0308990478515625, 0.0232696533203125, -0.003940582275390625, 0.057830810546875, 0.06976318359375, -0.0072479248046875, -0.012969970703125, 0.0019207000732421875, 0.01306915283203125, 0.01195526123046875, 0.048248291015625, 0.09014892578125, -0.03509521484375, 0.0281982421875, -0.0175018310546875, -0.032379150390625, -0.00717926025390625, -0.0546875, -0.06903076171875, -0.03076171875, -0.033233642578125, -0.04815673828125, 0.009368896484375, 0.0540771484375, 0.0406494140625, -0.02459716796875, -0.035491943359375, 0.0174407958984375, -0.01105499267578125, 0.0030059814453125, -0.0099945068359375, 0.022186279296875, 0.022918701171875, -0.06903076171875, 0.030792236328125, 0.0146942138671875, 0.0277099609375, -0.022186279296875, -0.021881103515625, 0.031768798828125, 
0.0186920166015625, 0.036956787109375, 0.030303955078125, -0.068603515625, -0.010009765625, -0.01383209228515625, 0.0008592605590820312, -0.01345062255859375, 0.08209228515625, -0.052581787109375, 0.0010738372802734375, 0.01226806640625, 0.0277099609375, 0.0626220703125, -0.00980377197265625, 0.03814697265625, -0.0243988037109375, 0.0237579345703125, 0.0042724609375, 0.0274810791015625, 0.02935791015625, -0.028076171875, 0.059661865234375, 0.018463134765625, -0.0233001708984375, -0.069580078125, 0.03271484375, -0.09515380859375, 0.0032062530517578125, 0.09124755859375, 0.0277862548828125, -0.0017747879028320312, 0.039215087890625, -0.033599853515625, 0.0229949951171875, -0.0242919921875, 0.042633056640625, 0.041595458984375, -0.0225677490234375, -0.0009965896606445312, -0.0294952392578125, 0.033477783203125, 0.0291900634765625, -0.048095703125, -0.01226043701171875, 0.05792236328125, 0.0286865234375, 0.0260009765625, 0.057342529296875, -0.0091552734375, 0.035614013671875, -0.01107025146484375, 0.01189422607421875, -0.007282257080078125, -0.00982666015625, -0.0250244140625, -0.01016998291015625, -0.0170440673828125, -0.01096343994140625 ] ]
KnutJaegersberg/black_goo_recipe_d
2023-09-03T09:53:27.000Z
[ "transformers", "pytorch", "llama", "text-generation", "custom_code", "license:cc-by-nc-4.0", "endpoints_compatible", "has_space", "text-generation-inference", "region:us" ]
text-generation
KnutJaegersberg
null
null
KnutJaegersberg/black_goo_recipe_d
0
5,869
transformers
2023-09-03T06:52:48
--- license: cc-by-nc-4.0 --- Prompt Example: ``` ### System: You are an AI assistant. User will you give you a task. Your goal is to complete the task as faithfully as you can. While performing the task think step-by-step and justify your steps. ### Instruction: How do you fine tune a large language model? ### Response: ```
333
[ [ -0.01407623291015625, -0.06634521484375, 0.0394287109375, 0.00498199462890625, -0.004062652587890625, -0.006053924560546875, -0.004848480224609375, 0.00004607439041137695, 0.007518768310546875, 0.052001953125, -0.07318115234375, -0.00264739990234375, -0.027130126953125, 0.026123046875, -0.01654052734375, 0.0567626953125, -0.025634765625, 0.01297760009765625, -0.0189056396484375, 0.0288848876953125, -0.08404541015625, -0.019866943359375, -0.0919189453125, -0.028961181640625, 0.0156097412109375, 0.069580078125, 0.051300048828125, 0.056549072265625, 0.02392578125, 0.0265960693359375, 0.02081298828125, 0.00384521484375, -0.02197265625, 0.04388427734375, -0.01413726806640625, -0.032806396484375, -0.02581787109375, 0.0123443603515625, 0.02008056640625, 0.0428466796875, 0.001102447509765625, 0.0260467529296875, 0.00597381591796875, 0.0137786865234375, -0.018035888671875, 0.01751708984375, 0.0022678375244140625, 0.005168914794921875, 0.002979278564453125, 0.0008082389831542969, -0.029052734375, -0.026580810546875, -0.029937744140625, -0.060699462890625, -0.01219940185546875, 0.010101318359375, 0.055877685546875, 0.0443115234375, -0.0239715576171875, -0.02105712890625, -0.0711669921875, 0.046295166015625, -0.020416259765625, 0.00713348388671875, 0.06890869140625, 0.04461669921875, -0.005218505859375, -0.062347412109375, -0.049468994140625, -0.043365478515625, -0.0152587890625, 0.00530242919921875, 0.00783538818359375, -0.0025119781494140625, 0.054351806640625, 0.005809783935546875, -0.037689208984375, -0.0116424560546875, -0.035888671875, -0.0275115966796875, 0.036163330078125, 0.0269012451171875, 0.0394287109375, 0.01100921630859375, 0.0010805130004882812, -0.0246429443359375, -0.0205535888671875, 0.004428863525390625, 0.004306793212890625, 0.0203094482421875, -0.005229949951171875, 0.06390380859375, -0.0245208740234375, 0.044921875, 0.0054168701171875, 0.023406982421875, -0.002841949462890625, -0.04046630859375, -0.02447509765625, -0.012908935546875, 0.04180908203125, 
0.0223846435546875, 0.04461669921875, -0.0233306884765625, -0.0257720947265625, 0.0086212158203125, 0.0241241455078125, -0.0243377685546875, -0.0107574462890625, 0.0264434814453125, -0.0280914306640625, 0.0013170242309570312, -0.004241943359375, -0.048736572265625, -0.01141357421875, -0.04107666015625, 0.041168212890625, -0.026458740234375, 0.00811004638671875, 0.00771331787109375, -0.01104736328125, 0.015899658203125, 0.027587890625, -0.0635986328125, 0.05572509765625, 0.051483154296875, 0.0369873046875, 0.025909423828125, -0.022064208984375, -0.05914306640625, 0.0015172958374023438, -0.018463134765625, 0.0413818359375, -0.026641845703125, -0.0455322265625, -0.0072021484375, 0.0017595291137695312, -0.0008358955383300781, -0.03179931640625, 0.03662109375, -0.0232391357421875, 0.037139892578125, -0.049072265625, -0.032440185546875, -0.006633758544921875, 0.0178070068359375, -0.031036376953125, 0.067138671875, 0.0163726806640625, -0.0063934326171875, 0.0030918121337890625, -0.07843017578125, -0.0193328857421875, 0.003948211669921875, -0.003704071044921875, 0.03125, -0.00739288330078125, 0.012939453125, 0.0284423828125, -0.00624847412109375, -0.01080322265625, -0.02276611328125, -0.0203094482421875, 0.0190582275390625, -0.0279388427734375, 0.07470703125, 0.0272216796875, -0.034027099609375, 0.006923675537109375, -0.040985107421875, 0.00440216064453125, 0.01117706298828125, -0.0227508544921875, 0.0006833076477050781, -0.0109405517578125, 0.01366424560546875, -0.0006260871887207031, 0.078857421875, -0.0411376953125, 0.0552978515625, -0.01202392578125, 0.0026264190673828125, 0.07354736328125, 0.007633209228515625, 0.0164947509765625, -0.022247314453125, 0.06292724609375, -0.02874755859375, 0.0035228729248046875, -0.021881103515625, -0.0165557861328125, -0.0570068359375, 0.0023956298828125, 0.018768310546875, 0.0794677734375, -0.038848876953125, 0.033447265625, 0.01427459716796875, -0.00591278076171875, -0.027587890625, 0.00666046142578125, 0.0433349609375, 
0.04095458984375, 0.0270843505859375, 0.0004627704620361328, -0.047576904296875, -0.05645751953125, 0.013092041015625, -0.041961669921875, -0.007282257080078125, 0.0219268798828125, 0.035430908203125, -0.021484375, 0.052093505859375, -0.05352783203125, 0.01122283935546875, -0.0145263671875, 0.01029205322265625, -0.0019550323486328125, 0.0509033203125, 0.0019283294677734375, -0.051361083984375, -0.037078857421875, -0.005443572998046875, -0.0726318359375, -0.0242767333984375, -0.0256500244140625, -0.0149993896484375, -0.006610870361328125, 0.0517578125, -0.050994873046875, 0.022308349609375, 0.0036869049072265625, -0.06719970703125, 0.04034423828125, -0.0288238525390625, 0.0192718505859375, -0.10284423828125, 0.00855255126953125, -0.0213775634765625, -0.028472900390625, -0.050994873046875, 0.03558349609375, -0.0362548828125, 0.0009403228759765625, -0.0093994140625, 0.03839111328125, -0.01410675048828125, -0.0011129379272460938, -0.029571533203125, -0.004184722900390625, -0.020111083984375, 0.0228271484375, -0.0088348388671875, 0.08367919921875, 0.053436279296875, -0.05584716796875, 0.07257080078125, 0.04736328125, 0.0005168914794921875, 0.039306640625, -0.08721923828125, 0.0187530517578125, -0.01274871826171875, -0.004241943359375, -0.09796142578125, -0.0277099609375, 0.031890869140625, -0.039337158203125, 0.014678955078125, 0.03387451171875, -0.0537109375, -0.0343017578125, 0.00202178955078125, 0.01763916015625, 0.060577392578125, -0.0209197998046875, 0.032318115234375, 0.0141754150390625, -0.00981903076171875, -0.0282135009765625, -0.040069580078125, 0.0245208740234375, -0.01410675048828125, -0.02691650390625, -0.0124359130859375, -0.04107666015625, -0.0296173095703125, -0.030120849609375, 0.0264434814453125, -0.0009775161743164062, 0.025421142578125, 0.02410888671875, 0.01739501953125, -0.01059722900390625, -0.0136566162109375, -0.0214996337890625, -0.002155303955078125, 0.01385498046875, 0.01372528076171875, 0.049957275390625, -0.0147857666015625, 
-0.036834716796875, -0.061981201171875, 0.04949951171875, 0.03558349609375, -0.01155853271484375, 0.004791259765625, 0.02105712890625, -0.033599853515625, 0.03125, -0.0147247314453125, -0.025390625, -0.03662109375, 0.0108489990234375, -0.0280914306640625, -0.0242767333984375, 0.04296875, 0.005886077880859375, 0.01763916015625, 0.0325927734375, 0.035980224609375, -0.031158447265625, 0.09442138671875, 0.0333251953125, -0.01111602783203125, -0.0004954338073730469, -0.0118865966796875, -0.006443023681640625, -0.039947509765625, -0.0249786376953125, -0.062347412109375, -0.0245513916015625, -0.01300048828125, 0.00021600723266601562, 0.01415252685546875, 0.0291290283203125, -0.03204345703125, 0.0650634765625, -0.0300750732421875, 0.01195526123046875, 0.07867431640625, 0.007236480712890625, -0.013275146484375, -0.025970458984375, 0.0042877197265625, 0.021087646484375, -0.02703857421875, -0.059417724609375, 0.0291748046875, 0.03759765625, 0.06903076171875, 0.00696563720703125, 0.08197021484375, 0.021636962890625, -0.02581787109375, -0.058441162109375, 0.06329345703125, 0.01776123046875, -0.039825439453125, -0.060577392578125, 0.01071929931640625, -0.0894775390625, -0.038726806640625, 0.006481170654296875, -0.07537841796875, -0.01285552978515625, 0.034393310546875, -0.039093017578125, -0.0213165283203125, -0.05987548828125, 0.08404541015625, -0.01442718505859375, 0.013671875, 0.00007045269012451172, -0.05035400390625, -0.008697509765625, 0.0233306884765625, -0.0161285400390625, 0.0095672607421875, -0.02862548828125, 0.03851318359375, -0.033599853515625, 0.064697265625, -0.01629638671875, 0.0251922607421875, 0.01303863525390625, 0.0090179443359375, 0.033111572265625, -0.0095367431640625, 0.00786590576171875, -0.05206298828125, 0.022430419921875, -0.031707763671875, -0.04608154296875, 0.017486572265625, -0.056182861328125, -0.05889892578125, -0.01554107666015625, -0.04150390625, -0.01027679443359375, 0.0222320556640625, 0.0219268798828125, 0.033477783203125, 
-0.0285491943359375, -0.00856781005859375, 0.054901123046875, 0.00496673583984375, 0.048553466796875, 0.004730224609375, -0.00799560546875, -0.00371551513671875, 0.05126953125, -0.01488494873046875, -0.0025157928466796875, 0.034149169921875, 0.0229949951171875, -0.032928466796875, -0.017791748046875, -0.06640625, 0.0173797607421875, -0.035858154296875, -0.004276275634765625, -0.046234130859375, 0.01149749755859375, -0.048004150390625, -0.013671875, 0.00844573974609375, -0.0227813720703125, -0.045501708984375, -0.00797271728515625, 0.016998291015625, 0.064208984375, 0.007648468017578125, 0.040618896484375, -0.050628662109375, 0.027191162109375, 0.03607177734375, 0.003635406494140625, -0.028106689453125, -0.02490234375, -0.0165252685546875, 0.017120361328125, -0.039947509765625, -0.06488037109375, 0.003055572509765625, -0.0010385513305664062, 0.055450439453125, 0.0160980224609375, 0.01410675048828125, 0.037933349609375, -0.050537109375, 0.095947265625, -0.003154754638671875, -0.07373046875, 0.06341552734375, -0.0209197998046875, 0.0489501953125, 0.05999755859375, 0.031402587890625, -0.0457763671875, -0.031982421875, -0.051025390625, -0.04425048828125, 0.043121337890625, -0.0012941360473632812, 0.039947509765625, -0.0097198486328125, 0.0232696533203125, -0.01016998291015625, 0.01338958740234375, -0.0555419921875, -0.0285491943359375, -0.00360107421875, -0.01226043701171875, -0.006603240966796875, 0.00017523765563964844, -0.00710296630859375, -0.0081329345703125, 0.056427001953125, -0.0030918121337890625, 0.040802001953125, -0.0004029273986816406, 0.0207061767578125, -0.00820159912109375, 0.0066375732421875, 0.08294677734375, 0.037811279296875, 0.001903533935546875, -0.0098876953125, 0.01678466796875, -0.024871826171875, 0.001384735107421875, 0.005710601806640625, -0.013153076171875, -0.00998687744140625, 0.038055419921875, 0.05426025390625, -0.04461669921875, -0.05047607421875, 0.020172119140625, -0.015655517578125, -0.004322052001953125, -0.03509521484375, 
0.037933349609375, -0.004436492919921875, 0.01259613037109375, 0.006603240966796875, 0.00385284423828125, 0.0200653076171875, -0.0657958984375, 0.0276641845703125, 0.022125244140625, -0.042999267578125, -0.012115478515625, 0.04351806640625, 0.0085296630859375, -0.0789794921875, 0.06292724609375, -0.0101470947265625, -0.040252685546875, 0.061431884765625, 0.047821044921875, 0.0638427734375, -0.021514892578125, 0.015869140625, 0.02862548828125, 0.00408935546875, -0.029449462890625, 0.069580078125, -0.01059722900390625, -0.0352783203125, -0.027740478515625, -0.0206298828125, -0.0257415771484375, 0.0097808837890625, -0.045379638671875, 0.01412200927734375, -0.041473388671875, 0.00740814208984375, 0.00476837158203125, -0.024444580078125, -0.030181884765625, 0.044189453125, -0.00621795654296875, 0.088623046875, -0.048187255859375, 0.04302978515625, 0.05511474609375, -0.052642822265625, -0.08642578125, -0.0007572174072265625, -0.0162200927734375, -0.059356689453125, 0.051910400390625, 0.0002753734588623047, 0.00739288330078125, -0.0169219970703125, -0.06585693359375, -0.0282745361328125, 0.05291748046875, 0.0207061767578125, -0.014739990234375, -0.0153350830078125, -0.006977081298828125, 0.0362548828125, -0.0361328125, 0.0258636474609375, 0.03271484375, 0.048492431640625, 0.004543304443359375, -0.072509765625, 0.0163726806640625, -0.01806640625, 0.0006709098815917969, -0.006038665771484375, -0.02130126953125, 0.06195068359375, -0.0252838134765625, 0.00783538818359375, 0.041412353515625, 0.048004150390625, -0.0072784423828125, 0.020751953125, 0.05645751953125, 0.01175689697265625, 0.054443359375, -0.009124755859375, 0.091552734375, -0.014495849609375, -0.0021038055419921875, 0.08392333984375, 0.0109710693359375, 0.055328369140625, 0.0257415771484375, -0.018341064453125, 0.03839111328125, 0.0711669921875, -0.0025882720947265625, 0.0307464599609375, 0.0034046173095703125, -0.00394439697265625, -0.0232086181640625, -0.0125579833984375, -0.006687164306640625, 0.03643798828125, 
-0.002216339111328125, -0.006618499755859375, 0.0021457672119140625, 0.015899658203125, -0.00238800048828125, 0.03021240234375, -0.03192138671875, 0.0711669921875, -0.018798828125, -0.09698486328125, 0.03668212890625, 0.00469207763671875, 0.0255584716796875, -0.031280517578125, -0.021575927734375, -0.0095672607421875, 0.0007929801940917969, 0.0011043548583984375, -0.0667724609375, 0.02545166015625, 0.002223968505859375, -0.027435302734375, -0.02911376953125, 0.02569580078125, -0.0364990234375, -0.038360595703125, -0.0189056396484375, 0.0227813720703125, 0.0484619140625, 0.020538330078125, -0.070556640625, -0.021728515625, 0.017791748046875, 0.0014629364013671875, -0.0154876708984375, 0.0168304443359375, -0.00222015380859375, 0.057647705078125, 0.0203399658203125, 0.0030517578125, -0.01418304443359375, -0.0092010498046875, 0.041259765625, -0.0335693359375, -0.0310821533203125, -0.052886962890625, 0.04364013671875, -0.01532745361328125, -0.04083251953125, 0.0498046875, 0.046417236328125, 0.060882568359375, -0.0217132568359375, 0.0509033203125, -0.0189361572265625, 0.043853759765625, -0.0266876220703125, 0.0014591217041015625, -0.037353515625, 0.0035343170166015625, 0.0029468536376953125, -0.04083251953125, 0.004730224609375, 0.07135009765625, 0.003925323486328125, 0.03271484375, 0.0506591796875, 0.062255859375, 0.0038166046142578125, -0.00508880615234375, 0.03240966796875, -0.0029468536376953125, 0.01003265380859375, 0.0179443359375, 0.050140380859375, -0.0228424072265625, 0.0243682861328125, -0.005298614501953125, -0.023956298828125, 0.003986358642578125, -0.03472900390625, -0.049285888671875, -0.051239013671875, -0.02740478515625, -0.0426025390625, 0.00289154052734375, 0.1005859375, 0.052642822265625, -0.07049560546875, -0.04754638671875, -0.004024505615234375, 0.00557708740234375, -0.033477783203125, -0.00839996337890625, -0.006481170654296875, -0.04339599609375, -0.07037353515625, 0.00897979736328125, -0.021575927734375, 0.0318603515625, -0.038116455078125, 
-0.007965087890625, -0.01739501953125, 0.0103759765625, 0.0195770263671875, 0.042694091796875, -0.052642822265625, -0.04461669921875, 0.02423095703125, -0.035736083984375, -0.01198577880859375, 0.048187255859375, -0.02899169921875, 0.028076171875, 0.0267791748046875, 0.0474853515625, 0.0128936767578125, 0.01476287841796875, 0.0687255859375, -0.03778076171875, -0.0288848876953125, 0.020111083984375, 0.0210113525390625, 0.02197265625, -0.07000732421875, 0.034149169921875, -0.011322021484375, -0.058258056640625, -0.033966064453125, 0.023834228515625, -0.08697509765625, -0.036163330078125, 0.08087158203125, -0.00811004638671875, -0.007030487060546875, -0.03558349609375, -0.06634521484375, 0.033782958984375, -0.045745849609375, 0.04327392578125, 0.062255859375, -0.03753662109375, -0.009033203125, -0.031707763671875, 0.048736572265625, 0.030364990234375, -0.06103515625, 0.003696441650390625, 0.045928955078125, -0.0004341602325439453, 0.034271240234375, 0.06036376953125, 0.037200927734375, 0.0222320556640625, 0.00829315185546875, -0.0203399658203125, -0.0178680419921875, -0.033111572265625, -0.02392578125, 0.00555419921875, 0.03387451171875, -0.05926513671875 ] ]
KnutJaegersberg/RWKV-4-PilePlus-169M-20230520-done-ctx4096
2023-09-05T09:59:05.000Z
[ "transformers", "pytorch", "rwkv", "license:apache-2.0", "endpoints_compatible", "region:us" ]
null
KnutJaegersberg
null
null
KnutJaegersberg/RWKV-4-PilePlus-169M-20230520-done-ctx4096
0
5,869
transformers
2023-09-05T08:46:20
--- license: apache-2.0 --- This is just a standard conversion to hf transformers format of models from here: https://huggingface.co/BlinkDL/rwkv-4-pileplus According to the documentation I found, this model should have seen roundabout 2 trillion tokens!
259
[ [ -0.0310516357421875, -0.057647705078125, 0.0180511474609375, 0.025115966796875, -0.03167724609375, -0.0262603759765625, 0.0379638671875, -0.010009765625, 0.023162841796875, 0.07537841796875, -0.05548095703125, -0.027435302734375, -0.00878143310546875, -0.004817962646484375, -0.05755615234375, 0.0699462890625, 0.022857666015625, -0.0005049705505371094, -0.0168304443359375, -0.0018177032470703125, -0.003978729248046875, -0.016937255859375, -0.034637451171875, -0.025909423828125, 0.047607421875, 0.052032470703125, 0.0655517578125, 0.048553466796875, 0.0595703125, 0.0091705322265625, 0.016265869140625, -0.03143310546875, -0.040283203125, -0.045013427734375, 0.008544921875, -0.01145172119140625, -0.06036376953125, 0.04052734375, 0.05810546875, 0.034454345703125, -0.035797119140625, 0.023834228515625, -0.0233612060546875, 0.040740966796875, -0.01910400390625, 0.005260467529296875, 0.0012178421020507812, 0.026824951171875, -0.0278778076171875, -0.01410675048828125, -0.04632568359375, -0.00274658203125, -0.0057525634765625, -0.0728759765625, 0.0196533203125, 0.040740966796875, 0.07275390625, 0.024658203125, -0.06927490234375, 0.008941650390625, -0.05322265625, 0.03594970703125, -0.046417236328125, 0.041961669921875, 0.00217437744140625, 0.05810546875, -0.0333251953125, -0.0762939453125, -0.04925537109375, 0.00917816162109375, -0.02337646484375, 0.00897979736328125, -0.0080413818359375, 0.00991058349609375, 0.041351318359375, 0.0423583984375, -0.044677734375, -0.0038928985595703125, -0.05108642578125, -0.0235443115234375, 0.050689697265625, 0.0134124755859375, 0.00472259521484375, -0.0606689453125, -0.0657958984375, -0.0182037353515625, -0.059814453125, -0.0033779144287109375, 0.0257720947265625, 0.003665924072265625, -0.055999755859375, 0.064208984375, -0.005954742431640625, 0.046722412109375, 0.0198211669921875, -0.0036945343017578125, 0.0225067138671875, -0.01357269287109375, -0.024810791015625, -0.01763916015625, 0.042999267578125, 0.0325927734375, 
0.00743865966796875, -0.005645751953125, -0.0280914306640625, -0.05718994140625, 0.048004150390625, -0.08331298828125, -0.021392822265625, -0.003841400146484375, -0.062744140625, -0.033935546875, 0.02960205078125, -0.052276611328125, -0.0024929046630859375, 0.0115509033203125, 0.024078369140625, -0.005931854248046875, -0.042755126953125, 0.00615692138671875, -0.0259246826171875, 0.046142578125, 0.00894927978515625, -0.0477294921875, 0.013885498046875, 0.043121337890625, 0.037567138671875, -0.008544921875, -0.00908660888671875, -0.0299072265625, 0.019287109375, -0.0232696533203125, 0.07012939453125, -0.0045928955078125, -0.0511474609375, 0.021026611328125, 0.01421356201171875, 0.0172119140625, -0.02618408203125, 0.0660400390625, -0.050079345703125, 0.0298614501953125, -0.0022411346435546875, -0.0212554931640625, -0.036651611328125, 0.0099334716796875, -0.0784912109375, 0.08538818359375, 0.0655517578125, -0.056060791015625, 0.037261962890625, -0.061248779296875, -0.031463623046875, 0.033233642578125, 0.006145477294921875, -0.0290374755859375, -0.00044083595275878906, -0.0235137939453125, 0.0199737548828125, -0.029327392578125, 0.0245361328125, -0.0172119140625, -0.03558349609375, 0.0040283203125, -0.017303466796875, 0.0582275390625, 0.037200927734375, 0.00881195068359375, 0.049163818359375, -0.06951904296875, -0.02606201171875, 0.004688262939453125, -0.0238189697265625, 0.0148162841796875, -0.02581787109375, 0.036102294921875, 0.039520263671875, 0.0164337158203125, -0.0247650146484375, 0.0227203369140625, -0.00803375244140625, 0.035247802734375, 0.0200653076171875, 0.0012187957763671875, 0.03863525390625, -0.0123443603515625, 0.047698974609375, 0.026611328125, 0.03302001953125, -0.0014867782592773438, -0.073486328125, -0.0731201171875, -0.0093841552734375, 0.005405426025390625, 0.009185791015625, -0.0367431640625, 0.037994384765625, -0.0015697479248046875, -0.04241943359375, -0.01493072509765625, -0.04241943359375, -0.026702880859375, 0.042388916015625, 
0.004085540771484375, -0.00624847412109375, -0.033355712890625, -0.0828857421875, 0.01873779296875, -0.0219268798828125, -0.0126190185546875, 0.00930023193359375, 0.05133056640625, -0.0240325927734375, 0.06488037109375, -0.00844573974609375, -0.011322021484375, -0.0191802978515625, 0.0284423828125, 0.0226593017578125, 0.044708251953125, 0.0447998046875, -0.047271728515625, -0.01090240478515625, -0.00543975830078125, -0.0289459228515625, 0.017242431640625, -0.01092529296875, -0.039276123046875, 0.0081939697265625, 0.049346923828125, -0.0728759765625, 0.049163818359375, 0.048675537109375, -0.0390625, 0.021759033203125, 0.00020313262939453125, 0.0034008026123046875, -0.07147216796875, 0.0213623046875, -0.00498199462890625, -0.0418701171875, -0.0283966064453125, 0.0228118896484375, 0.0272979736328125, -0.00893402099609375, -0.035675048828125, 0.017822265625, -0.0218505859375, -0.042022705078125, 0.0025119781494140625, -0.0058746337890625, -0.0223388671875, 0.03521728515625, -0.01342010498046875, 0.045562744140625, 0.0206756591796875, -0.02227783203125, 0.049896240234375, 0.0390625, 0.00759124755859375, 0.0421142578125, -0.048553466796875, 0.01160430908203125, -0.0006003379821777344, 0.045562744140625, -0.06695556640625, -0.0545654296875, 0.034759521484375, -0.00691986083984375, 0.0007343292236328125, -0.00485992431640625, -0.0418701171875, -0.06109619140625, -0.0200958251953125, 0.057647705078125, 0.05487060546875, -0.04046630859375, 0.05792236328125, 0.03277587890625, 0.01418304443359375, -0.0082244873046875, -0.040802001953125, -0.0011682510375976562, -0.0345458984375, -0.050628662109375, 0.03424072265625, 0.0106201171875, 0.00449371337890625, -0.007843017578125, 0.021331787109375, -0.0202484130859375, -0.0187225341796875, 0.033721923828125, 0.0204620361328125, -0.0245208740234375, -0.0243377685546875, -0.0010614395141601562, -0.01424407958984375, 0.0208892822265625, 0.0025653839111328125, 0.034912109375, -0.0017366409301757812, -0.00959014892578125, 
-0.045928955078125, 0.021697998046875, 0.06036376953125, 0.00809478759765625, 0.0305328369140625, 0.06890869140625, -0.0513916015625, -0.01433563232421875, -0.0303955078125, -0.02734375, -0.033050537109375, 0.00876617431640625, -0.034332275390625, -0.07183837890625, 0.03070068359375, -0.00861358642578125, 0.007389068603515625, 0.087158203125, 0.04339599609375, -0.0030345916748046875, 0.0777587890625, 0.05615234375, 0.0276336669921875, -0.004360198974609375, -0.0157318115234375, 0.040435791015625, -0.033721923828125, -0.04290771484375, -0.028472900390625, -0.0204315185546875, -0.03448486328125, -0.050689697265625, 0.0198211669921875, 0.027862548828125, -0.0458984375, 0.033538818359375, -0.032928466796875, 0.014678955078125, 0.0355224609375, -0.0240325927734375, 0.03204345703125, -0.01274871826171875, 0.01259613037109375, 0.0009632110595703125, -0.0721435546875, -0.0259857177734375, 0.054840087890625, 0.05804443359375, 0.0299530029296875, 0.00542449951171875, 0.044708251953125, 0.0143280029296875, 0.038665771484375, -0.0703125, 0.042083740234375, 0.011322021484375, -0.07208251953125, -0.032958984375, -0.0087127685546875, -0.046630859375, -0.00537872314453125, -0.021026611328125, -0.007843017578125, -0.0024700164794921875, -0.00722503662109375, -0.0299072265625, 0.0128631591796875, -0.0283660888671875, 0.092041015625, 0.0178985595703125, 0.034454345703125, -0.042266845703125, -0.031951904296875, 0.0628662109375, 0.00609588623046875, 0.0181884765625, 0.01241302490234375, 0.0167999267578125, 0.0667724609375, -0.0258941650390625, 0.0251922607421875, -0.002368927001953125, -0.01143646240234375, 0.043243408203125, 0.00632476806640625, 0.03302001953125, 0.0177764892578125, 0.0162353515625, 0.0003294944763183594, -0.010772705078125, -0.0361328125, -0.006130218505859375, 0.038665771484375, -0.056060791015625, -0.0281982421875, -0.036895751953125, -0.04010009765625, 0.015655517578125, 0.0162353515625, 0.0196075439453125, 0.02337646484375, -0.036651611328125, 
0.0209197998046875, 0.0302734375, 0.0212249755859375, 0.04150390625, 0.0174713134765625, -0.037994384765625, -0.00971221923828125, 0.03350830078125, -0.022125244140625, 0.0010652542114257812, 0.00891876220703125, 0.00461578369140625, -0.053985595703125, -0.0298614501953125, -0.055450439453125, 0.03375244140625, -0.049530029296875, -0.01513671875, -0.0298309326171875, -0.0318603515625, -0.05804443359375, -0.0011720657348632812, -0.0245819091796875, -0.056732177734375, -0.041534423828125, -0.0257110595703125, 0.040802001953125, 0.09710693359375, -0.0104522705078125, 0.033843994140625, -0.05743408203125, 0.0230560302734375, 0.036712646484375, 0.0308074951171875, -0.01322174072265625, -0.0751953125, -0.0213623046875, -0.0194549560546875, -0.01419830322265625, -0.05291748046875, 0.025115966796875, -0.0089569091796875, 0.022491455078125, 0.045166015625, -0.019439697265625, 0.0452880859375, -0.05181884765625, 0.0650634765625, 0.03448486328125, -0.040740966796875, -0.01531219482421875, -0.048248291015625, -0.0126190185546875, 0.00519561767578125, 0.026214599609375, -0.050689697265625, 0.01229095458984375, -0.07830810546875, -0.0347900390625, 0.0535888671875, 0.0011625289916992188, -0.0085601806640625, -0.0004150867462158203, 0.029266357421875, 0.0114898681640625, 0.011322021484375, -0.045135498046875, -0.0175018310546875, -0.041015625, -0.0032711029052734375, 0.0206298828125, -0.0274200439453125, -0.0162506103515625, -0.020599365234375, 0.02496337890625, -0.004360198974609375, 0.032318115234375, -0.00823211669921875, 0.005886077880859375, -0.0294189453125, -0.0011186599731445312, 0.041656494140625, 0.02337646484375, -0.0245819091796875, 0.0024547576904296875, -0.0014619827270507812, -0.0279083251953125, -0.025604248046875, -0.01050567626953125, 0.001850128173828125, -0.00846099853515625, 0.004856109619140625, 0.042388916015625, 0.034576416015625, -0.0159759521484375, 0.040191650390625, 0.01788330078125, -0.037628173828125, -0.058197021484375, -0.0010700225830078125, 
0.0293121337890625, 0.009490966796875, 0.02978515625, 0.005725860595703125, 0.026641845703125, -0.035064697265625, 0.039825439453125, 0.0179290771484375, -0.0295562744140625, -0.039947509765625, 0.065673828125, 0.055755615234375, -0.0477294921875, 0.06793212890625, -0.048736572265625, -0.01010894775390625, 0.02581787109375, 0.0523681640625, 0.06890869140625, -0.0024356842041015625, 0.0426025390625, 0.056671142578125, 0.00513458251953125, -0.00927734375, 0.022247314453125, 0.00775146484375, -0.03955078125, -0.0294189453125, -0.0494384765625, -0.022125244140625, 0.0054473876953125, -0.0577392578125, 0.04058837890625, -0.0595703125, 0.0007443428039550781, -0.01102447509765625, -0.035003662109375, -0.04901123046875, 0.020904541015625, 0.020721435546875, 0.0914306640625, -0.048309326171875, 0.0712890625, 0.04571533203125, -0.0263671875, -0.06549072265625, -0.01373291015625, -0.0062255859375, -0.07269287109375, 0.037628173828125, 0.0102386474609375, 0.00974273681640625, 0.0219879150390625, -0.047454833984375, -0.06024169921875, 0.089111328125, -0.01479339599609375, -0.04791259765625, -0.0188751220703125, 0.036376953125, 0.0202178955078125, -0.047149658203125, 0.02801513671875, 0.062744140625, 0.055145263671875, 0.0264434814453125, -0.046722412109375, 0.013092041015625, -0.01580810546875, 0.0054931640625, 0.0141754150390625, -0.05535888671875, 0.07928466796875, 0.006389617919921875, -0.01611328125, 0.01302337646484375, 0.060546875, 0.02227783203125, 0.004909515380859375, 0.06146240234375, 0.053070068359375, 0.02130126953125, -0.02032470703125, 0.06695556640625, -0.01274871826171875, 0.043426513671875, 0.00684356689453125, -0.0223236083984375, 0.057037353515625, 0.055755615234375, -0.0213470458984375, 0.05718994140625, 0.046722412109375, -0.00896453857421875, 0.007965087890625, 0.00687408447265625, 0.0165252685546875, -0.035125732421875, -0.00411224365234375, -0.012725830078125, 0.021331787109375, 0.035552978515625, 0.02178955078125, -0.019744873046875, -0.0225830078125, 
-0.01279449462890625, -0.0084686279296875, -0.0195770263671875, 0.0270233154296875, 0.00000476837158203125, -0.025421142578125, 0.0226593017578125, 0.0102996826171875, 0.044647216796875, -0.0031795501708984375, -0.0287933349609375, 0.01012420654296875, -0.0153350830078125, -0.02691650390625, -0.06103515625, 0.037689208984375, -0.014801025390625, -0.006145477294921875, -0.0150909423828125, 0.04302978515625, -0.01654052734375, -0.05230712890625, 0.003833770751953125, 0.0244140625, 0.03759765625, 0.00533294677734375, -0.06756591796875, -0.0297088623046875, 0.0024890899658203125, -0.029022216796875, 0.005840301513671875, 0.0270538330078125, -0.0210113525390625, 0.02178955078125, 0.034271240234375, -0.032958984375, 0.0020999908447265625, 0.0281829833984375, 0.03216552734375, -0.0830078125, -0.050994873046875, 0.002689361572265625, 0.0626220703125, -0.018218994140625, -0.057647705078125, 0.036041259765625, 0.04144287109375, 0.048309326171875, -0.0260467529296875, 0.026336669921875, -0.007549285888671875, 0.040863037109375, -0.0198211669921875, 0.054534912109375, -0.043243408203125, -0.026641845703125, 0.015899658203125, -0.06683349609375, -0.01396942138671875, 0.058013916015625, 0.040618896484375, -0.0028553009033203125, 0.0758056640625, 0.042327880859375, 0.0020999908447265625, 0.0216827392578125, 0.03253173828125, 0.016448974609375, 0.0161590576171875, 0.006603240966796875, 0.050689697265625, -0.042938232421875, 0.0309295654296875, -0.022430419921875, -0.0105438232421875, -0.03497314453125, -0.06048583984375, -0.06805419921875, -0.041107177734375, -0.015899658203125, -0.0241546630859375, -0.03424072265625, 0.0386962890625, 0.059326171875, -0.048614501953125, -0.0247344970703125, -0.029937744140625, -0.00994873046875, 0.0170440673828125, -0.01715087890625, 0.0182647705078125, 0.0147552490234375, -0.041351318359375, 0.01342010498046875, 0.0267181396484375, 0.0175018310546875, -0.05047607421875, -0.0081634521484375, 0.020904541015625, 0.00658416748046875, 
0.03680419921875, 0.0010662078857421875, -0.05267333984375, -0.045928955078125, 0.0113067626953125, -0.0245819091796875, 0.004627227783203125, 0.078369140625, -0.05133056640625, -0.031463623046875, 0.04742431640625, 0.0006842613220214844, 0.0565185546875, 0.012939453125, 0.03826904296875, -0.03936767578125, 0.0489501953125, 0.018585205078125, 0.023406982421875, 0.0165252685546875, -0.0301055908203125, 0.022247314453125, 0.031524658203125, -0.040191650390625, -0.048370361328125, 0.0305328369140625, -0.1229248046875, 0.0028705596923828125, 0.066162109375, 0.01139068603515625, -0.03369140625, 0.004608154296875, -0.0222930908203125, 0.02581787109375, -0.0249176025390625, 0.045013427734375, 0.034332275390625, 0.01117706298828125, -0.022857666015625, -0.040802001953125, 0.041961669921875, 0.01175689697265625, -0.043701171875, -0.00640106201171875, 0.0283355712890625, -0.0003352165222167969, 0.0218353271484375, 0.024658203125, -0.036651611328125, 0.033843994140625, 0.008575439453125, 0.0152740478515625, 0.015716552734375, -0.0263671875, -0.0198211669921875, -0.0038127899169921875, -0.01374053955078125, 0.01236724853515625 ] ]
allenai/specter2_aug2023refresh_base
2023-10-18T04:22:22.000Z
[ "transformers", "pytorch", "bert", "feature-extraction", "dataset:allenai/scirepeval", "license:apache-2.0", "endpoints_compatible", "region:us" ]
feature-extraction
allenai
null
null
allenai/specter2_aug2023refresh_base
0
5,868
transformers
2023-08-14T19:16:06
--- license: apache-2.0 datasets: - allenai/scirepeval --- **\*\*\*\*\*\*Update\*\*\*\*\*\*** This update introduces a new set of SPECTER2 models with the base transformer encoder pre-trained on an extended citation dataset containing more recent papers. For benchmarking purposes please use the existing SPECTER2 [models](https://huggingface.co/allenai/specter2) w/o the **aug2023refresh** suffix. # SPECTER2 (Base) SPECTER2 is the successor to [SPECTER](https://huggingface.co/allenai/specter) and is capable of generating task specific embeddings for scientific tasks when paired with [adapters](https://huggingface.co/models?search=allenai/specter-2_). This is the base model to be used along with the adapters. Given the combination of title and abstract of a scientific paper or a short texual query, the model can be used to generate effective embeddings to be used in downstream applications. **Note:For general embedding purposes, please use [allenai/specter2](https://huggingface.co/allenai/specter2).** **To get the best performance on a downstream task type please load the associated adapter with the base model as in the example below.** # Model Details ## Model Description SPECTER2 has been trained on over 6M triplets of scientific paper citations, which are available [here](https://huggingface.co/datasets/allenai/scirepeval/viewer/cite_prediction_new/evaluation). Post that it is trained with additionally attached task format specific adapter modules on all the [SciRepEval](https://huggingface.co/datasets/allenai/scirepeval) training tasks. Task Formats trained on: - Classification - Regression - Proximity - Adhoc Search It builds on the work done in [SciRepEval: A Multi-Format Benchmark for Scientific Document Representations](https://api.semanticscholar.org/CorpusID:254018137) and we evaluate the trained model on this benchmark as well. 
- **Developed by:** Amanpreet Singh, Mike D'Arcy, Arman Cohan, Doug Downey, Sergey Feldman - **Shared by :** Allen AI - **Model type:** bert-base-uncased + adapters - **License:** Apache 2.0 - **Finetuned from model:** [allenai/scibert](https://huggingface.co/allenai/scibert_scivocab_uncased). ## Model Sources <!-- Provide the basic links for the model. --> - **Repository:** [https://github.com/allenai/SPECTER2](https://github.com/allenai/SPECTER2) - **Paper:** [https://api.semanticscholar.org/CorpusID:254018137](https://api.semanticscholar.org/CorpusID:254018137) - **Demo:** [Usage](https://github.com/allenai/SPECTER2/blob/main/README.md) # Uses <!-- Address questions around how the model is intended to be used, including the foreseeable users of the model and those affected by the model. --> ## Direct Use |Model|Name and HF link|Description| |--|--|--| |Proximity*|[allenai/specter2_aug2023refresh](https://huggingface.co/allenai/specter2_aug2023refresh)|Encode papers as queries and candidates eg. Link Prediction, Nearest Neighbor Search| |Adhoc Query|[allenai/specter2_aug2023refresh_adhoc_query](https://huggingface.co/allenai/specter2_aug2023refresh_adhoc_query)|Encode short raw text queries for search tasks. 
(Candidate papers can be encoded with the proximity adapter)| |Classification|[allenai/specter2_aug2023refresh_classification](https://huggingface.co/allenai/specter2_aug2023refresh_classification)|Encode papers to feed into linear classifiers as features| |Regression|[allenai/specter2_aug2023refresh_regression](https://huggingface.co/allenai/specter2_aug2023refresh_regression)|Encode papers to feed into linear regressors as features| *Proximity model should suffice for downstream task types not mentioned above ```python from transformers import AutoTokenizer, AutoModel # load model and tokenizer tokenizer = AutoTokenizer.from_pretrained('allenai/specter2_aug2023refresh_base') #load base model model = AutoModel.from_pretrained('allenai/specter2_aug2023refresh_base') #load the adapter(s) as per the required task, provide an identifier for the adapter in load_as argument and activate it model.load_adapter("allenai/specter2_aug2023refresh", source="hf", load_as="proximity", set_active=True) #other possibilities: allenai/specter2_aug2023refresh_<classification|regression|adhoc_query> papers = [{'title': 'BERT', 'abstract': 'We introduce a new language representation model called BERT'}, {'title': 'Attention is all you need', 'abstract': ' The dominant sequence transduction models are based on complex recurrent or convolutional neural networks'}] # concatenate title and abstract text_batch = [d['title'] + tokenizer.sep_token + (d.get('abstract') or '') for d in papers] # preprocess the input inputs = self.tokenizer(text_batch, padding=True, truncation=True, return_tensors="pt", return_token_type_ids=False, max_length=512) output = model(**inputs) # take the first token in the batch as the embedding embeddings = output.last_hidden_state[:, 0, :] ``` ## Downstream Use <!-- This section is for the model use when fine-tuned for a task, or when plugged into a larger ecosystem/app --> For evaluation and downstream usage, please refer to 
[https://github.com/allenai/scirepeval/blob/main/evaluation/INFERENCE.md](https://github.com/allenai/scirepeval/blob/main/evaluation/INFERENCE.md). # Training Details ## Training Data <!-- This should link to a Data Card, perhaps with a short stub of information on what the training data is all about as well as documentation related to data pre-processing or additional filtering. --> The base model is trained on citation links between papers and the adapters are trained on 8 large scale tasks across the four formats. All the data is a part of SciRepEval benchmark and is available [here](https://huggingface.co/datasets/allenai/scirepeval). The citation link are triplets in the form ```json {"query": {"title": ..., "abstract": ...}, "pos": {"title": ..., "abstract": ...}, "neg": {"title": ..., "abstract": ...}} ``` consisting of a query paper, a positive citation and a negative which can be from the same/different field of study as the query or citation of a citation. ## Training Procedure Please refer to the [SPECTER paper](https://api.semanticscholar.org/CorpusID:215768677). ### Training Hyperparameters The model is trained in two stages using [SciRepEval](https://github.com/allenai/scirepeval/blob/main/training/TRAINING.md): - Base Model: First a base model is trained on the above citation triplets. ``` batch size = 1024, max input length = 512, learning rate = 2e-5, epochs = 2 warmup steps = 10% fp16``` - Adapters: Thereafter, task format specific adapters are trained on the SciRepEval training tasks, where 600K triplets are sampled from above and added to the training data as well. ``` batch size = 256, max input length = 512, learning rate = 1e-4, epochs = 6 warmup = 1000 steps fp16``` # Evaluation We evaluate the model on [SciRepEval](https://github.com/allenai/scirepeval), a large scale eval benchmark for scientific embedding tasks which which has [SciDocs] as a subset. 
We also evaluate and establish a new SoTA on [MDCR](https://github.com/zoranmedic/mdcr), a large scale citation recommendation benchmark. |Model|SciRepEval In-Train|SciRepEval Out-of-Train|SciRepEval Avg|MDCR(MAP, Recall@5)| |--|--|--|--|--| |[BM-25](https://api.semanticscholar.org/CorpusID:252199740)|n/a|n/a|n/a|(33.7, 28.5)| |[SPECTER](https://huggingface.co/allenai/specter)|54.7|57.4|68.0|(30.6, 25.5)| |[SciNCL](https://huggingface.co/malteos/scincl)|55.6|57.8|69.0|(32.6, 27.3)| |[SciRepEval-Adapters](https://huggingface.co/models?search=scirepeval)|61.9|59.0|70.9|(35.3, 29.6)| |[SPECTER2-Adapters](https://huggingface.co/models?search=allenai/specter-2)|**62.3**|**59.2**|**71.2**|**(38.4, 33.0)**| Please cite the following works if you end up using SPECTER2: [SPECTER paper](https://api.semanticscholar.org/CorpusID:215768677): ```bibtex @inproceedings{specter2020cohan, title={{SPECTER: Document-level Representation Learning using Citation-informed Transformers}}, author={Arman Cohan and Sergey Feldman and Iz Beltagy and Doug Downey and Daniel S. Weld}, booktitle={ACL}, year={2020} } ``` [SciRepEval paper](https://api.semanticscholar.org/CorpusID:254018137) ```bibtex @article{Singh2022SciRepEvalAM, title={SciRepEval: A Multi-Format Benchmark for Scientific Document Representations}, author={Amanpreet Singh and Mike D'Arcy and Arman Cohan and Doug Downey and Sergey Feldman}, journal={ArXiv}, year={2022}, volume={abs/2211.13308} } ```
8,548
[ [ -0.0158233642578125, -0.015472412109375, 0.039093017578125, 0.006870269775390625, -0.00830841064453125, -0.01078033447265625, -0.02191162109375, -0.05267333984375, 0.032073974609375, 0.003589630126953125, -0.0178070068359375, -0.025115966796875, -0.053955078125, 0.0158843994140625, -0.01052093505859375, 0.081298828125, -0.00811767578125, 0.01187896728515625, -0.01751708984375, -0.005451202392578125, -0.0007157325744628906, -0.02801513671875, -0.037841796875, -0.014923095703125, 0.039398193359375, 0.01947021484375, 0.01407623291015625, 0.03106689453125, 0.05560302734375, 0.0167388916015625, -0.021881103515625, 0.0304107666015625, -0.02862548828125, -0.003223419189453125, -0.016357421875, -0.0185394287109375, -0.03533935546875, -0.0011911392211914062, 0.0657958984375, 0.0438232421875, 0.01079559326171875, 0.0154571533203125, 0.006839752197265625, 0.03607177734375, -0.042572021484375, 0.0266265869140625, -0.04083251953125, -0.01256561279296875, -0.011322021484375, -0.0024929046630859375, -0.04901123046875, -0.0086822509765625, 0.0175933837890625, -0.050567626953125, 0.0273590087890625, 0.01006317138671875, 0.1007080078125, 0.0204010009765625, -0.017303466796875, -0.016357421875, -0.03802490234375, 0.055145263671875, -0.037689208984375, 0.0367431640625, 0.0276031494140625, 0.00630950927734375, 0.00255584716796875, -0.07904052734375, -0.029144287109375, -0.0277252197265625, -0.02215576171875, 0.0293121337890625, -0.0214080810546875, -0.0084075927734375, 0.00803375244140625, 0.006847381591796875, -0.047821044921875, -0.00785064697265625, -0.054107666015625, -0.006984710693359375, 0.039886474609375, -0.012939453125, -0.022491455078125, -0.037872314453125, -0.02410888671875, -0.0364990234375, -0.0171966552734375, -0.003757476806640625, 0.0185089111328125, 0.0172119140625, -0.00946807861328125, 0.0295867919921875, 0.0148162841796875, 0.037353515625, 0.0215606689453125, 0.017852783203125, 0.048675537109375, -0.0347900390625, 0.003620147705078125, -0.01222991943359375, 
0.067626953125, 0.010955810546875, -0.01079559326171875, 0.0009403228759765625, 0.00601959228515625, -0.00998687744140625, 0.0032749176025390625, -0.0653076171875, -0.01503753662109375, 0.0199127197265625, -0.03997802734375, -0.01451873779296875, 0.0179290771484375, -0.044097900390625, -0.0091705322265625, 0.00461578369140625, 0.015960693359375, -0.062347412109375, -0.023223876953125, 0.0013942718505859375, -0.02410888671875, 0.0164794921875, 0.01041412353515625, -0.07196044921875, 0.0239105224609375, 0.0330810546875, 0.07379150390625, -0.0008597373962402344, -0.03619384765625, -0.010284423828125, 0.00785064697265625, -0.006317138671875, 0.06646728515625, -0.029815673828125, -0.028717041015625, -0.033111572265625, 0.01309967041015625, -0.03509521484375, -0.0197906494140625, 0.055877685546875, -0.036407470703125, 0.019866943359375, -0.01428985595703125, -0.052032470703125, -0.0132293701171875, 0.0019664764404296875, -0.045989990234375, 0.09844970703125, 0.022674560546875, -0.0732421875, 0.01544189453125, -0.076904296875, -0.0097808837890625, 0.002704620361328125, -0.00762939453125, -0.033782958984375, -0.01369476318359375, -0.005130767822265625, 0.022186279296875, -0.0172882080078125, 0.026275634765625, -0.0188140869140625, -0.022186279296875, 0.01107025146484375, 0.004886627197265625, 0.0570068359375, 0.0263824462890625, -0.01241302490234375, 0.00705718994140625, -0.060821533203125, -0.0095977783203125, 0.0151519775390625, -0.02301025390625, -0.0221405029296875, -0.0001691579818725586, 0.0421142578125, 0.01520538330078125, 0.0338134765625, -0.06622314453125, 0.0020198822021484375, -0.02899169921875, 0.048004150390625, 0.040374755859375, -0.0043487548828125, 0.0208282470703125, -0.0213165283203125, 0.0135650634765625, -0.00423431396484375, 0.0071258544921875, -0.0064697265625, -0.043670654296875, -0.051666259765625, -0.0228118896484375, 0.041259765625, 0.0310211181640625, -0.007396697998046875, 0.0506591796875, -0.04339599609375, -0.054290771484375, 
-0.0457763671875, -0.00997161865234375, 0.0255889892578125, 0.04669189453125, 0.04107666015625, -0.018585205078125, -0.07080078125, -0.06597900390625, 0.00858306884765625, -0.0187225341796875, -0.003749847412109375, 0.0305938720703125, 0.0657958984375, -0.026336669921875, 0.07037353515625, -0.03790283203125, -0.020782470703125, -0.0023250579833984375, 0.0169525146484375, 0.0195465087890625, 0.050140380859375, 0.046966552734375, -0.06683349609375, -0.0313720703125, -0.01468658447265625, -0.06121826171875, 0.01497650146484375, 0.00952911376953125, -0.0022602081298828125, 0.03839111328125, 0.0362548828125, -0.049530029296875, 0.0060577392578125, 0.03082275390625, -0.036773681640625, 0.04010009765625, -0.0184326171875, -0.00562286376953125, -0.10150146484375, 0.046844482421875, 0.0137176513671875, -0.00226593017578125, -0.03515625, 0.0134429931640625, 0.022216796875, -0.00046634674072265625, -0.027191162109375, 0.039306640625, -0.03289794921875, -0.002651214599609375, -0.005615234375, -0.0032672882080078125, 0.0157318115234375, 0.0258331298828125, 0.0316162109375, 0.05120849609375, 0.0255126953125, -0.0171966552734375, -0.005397796630859375, 0.03717041015625, -0.01149749755859375, 0.012176513671875, -0.06243896484375, 0.01352691650390625, -0.0120391845703125, 0.047393798828125, -0.0673828125, -0.012115478515625, 0.01381683349609375, -0.053802490234375, 0.0283660888671875, -0.01190185546875, -0.051544189453125, -0.035247802734375, -0.03204345703125, 0.0207366943359375, 0.03216552734375, -0.035400390625, 0.050567626953125, 0.0014104843139648438, 0.0056610107421875, -0.0408935546875, -0.053253173828125, -0.0025615692138671875, 0.0020961761474609375, -0.059906005859375, 0.0562744140625, -0.01342010498046875, -0.0006046295166015625, 0.017120361328125, 0.0075225830078125, 0.005565643310546875, -0.0108489990234375, 0.035919189453125, 0.03863525390625, -0.003475189208984375, 0.00992584228515625, -0.0033054351806640625, -0.01317596435546875, 0.004070281982421875, 
-0.0252685546875, 0.0421142578125, -0.028076171875, -0.005390167236328125, -0.01654052734375, 0.02679443359375, 0.03997802734375, -0.0174407958984375, 0.05889892578125, 0.059295654296875, -0.0305023193359375, 0.0305023193359375, -0.04144287109375, -0.0283660888671875, -0.03240966796875, 0.0479736328125, -0.034423828125, -0.060302734375, 0.03863525390625, 0.01020050048828125, 0.0030002593994140625, 0.062744140625, 0.0275421142578125, -0.0210418701171875, 0.060333251953125, 0.041748046875, 0.00757598876953125, 0.034698486328125, -0.0499267578125, 0.0029449462890625, -0.08563232421875, -0.036102294921875, -0.051422119140625, -0.00385284423828125, -0.047821044921875, -0.041839599609375, 0.02154541015625, -0.00579071044921875, -0.0193939208984375, 0.040283203125, -0.057891845703125, 0.004436492919921875, 0.05401611328125, 0.01708984375, -0.004497528076171875, 0.0056915283203125, -0.00896453857421875, -0.00916290283203125, -0.04296875, -0.0362548828125, 0.09326171875, 0.0447998046875, 0.037109375, -0.0035190582275390625, 0.058502197265625, 0.004302978515625, 0.002223968505859375, -0.0523681640625, 0.0284881591796875, -0.018218994140625, -0.050750732421875, -0.00484466552734375, -0.0135650634765625, -0.0654296875, 0.01275634765625, -0.00963592529296875, -0.058685302734375, 0.055877685546875, -0.007904052734375, -0.02618408203125, 0.0207366943359375, -0.058685302734375, 0.0557861328125, -0.0010652542114257812, -0.03204345703125, -0.022064208984375, -0.0618896484375, 0.00786590576171875, -0.005649566650390625, 0.006862640380859375, 0.0037841796875, -0.0008616447448730469, 0.08563232421875, -0.029815673828125, 0.052947998046875, -0.022216796875, 0.021209716796875, 0.0123291015625, -0.017791748046875, 0.043609619140625, 0.0006799697875976562, -0.024017333984375, 0.0197296142578125, 0.0216522216796875, -0.031982421875, -0.0157012939453125, 0.04864501953125, -0.0723876953125, -0.016143798828125, -0.059295654296875, -0.0582275390625, 0.002552032470703125, 0.0078887939453125, 
0.03662109375, 0.00791168212890625, -0.0021343231201171875, 0.016510009765625, 0.057220458984375, 0.005096435546875, 0.0328369140625, 0.044281005859375, 0.0175933837890625, -0.0284881591796875, 0.058563232421875, 0.005046844482421875, -0.0035991668701171875, 0.032958984375, -0.001708984375, -0.0224151611328125, -0.036041259765625, -0.0120849609375, 0.039703369140625, -0.052886962890625, -0.01152801513671875, -0.07232666015625, -0.031768798828125, -0.0584716796875, 0.0006518363952636719, -0.02655029296875, -0.034332275390625, -0.039642333984375, -0.005512237548828125, 0.051025390625, 0.030853271484375, -0.02020263671875, 0.0021152496337890625, -0.03533935546875, -0.00693511962890625, 0.00023508071899414062, 0.01474761962890625, -0.013153076171875, -0.0660400390625, -0.0275421142578125, -0.013397216796875, -0.013885498046875, -0.055450439453125, 0.0216217041015625, 0.0267181396484375, 0.030242919921875, 0.0134429931640625, -0.01184844970703125, 0.031219482421875, -0.0288238525390625, 0.054595947265625, 0.0043487548828125, -0.0447998046875, 0.057464599609375, -0.0201568603515625, 0.0096893310546875, 0.054290771484375, 0.042510986328125, -0.0226287841796875, -0.0146484375, -0.057220458984375, -0.07421875, 0.0662841796875, 0.007587432861328125, 0.0005507469177246094, 0.0012178421020507812, 0.0247344970703125, -0.008636474609375, -0.01473236083984375, -0.052581787109375, -0.0197296142578125, -0.035186767578125, -0.0196990966796875, 0.0015172958374023438, -0.032958984375, -0.00968170166015625, -0.0384521484375, 0.06707763671875, -0.01444244384765625, 0.057098388671875, 0.04327392578125, -0.0192718505859375, 0.0006365776062011719, 0.01087188720703125, 0.0501708984375, 0.0465087890625, -0.0697021484375, 0.0021915435791015625, 0.0031070709228515625, -0.057891845703125, -0.013763427734375, 0.064208984375, -0.01499176025390625, 0.020477294921875, 0.0251922607421875, 0.064453125, 0.0191192626953125, -0.039764404296875, 0.0224761962890625, 0.002017974853515625, 
-0.04364013671875, -0.0379638671875, -0.011322021484375, 0.0108642578125, 0.0105438232421875, 0.053131103515625, 0.0241851806640625, 0.0171661376953125, -0.0186309814453125, 0.013092041015625, 0.011505126953125, -0.02020263671875, -0.0258331298828125, 0.060821533203125, -0.005031585693359375, 0.0023555755615234375, 0.02313232421875, -0.0290069580078125, -0.0259552001953125, 0.047119140625, 0.050018310546875, 0.049835205078125, -0.006153106689453125, 0.0059967041015625, 0.0577392578125, 0.0176239013671875, 0.003757476806640625, 0.0204925537109375, -0.0005788803100585938, -0.047119140625, -0.0189666748046875, -0.0479736328125, -0.0297393798828125, 0.007617950439453125, -0.05401611328125, 0.023193359375, -0.033538818359375, -0.00954437255859375, 0.03839111328125, 0.0286407470703125, -0.05035400390625, -0.002101898193359375, 0.01093292236328125, 0.09539794921875, -0.0693359375, 0.07659912109375, 0.078125, -0.057891845703125, -0.06170654296875, -0.0179901123046875, -0.012969970703125, -0.0438232421875, 0.06024169921875, 0.00740814208984375, -0.00249481201171875, 0.006954193115234375, -0.03863525390625, -0.08416748046875, 0.09906005859375, 0.0162353515625, -0.043182373046875, -0.0177154541015625, -0.0196380615234375, 0.047698974609375, -0.02496337890625, 0.0572509765625, 0.032379150390625, 0.044281005859375, 0.0164947509765625, -0.06280517578125, 0.02276611328125, -0.050567626953125, 0.003997802734375, -0.005565643310546875, -0.0625, 0.071044921875, -0.0002524852752685547, 0.0089569091796875, 0.00684356689453125, 0.053375244140625, 0.0482177734375, 0.018707275390625, 0.021881103515625, 0.047698974609375, 0.06390380859375, -0.0024662017822265625, 0.0701904296875, -0.033905029296875, 0.04351806640625, 0.07977294921875, -0.0229949951171875, 0.0723876953125, 0.04925537109375, -0.0228424072265625, 0.06494140625, 0.0626220703125, -0.043182373046875, 0.045379638671875, -0.00897979736328125, -0.004154205322265625, 0.007778167724609375, -0.01535797119140625, -0.04620361328125, 
0.0288238525390625, 0.0322265625, -0.051239013671875, -0.01055908203125, -0.0014476776123046875, 0.0092315673828125, -0.0011243820190429688, 0.0149383544921875, 0.037689208984375, -0.006557464599609375, -0.041656494140625, 0.05462646484375, 0.01052093505859375, 0.0765380859375, -0.054595947265625, 0.0065460205078125, -0.00994873046875, 0.01253509521484375, -0.0250091552734375, -0.041900634765625, 0.0177001953125, -0.0009832382202148438, -0.04266357421875, -0.0029621124267578125, 0.0499267578125, -0.01611328125, -0.046722412109375, 0.037872314453125, 0.007114410400390625, 0.035430908203125, 0.0150604248046875, -0.060028076171875, 0.0093536376953125, -0.01532745361328125, -0.0225677490234375, 0.0186004638671875, -0.0085601806640625, -0.0014324188232421875, 0.029876708984375, 0.040374755859375, 0.0016145706176757812, -0.008026123046875, 0.00008875131607055664, 0.06732177734375, -0.037109375, -0.029510498046875, -0.03472900390625, 0.0282440185546875, -0.0155029296875, -0.0238189697265625, 0.05126953125, 0.06951904296875, 0.05322265625, -0.0037479400634765625, 0.054290771484375, -0.041900634765625, 0.045135498046875, -0.01381683349609375, 0.050537109375, -0.0316162109375, 0.005672454833984375, -0.0130615234375, -0.04791259765625, -0.0092010498046875, 0.04833984375, -0.0208892822265625, 0.0149078369140625, 0.06683349609375, 0.06585693359375, -0.01544189453125, 0.000007033348083496094, -0.0159454345703125, 0.015838623046875, 0.0186767578125, 0.046142578125, 0.030120849609375, -0.06585693359375, 0.045257568359375, -0.0206451416015625, -0.01751708984375, -0.03497314453125, -0.04864501953125, -0.052581787109375, -0.07196044921875, -0.030914306640625, -0.043182373046875, 0.018280029296875, 0.07806396484375, 0.0743408203125, -0.06695556640625, -0.00823211669921875, -0.01520538330078125, -0.0238189697265625, -0.0115814208984375, -0.019134521484375, 0.058258056640625, -0.0207977294921875, -0.058074951171875, 0.032470703125, -0.0148162841796875, -0.0006127357482910156, 
-0.007541656494140625, -0.019287109375, -0.049713134765625, -0.01085662841796875, 0.0311126708984375, 0.034698486328125, -0.049468994140625, -0.018310546875, 0.01153564453125, -0.0174560546875, 0.011749267578125, 0.02801513671875, -0.059112548828125, 0.0236968994140625, 0.03216552734375, 0.0256805419921875, 0.07708740234375, -0.0191192626953125, 0.0086669921875, -0.053466796875, 0.021148681640625, 0.0198822021484375, 0.03668212890625, 0.0266876220703125, -0.038970947265625, 0.054290771484375, 0.022308349609375, -0.047698974609375, -0.068359375, -0.018310546875, -0.09295654296875, -0.024444580078125, 0.0853271484375, -0.01568603515625, -0.0221405029296875, 0.01090240478515625, -0.004299163818359375, 0.0330810546875, -0.046356201171875, 0.046539306640625, 0.04266357421875, -0.0157623291015625, -0.01468658447265625, -0.032196044921875, 0.03167724609375, 0.041900634765625, -0.0452880859375, -0.02069091796875, 0.0225677490234375, 0.04010009765625, 0.048858642578125, 0.03668212890625, -0.0135498046875, 0.031951904296875, 0.003139495849609375, 0.0287933349609375, -0.0230560302734375, -0.01145172119140625, -0.031463623046875, -0.0009474754333496094, 0.004756927490234375, -0.0254974365234375 ] ]
KnutJaegersberg/RWKV-4-PilePlus-430M-20230520-6162-1018Gtokens-ctx4098
2023-09-05T09:59:27.000Z
[ "transformers", "pytorch", "rwkv", "license:apache-2.0", "endpoints_compatible", "region:us" ]
null
KnutJaegersberg
null
null
KnutJaegersberg/RWKV-4-PilePlus-430M-20230520-6162-1018Gtokens-ctx4098
0
5,867
transformers
2023-09-05T08:54:51
--- license: apache-2.0 --- This is just a standard conversion to hf transformers format of models from here: https://huggingface.co/BlinkDL/rwkv-4-pileplus According to the documentation I found, this model should have seen roundabout 1.3 trillion tokens!
259
[ [ -0.040008544921875, -0.05450439453125, 0.017333984375, 0.025634765625, -0.0345458984375, -0.027923583984375, 0.03924560546875, -0.008819580078125, 0.030029296875, 0.07623291015625, -0.055755615234375, -0.032135009765625, -0.0093231201171875, -0.00411224365234375, -0.0638427734375, 0.07232666015625, 0.0244598388671875, -0.00652313232421875, -0.0097503662109375, -0.003429412841796875, -0.002166748046875, -0.0233917236328125, -0.0313720703125, -0.0205535888671875, 0.054290771484375, 0.04913330078125, 0.06549072265625, 0.04791259765625, 0.061370849609375, 0.0112457275390625, 0.0158538818359375, -0.034637451171875, -0.038116455078125, -0.050323486328125, 0.01110076904296875, -0.01010894775390625, -0.065185546875, 0.04241943359375, 0.061065673828125, 0.03961181640625, -0.0379638671875, 0.0266876220703125, -0.0269317626953125, 0.046173095703125, -0.0198822021484375, 0.0006470680236816406, -0.00014126300811767578, 0.0305938720703125, -0.027801513671875, -0.01302337646484375, -0.04498291015625, -0.004940032958984375, -0.0028362274169921875, -0.0797119140625, 0.0239715576171875, 0.041107177734375, 0.07366943359375, 0.02545166015625, -0.060638427734375, 0.0171051025390625, -0.0484619140625, 0.04046630859375, -0.050323486328125, 0.041290283203125, 0.0037288665771484375, 0.052215576171875, -0.031951904296875, -0.07379150390625, -0.0455322265625, 0.007007598876953125, -0.0228729248046875, 0.005527496337890625, -0.00901031494140625, 0.01190185546875, 0.04302978515625, 0.049774169921875, -0.045135498046875, -0.00823211669921875, -0.050384521484375, -0.0207672119140625, 0.051177978515625, 0.0177154541015625, 0.0034084320068359375, -0.062744140625, -0.06573486328125, -0.01763916015625, -0.05859375, -0.00824737548828125, 0.024200439453125, 0.0110015869140625, -0.055572509765625, 0.060943603515625, -0.01041412353515625, 0.048370361328125, 0.0174407958984375, -0.005435943603515625, 0.02154541015625, -0.0175018310546875, -0.03155517578125, -0.01343536376953125, 0.048553466796875, 
0.0272674560546875, 0.0085601806640625, -0.0056915283203125, -0.023834228515625, -0.054779052734375, 0.0509033203125, -0.081298828125, -0.01227569580078125, -0.004093170166015625, -0.063232421875, -0.038055419921875, 0.03509521484375, -0.046966552734375, 0.0012989044189453125, 0.015655517578125, 0.0260467529296875, -0.010223388671875, -0.04046630859375, 0.015289306640625, -0.0228271484375, 0.04486083984375, 0.00904083251953125, -0.042938232421875, 0.01654052734375, 0.04071044921875, 0.040283203125, -0.00995635986328125, -0.01087188720703125, -0.02716064453125, 0.0145263671875, -0.018707275390625, 0.06378173828125, -0.0027599334716796875, -0.052520751953125, 0.0169525146484375, 0.0161285400390625, 0.0129852294921875, -0.02392578125, 0.06671142578125, -0.04547119140625, 0.0289306640625, -0.0010576248168945312, -0.0246124267578125, -0.034423828125, 0.01218414306640625, -0.0828857421875, 0.07745361328125, 0.062225341796875, -0.06121826171875, 0.042266845703125, -0.06268310546875, -0.0304718017578125, 0.0335693359375, 0.01058197021484375, -0.0280303955078125, -0.0007519721984863281, -0.0144195556640625, 0.0200958251953125, -0.0301513671875, 0.0250396728515625, -0.0148162841796875, -0.0377197265625, 0.007228851318359375, -0.0221405029296875, 0.0587158203125, 0.037506103515625, 0.0106658935546875, 0.050079345703125, -0.07122802734375, -0.0281524658203125, 0.0011091232299804688, -0.024078369140625, 0.0178070068359375, -0.0251617431640625, 0.031158447265625, 0.034759521484375, 0.01184844970703125, -0.0283050537109375, 0.02685546875, -0.0095367431640625, 0.02972412109375, 0.02239990234375, 0.0014638900756835938, 0.0347900390625, -0.020477294921875, 0.053070068359375, 0.0287017822265625, 0.03521728515625, -0.00919342041015625, -0.06842041015625, -0.072998046875, -0.0140228271484375, 0.0109710693359375, 0.00577545166015625, -0.0338134765625, 0.03643798828125, -0.0045928955078125, -0.04345703125, -0.0170745849609375, -0.046966552734375, -0.0290679931640625, 0.04534912109375, 
0.0047454833984375, -0.003795623779296875, -0.03277587890625, -0.08416748046875, 0.02105712890625, -0.0208587646484375, -0.01412200927734375, 0.00507354736328125, 0.05120849609375, -0.0178680419921875, 0.06787109375, -0.009735107421875, -0.0093994140625, -0.01488494873046875, 0.030853271484375, 0.027679443359375, 0.04736328125, 0.044036865234375, -0.047027587890625, -0.01238250732421875, -0.001129150390625, -0.028167724609375, 0.0198974609375, -0.01029205322265625, -0.034820556640625, 0.006137847900390625, 0.04840087890625, -0.07269287109375, 0.050628662109375, 0.04656982421875, -0.037933349609375, 0.0223846435546875, 0.0035533905029296875, 0.00788116455078125, -0.070556640625, 0.0236358642578125, -0.0010957717895507812, -0.043731689453125, -0.0250244140625, 0.02093505859375, 0.0226287841796875, -0.009246826171875, -0.042022705078125, 0.02093505859375, -0.018402099609375, -0.0340576171875, 0.00933837890625, -0.004360198974609375, -0.0242156982421875, 0.038238525390625, -0.0189666748046875, 0.045196533203125, 0.0241546630859375, -0.0264129638671875, 0.048126220703125, 0.03765869140625, 0.004169464111328125, 0.038055419921875, -0.044921875, 0.005199432373046875, 0.00254058837890625, 0.045166015625, -0.060791015625, -0.048736572265625, 0.0316162109375, -0.004962921142578125, -0.003204345703125, -0.004444122314453125, -0.037078857421875, -0.056488037109375, -0.02056884765625, 0.055419921875, 0.05194091796875, -0.039825439453125, 0.060211181640625, 0.0311279296875, 0.01430511474609375, -0.01395416259765625, -0.038238525390625, -0.0015354156494140625, -0.035308837890625, -0.0526123046875, 0.0343017578125, 0.0092620849609375, 0.0015802383422851562, -0.005397796630859375, 0.0156707763671875, -0.01375579833984375, -0.0163726806640625, 0.03289794921875, 0.022308349609375, -0.0263824462890625, -0.025054931640625, -0.008056640625, -0.0182647705078125, 0.0193328857421875, 0.0006380081176757812, 0.032989501953125, -0.0083465576171875, -0.0157012939453125, -0.04071044921875, 
0.0185546875, 0.064208984375, 0.01055145263671875, 0.034912109375, 0.06298828125, -0.048431396484375, -0.0109405517578125, -0.0292816162109375, -0.020355224609375, -0.033203125, 0.004520416259765625, -0.035888671875, -0.068359375, 0.037353515625, -0.0024051666259765625, 0.007080078125, 0.08868408203125, 0.035888671875, 0.0031948089599609375, 0.07977294921875, 0.0550537109375, 0.020233154296875, -0.0009694099426269531, -0.0164794921875, 0.03643798828125, -0.0289154052734375, -0.0322265625, -0.02716064453125, -0.0204620361328125, -0.041015625, -0.048553466796875, 0.0198211669921875, 0.020416259765625, -0.04644775390625, 0.031524658203125, -0.03570556640625, 0.0168304443359375, 0.0400390625, -0.016143798828125, 0.03570556640625, -0.017364501953125, 0.0113372802734375, 0.0032501220703125, -0.074462890625, -0.0281219482421875, 0.05670166015625, 0.059295654296875, 0.037689208984375, -0.0009303092956542969, 0.04583740234375, 0.01224517822265625, 0.04559326171875, -0.06982421875, 0.0474853515625, 0.0121917724609375, -0.07177734375, -0.035736083984375, -0.011871337890625, -0.045257568359375, -0.003955841064453125, -0.023193359375, -0.0048828125, -0.004665374755859375, -0.00968170166015625, -0.0280609130859375, 0.017303466796875, -0.031707763671875, 0.0936279296875, 0.0188446044921875, 0.033355712890625, -0.039520263671875, -0.0311431884765625, 0.057586669921875, 0.0016880035400390625, 0.01561737060546875, 0.01010894775390625, 0.017486572265625, 0.0654296875, -0.0281829833984375, 0.0254058837890625, -0.0015230178833007812, -0.01251983642578125, 0.038787841796875, 0.00235748291015625, 0.030059814453125, 0.0141754150390625, 0.0118255615234375, 0.0029392242431640625, -0.0100860595703125, -0.039581298828125, -0.0019779205322265625, 0.03839111328125, -0.0494384765625, -0.03167724609375, -0.039459228515625, -0.041717529296875, 0.0164031982421875, 0.0162506103515625, 0.021392822265625, 0.025115966796875, -0.04583740234375, 0.0205841064453125, 0.034271240234375, 0.01407623291015625, 
0.04254150390625, 0.0175628662109375, -0.041839599609375, -0.01161956787109375, 0.037139892578125, -0.021575927734375, 0.002437591552734375, 0.0131378173828125, 0.004978179931640625, -0.046539306640625, -0.0312042236328125, -0.050323486328125, 0.032745361328125, -0.051666259765625, -0.0173187255859375, -0.02874755859375, -0.032684326171875, -0.054107666015625, -0.006320953369140625, -0.026123046875, -0.055511474609375, -0.040771484375, -0.025054931640625, 0.041778564453125, 0.0960693359375, -0.005535125732421875, 0.032379150390625, -0.06268310546875, 0.0207672119140625, 0.034881591796875, 0.04034423828125, -0.0122528076171875, -0.07470703125, -0.026763916015625, -0.0199127197265625, -0.010498046875, -0.051116943359375, 0.0304412841796875, -0.0124053955078125, 0.023895263671875, 0.047760009765625, -0.018951416015625, 0.049560546875, -0.050048828125, 0.062469482421875, 0.0308685302734375, -0.04388427734375, -0.01535797119140625, -0.055145263671875, -0.00408935546875, 0.0037631988525390625, 0.029144287109375, -0.044647216796875, 0.004093170166015625, -0.083740234375, -0.03411865234375, 0.057586669921875, 0.0027904510498046875, -0.01546478271484375, -0.0024776458740234375, 0.025299072265625, 0.0142669677734375, 0.010711669921875, -0.04864501953125, -0.0190277099609375, -0.04046630859375, -0.00577545166015625, 0.020111083984375, -0.0161895751953125, -0.01428985595703125, -0.024322509765625, 0.0281219482421875, -0.003780364990234375, 0.0287628173828125, -0.00640869140625, 0.006938934326171875, -0.028228759765625, 0.0022125244140625, 0.04443359375, 0.0274810791015625, -0.02935791015625, -0.003078460693359375, 0.0015888214111328125, -0.031463623046875, -0.0266876220703125, -0.006671905517578125, 0.00811004638671875, -0.009002685546875, 0.004791259765625, 0.043670654296875, 0.035736083984375, -0.020172119140625, 0.0364990234375, 0.01073455810546875, -0.040069580078125, -0.054107666015625, -0.0014810562133789062, 0.025482177734375, 0.0050811767578125, 0.03521728515625, 
0.0032825469970703125, 0.01953125, -0.030731201171875, 0.040313720703125, 0.0173187255859375, -0.027496337890625, -0.038482666015625, 0.060638427734375, 0.052978515625, -0.042327880859375, 0.064453125, -0.045166015625, -0.0129547119140625, 0.0257568359375, 0.049041748046875, 0.0728759765625, 0.0003910064697265625, 0.041229248046875, 0.0595703125, 0.00789642333984375, -0.0088653564453125, 0.0179595947265625, 0.01142120361328125, -0.038787841796875, -0.031768798828125, -0.054046630859375, -0.0274200439453125, 0.0082244873046875, -0.052734375, 0.0413818359375, -0.056488037109375, 0.0017604827880859375, -0.006839752197265625, -0.032867431640625, -0.049560546875, 0.02203369140625, 0.0203704833984375, 0.09503173828125, -0.0509033203125, 0.06683349609375, 0.04315185546875, -0.0302734375, -0.06549072265625, -0.014801025390625, -0.01428985595703125, -0.06610107421875, 0.040069580078125, 0.008392333984375, 0.008148193359375, 0.01995849609375, -0.053924560546875, -0.0626220703125, 0.08099365234375, -0.00978851318359375, -0.04644775390625, -0.0147247314453125, 0.033203125, 0.01837158203125, -0.04962158203125, 0.022735595703125, 0.055908203125, 0.055145263671875, 0.0228118896484375, -0.053375244140625, 0.01546478271484375, -0.0157623291015625, -0.0005373954772949219, 0.0231170654296875, -0.048553466796875, 0.0833740234375, 0.009033203125, -0.0212554931640625, 0.007701873779296875, 0.0584716796875, 0.02001953125, -0.0005483627319335938, 0.06500244140625, 0.055633544921875, 0.0204925537109375, -0.0196075439453125, 0.0728759765625, -0.0123291015625, 0.040130615234375, 0.006046295166015625, -0.0174560546875, 0.05230712890625, 0.056915283203125, -0.0236968994140625, 0.05230712890625, 0.046905517578125, -0.005466461181640625, 0.002227783203125, 0.005634307861328125, 0.01320648193359375, -0.03594970703125, -0.00182342529296875, -0.01091766357421875, 0.01910400390625, 0.034332275390625, 0.0203399658203125, -0.0172119140625, -0.0190887451171875, -0.0191192626953125, -0.013153076171875, 
-0.0251617431640625, 0.0258636474609375, -0.00339508056640625, -0.0296478271484375, 0.0243682861328125, 0.001964569091796875, 0.046417236328125, -0.006443023681640625, -0.0287933349609375, 0.00868988037109375, -0.011932373046875, -0.024627685546875, -0.059173583984375, 0.035919189453125, -0.01439666748046875, -0.00614166259765625, -0.0181427001953125, 0.045623779296875, -0.01143646240234375, -0.055145263671875, 0.00016927719116210938, 0.0225677490234375, 0.032806396484375, 0.0021533966064453125, -0.06451416015625, -0.0284576416015625, 0.004894256591796875, -0.0311431884765625, 0.00557708740234375, 0.0295867919921875, -0.0196380615234375, 0.02838134765625, 0.033233642578125, -0.030242919921875, 0.005481719970703125, 0.02685546875, 0.036041259765625, -0.089599609375, -0.05230712890625, 0.0016565322875976562, 0.06292724609375, -0.01447296142578125, -0.05828857421875, 0.037445068359375, 0.036041259765625, 0.050872802734375, -0.02593994140625, 0.025421142578125, -0.01053619384765625, 0.043060302734375, -0.0181121826171875, 0.04998779296875, -0.0430908203125, -0.0235443115234375, 0.00832366943359375, -0.062286376953125, -0.0145721435546875, 0.05999755859375, 0.0379638671875, -0.0021152496337890625, 0.0758056640625, 0.039581298828125, 0.0017261505126953125, 0.024566650390625, 0.0347900390625, 0.0157928466796875, 0.0151824951171875, -0.0007677078247070312, 0.049774169921875, -0.04656982421875, 0.032958984375, -0.0225982666015625, -0.00750732421875, -0.0296173095703125, -0.069580078125, -0.0640869140625, -0.03900146484375, -0.0152740478515625, -0.021240234375, -0.03997802734375, 0.043609619140625, 0.058868408203125, -0.048065185546875, -0.02484130859375, -0.032257080078125, -0.007152557373046875, 0.01494598388671875, -0.0171661376953125, 0.0152740478515625, 0.01299285888671875, -0.04315185546875, 0.0125579833984375, 0.024078369140625, 0.017669677734375, -0.0560302734375, -0.006290435791015625, 0.012908935546875, 0.005489349365234375, 0.037384033203125, 
0.0028820037841796875, -0.0516357421875, -0.04510498046875, 0.01605224609375, -0.02313232421875, 0.007785797119140625, 0.07745361328125, -0.057769775390625, -0.031829833984375, 0.0494384765625, 0.00024068355560302734, 0.061553955078125, 0.019500732421875, 0.04071044921875, -0.043609619140625, 0.0509033203125, 0.0193939208984375, 0.02374267578125, 0.0167999267578125, -0.0273895263671875, 0.0243072509765625, 0.0265655517578125, -0.042572021484375, -0.04931640625, 0.0256805419921875, -0.1221923828125, 0.0015764236450195312, 0.056182861328125, 0.01285552978515625, -0.03546142578125, 0.005550384521484375, -0.0204925537109375, 0.0225372314453125, -0.025360107421875, 0.042694091796875, 0.04034423828125, 0.01105499267578125, -0.019927978515625, -0.041473388671875, 0.03936767578125, 0.01485443115234375, -0.043792724609375, -0.00189971923828125, 0.02691650390625, -0.0032482147216796875, 0.027801513671875, 0.0297698974609375, -0.034088134765625, 0.0304718017578125, 0.01470947265625, 0.0145263671875, 0.016632080078125, -0.0223236083984375, -0.0186309814453125, -0.004123687744140625, -0.016326904296875, 0.00897979736328125 ] ]
KnutJaegersberg/black_goo_recipe_b
2023-08-31T12:56:01.000Z
[ "transformers", "pytorch", "llama", "text-generation", "custom_code", "license:cc-by-nc-4.0", "endpoints_compatible", "has_space", "text-generation-inference", "region:us" ]
text-generation
KnutJaegersberg
null
null
KnutJaegersberg/black_goo_recipe_b
0
5,864
transformers
2023-08-31T10:00:17
--- license: cc-by-nc-4.0 --- Prompt Example: ``` ### System: You are an AI assistant. User will you give you a task. Your goal is to complete the task as faithfully as you can. While performing the task think step-by-step and justify your steps. ### Instruction: How do you fine tune a large language model? ### Response: To fine-tune a large language model, we use various techniques to improve its performance and adjust its parameters based on user feedback. These techniques include: 1. Fine tuning the original model with additional data. For example, adding more training examples or using specialized datasets such as question answering or natural language inference tasks. This helps the model learn new concepts and skills by interacting with additional information sources. 2. Using transfer learning methods to incorporate knowledge from other models that have been pre-trained for specific domains. For instance, using pre-trained embeddings from neural networks trained on text classification tasks like sentiment analysis or question answering to help fine-tuning the main language model. 3. Improving the quality of the training data. We may optimize the training process by increasing the accuracy of provided data sets (e.g., by improving word embedding quality), refining the selection of relevant examples, and/or providing additional supervision signals during training. 4. Incorporating knowledge from external resources such as domain-specific lexicons or domain-specific knowledge bases. For example, incorporating domain-specific rules and facts into the model to help it understand domain-specific terminology and semantics. 5. Employing techniques like dropout regularization and dropout sampling to avoid overfitting and reduce overconfidence in the model's predictions. 6. Utilizing meta-learning methods such as reinforcement learning or self-supervised learning to enable the model to adapt quickly to changing situations without being explicitly programmed. 7. 
Incorporating human input through crowdsourcing or user testing to validate the model's performance and ensure user satisfaction. 8. Implementing various optimization strategies to improve efficiency and reduce computational costs while maintaining high performance levels. In conclusion, fine tuning a large language model requires combining various techniques including fine tuning, transfer learning, improved data quality, external resources, meta-learning, optimization strategies, crowd sourcing, and user testing. These approaches aim to improve the model's ability to handle various tasks and provide users with better experiences in their daily interactions with language systems. ```
2,703
[ [ -0.04241943359375, -0.08526611328125, 0.02490234375, -0.00926971435546875, 0.0012264251708984375, 0.00038051605224609375, -0.046783447265625, -0.0236053466796875, -0.0152130126953125, 0.04937744140625, -0.04315185546875, -0.01406097412109375, -0.0295867919921875, 0.0079803466796875, -0.01288604736328125, 0.07171630859375, -0.00792694091796875, 0.006595611572265625, -0.007480621337890625, 0.0145263671875, -0.058319091796875, -0.037139892578125, -0.08197021484375, -0.0193634033203125, 0.0198211669921875, 0.055206298828125, 0.05224609375, 0.056671142578125, 0.041717529296875, 0.030853271484375, 0.00250244140625, 0.044036865234375, -0.0484619140625, -0.0063323974609375, -0.0247955322265625, -0.035064697265625, -0.027435302734375, 0.0017728805541992188, 0.03265380859375, 0.045654296875, -0.007617950439453125, 0.0139312744140625, 0.0209197998046875, 0.03643798828125, -0.036590576171875, 0.016204833984375, -0.029205322265625, -0.019775390625, -0.0145416259765625, -0.0160675048828125, -0.0151519775390625, -0.032958984375, 0.0005145072937011719, -0.040069580078125, 0.007503509521484375, 0.0032749176025390625, 0.07769775390625, 0.0268096923828125, -0.0143890380859375, -0.020782470703125, -0.066650390625, 0.072998046875, -0.0693359375, 0.0218658447265625, 0.06341552734375, 0.0183868408203125, -0.00878143310546875, -0.037872314453125, -0.043182373046875, -0.040557861328125, -0.00959014892578125, -0.0176239013671875, 0.0113677978515625, 0.01068115234375, 0.0399169921875, 0.023101806640625, -0.022491455078125, 0.0130157470703125, -0.03314208984375, -0.021453857421875, 0.05908203125, 0.01256561279296875, 0.03271484375, 0.00823974609375, 0.0124664306640625, -0.0263519287109375, -0.043426513671875, 0.005641937255859375, 0.0323486328125, 0.034942626953125, 0.013336181640625, 0.03179931640625, -0.0305633544921875, 0.061309814453125, 0.0125579833984375, -0.0153045654296875, 0.014190673828125, -0.03692626953125, -0.01044464111328125, -0.025970458984375, 0.03759765625, 
0.014068603515625, 0.05523681640625, -0.0210418701171875, -0.036102294921875, -0.006534576416015625, 0.02886962890625, -0.04840087890625, -0.0166015625, 0.01239013671875, -0.034423828125, -0.0106048583984375, -0.015289306640625, -0.058990478515625, -0.0014142990112304688, -0.0689697265625, 0.0498046875, -0.044586181640625, -0.0067138671875, 0.00756072998046875, 0.0000673532485961914, 0.005435943603515625, 0.030517578125, -0.08660888671875, 0.044677734375, 0.05047607421875, 0.05340576171875, -0.003902435302734375, -0.037017822265625, -0.04559326171875, 0.0018749237060546875, -0.0247344970703125, 0.050048828125, -0.041839599609375, -0.0120849609375, 0.0013628005981445312, -0.0013284683227539062, -0.01406097412109375, -0.045318603515625, 0.0216217041015625, -0.03228759765625, 0.0194549560546875, -0.020050048828125, -0.06683349609375, -0.020050048828125, 0.0134429931640625, -0.0377197265625, 0.07501220703125, 0.00554656982421875, -0.02301025390625, 0.0031452178955078125, -0.07342529296875, -0.040985107421875, -0.0159149169921875, 0.020782470703125, -0.006542205810546875, 0.01287078857421875, 0.0304107666015625, 0.049468994140625, -0.0185089111328125, 0.00823211669921875, -0.007480621337890625, -0.0170440673828125, 0.0062255859375, -0.018157958984375, 0.0474853515625, 0.026092529296875, -0.025604248046875, 0.00583648681640625, -0.053009033203125, 0.0226898193359375, 0.004489898681640625, -0.03778076171875, -0.0304718017578125, 0.0024127960205078125, 0.019378662109375, -0.0004954338073730469, 0.0233154296875, -0.010101318359375, 0.034912109375, -0.053955078125, 0.03363037109375, 0.04510498046875, 0.0045318603515625, 0.027374267578125, -0.0218658447265625, 0.036163330078125, -0.01666259765625, 0.01373291015625, -0.019287109375, -0.005123138427734375, -0.0738525390625, 0.01052093505859375, 0.04461669921875, 0.057464599609375, -0.05340576171875, 0.04669189453125, -0.0014390945434570312, -0.027374267578125, -0.05224609375, 0.04083251953125, 0.0341796875, 0.0276947021484375, 
0.038177490234375, -0.020050048828125, -0.035003662109375, -0.049713134765625, -0.0027027130126953125, 0.006885528564453125, 0.01541900634765625, 0.01251220703125, 0.038543701171875, -0.0089874267578125, 0.052093505859375, -0.050384521484375, -0.0120391845703125, -0.02154541015625, 0.0232391357421875, -0.00312042236328125, 0.0501708984375, 0.018096923828125, -0.057159423828125, -0.03668212890625, 0.01361846923828125, -0.033050537109375, -0.0045623779296875, -0.002712249755859375, -0.0014200210571289062, 0.01413726806640625, 0.058685302734375, -0.0313720703125, 0.01496124267578125, 0.022979736328125, -0.01495361328125, 0.033050537109375, -0.0322265625, 0.0156097412109375, -0.096435546875, 0.00426483154296875, -0.004703521728515625, -0.02178955078125, -0.0672607421875, 0.03350830078125, -0.011383056640625, 0.005558013916015625, -0.01507568359375, 0.0450439453125, -0.0379638671875, 0.026336669921875, -0.049346923828125, 0.0007967948913574219, -0.0118865966796875, 0.043487548828125, -0.0027866363525390625, 0.06866455078125, 0.05194091796875, -0.06298828125, 0.038787841796875, 0.006366729736328125, -0.0304412841796875, 0.02239990234375, -0.056732177734375, 0.01546478271484375, -0.01103973388671875, -0.0189971923828125, -0.0716552734375, -0.01375579833984375, 0.0009937286376953125, -0.0246734619140625, 0.043670654296875, 0.01389312744140625, -0.0513916015625, -0.016082763671875, 0.00341796875, 0.00797271728515625, 0.044769287109375, -0.02508544921875, 0.039581298828125, 0.0330810546875, -0.00952911376953125, -0.026702880859375, -0.03155517578125, -0.000004231929779052734, -0.0186767578125, -0.041290283203125, 0.017425537109375, -0.013214111328125, -0.0169219970703125, -0.01346588134765625, 0.0282745361328125, -0.01519775390625, 0.0322265625, -0.0078887939453125, 0.00572967529296875, -0.010711669921875, 0.0275421142578125, -0.0058746337890625, -0.00690460205078125, -0.017120361328125, -0.014007568359375, 0.058990478515625, -0.00478363037109375, -0.01373291015625, 
-0.0572509765625, 0.031646728515625, 0.03082275390625, -0.03155517578125, 0.050811767578125, 0.06512451171875, -0.029052734375, -0.01275634765625, -0.061309814453125, -0.0382080078125, -0.03363037109375, 0.04217529296875, -0.02264404296875, -0.06292724609375, 0.03179931640625, -0.0286102294921875, 0.00782012939453125, 0.030059814453125, 0.058319091796875, -0.00836944580078125, 0.09869384765625, 0.06964111328125, -0.035125732421875, 0.0465087890625, -0.024932861328125, 0.0103912353515625, -0.048919677734375, -0.0038814544677734375, -0.05615234375, -0.0260467529296875, -0.041717529296875, -0.024993896484375, -0.0026836395263671875, 0.0276336669921875, -0.028106689453125, 0.0435791015625, -0.019287109375, 0.03515625, 0.06756591796875, 0.0016603469848632812, 0.01763916015625, 0.0054473876953125, 0.00568389892578125, 0.006992340087890625, -0.050140380859375, -0.040771484375, 0.06524658203125, 0.043792724609375, 0.078125, 0.0022029876708984375, 0.049835205078125, 0.004520416259765625, -0.000499725341796875, -0.063232421875, 0.02288818359375, -0.00788116455078125, -0.037841796875, -0.047393798828125, -0.029083251953125, -0.0626220703125, -0.0034732818603515625, -0.01995849609375, -0.05767822265625, 0.0035724639892578125, 0.032989501953125, -0.036773681640625, -0.00734710693359375, -0.071044921875, 0.0679931640625, -0.0219268798828125, -0.00870513916015625, -0.0372314453125, -0.0261993408203125, 0.0011587142944335938, -0.004451751708984375, 0.004627227783203125, 0.0108642578125, -0.0021572113037109375, 0.04071044921875, -0.0310516357421875, 0.0777587890625, -0.00397491455078125, 0.00608062744140625, 0.0102081298828125, 0.0012531280517578125, 0.048004150390625, -0.0223236083984375, 0.00125885009765625, -0.0048065185546875, 0.0158843994140625, -0.0225067138671875, -0.058074951171875, 0.04290771484375, -0.08111572265625, -0.05194091796875, 0.004913330078125, -0.047393798828125, -0.03228759765625, 0.011383056640625, 0.01102447509765625, 0.031402587890625, -0.0179901123046875, 
0.008148193359375, 0.05767822265625, -0.0107269287109375, 0.025177001953125, 0.031463623046875, -0.0074615478515625, -0.01360321044921875, 0.07305908203125, 0.00722503662109375, 0.0269317626953125, 0.032562255859375, 0.0255889892578125, -0.0230865478515625, -0.03887939453125, -0.0322265625, 0.0181732177734375, -0.042144775390625, -0.01222991943359375, -0.0430908203125, -0.011199951171875, -0.055877685546875, 0.0094451904296875, -0.0159759521484375, -0.00926971435546875, -0.032958984375, 0.00396728515625, 0.033203125, 0.042510986328125, 0.007175445556640625, 0.038848876953125, -0.057464599609375, 0.0202789306640625, 0.023529052734375, 0.021728515625, 0.0132598876953125, -0.0517578125, -0.033782958984375, 0.0154571533203125, -0.016998291015625, -0.04180908203125, 0.023956298828125, 0.02545166015625, 0.053009033203125, 0.0247955322265625, -0.017120361328125, 0.0689697265625, -0.05621337890625, 0.049163818359375, -0.0140380859375, -0.08197021484375, 0.06390380859375, -0.0130615234375, 0.0254058837890625, 0.055633544921875, 0.034515380859375, -0.037841796875, -0.01678466796875, -0.03131103515625, -0.04864501953125, 0.05926513671875, 0.0013799667358398438, 0.0293121337890625, 0.00872802734375, 0.047607421875, -0.0117645263671875, 0.009185791015625, -0.040313720703125, -0.0100250244140625, 0.005069732666015625, -0.01329803466796875, -0.03125, -0.0011777877807617188, -0.01515960693359375, -0.040985107421875, 0.06085205078125, -0.0230865478515625, -0.004032135009765625, -0.01284027099609375, -0.005931854248046875, -0.00827789306640625, 0.00421905517578125, 0.054443359375, 0.06781005859375, 0.005496978759765625, -0.01413726806640625, 0.030731201171875, -0.02496337890625, -0.0114593505859375, 0.007389068603515625, -0.0065765380859375, -0.0203704833984375, 0.0230865478515625, 0.07305908203125, -0.0076446533203125, -0.059783935546875, 0.0182952880859375, 0.005260467529296875, 0.00039076805114746094, -0.00910186767578125, 0.03662109375, 0.01161956787109375, 0.01204681396484375, 
0.004383087158203125, -0.001743316650390625, 0.01052093505859375, -0.06390380859375, 0.00424957275390625, 0.0303955078125, -0.0192718505859375, -0.0113677978515625, 0.02410888671875, 0.0083465576171875, -0.039306640625, 0.048004150390625, 0.005268096923828125, -0.0307769775390625, 0.042999267578125, 0.04437255859375, 0.045257568359375, -0.0357666015625, 0.010040283203125, 0.030487060546875, 0.0235137939453125, -0.0450439453125, 0.027008056640625, 0.0011987686157226562, -0.053466796875, -0.056884765625, -0.050994873046875, -0.0157012939453125, 0.023193359375, -0.043670654296875, 0.0283355712890625, -0.0322265625, 0.0024738311767578125, 0.00963592529296875, 0.01465606689453125, -0.059326171875, 0.032196044921875, -0.003509521484375, 0.07794189453125, -0.06396484375, 0.0628662109375, 0.053863525390625, -0.049163818359375, -0.09136962890625, -0.002197265625, -0.0139312744140625, -0.06317138671875, 0.0462646484375, -0.01343536376953125, 0.0065155029296875, 0.005603790283203125, -0.058837890625, -0.07586669921875, 0.04913330078125, 0.0186309814453125, -0.047393798828125, 0.004535675048828125, 0.0001430511474609375, 0.0599365234375, -0.0258026123046875, 0.01276397705078125, 0.04931640625, 0.036865234375, 0.0011167526245117188, -0.078857421875, 0.00787353515625, -0.01212310791015625, 0.010894775390625, 0.0165252685546875, -0.03411865234375, 0.059783935546875, -0.011749267578125, 0.00485992431640625, 0.014617919921875, 0.04364013671875, -0.0204620361328125, 0.0016431808471679688, 0.042633056640625, 0.0372314453125, 0.06439208984375, -0.0045013427734375, 0.086669921875, -0.029876708984375, 0.03143310546875, 0.07916259765625, 0.00019884109497070312, 0.0721435546875, 0.023101806640625, -0.004856109619140625, 0.019805908203125, 0.055145263671875, -0.0196533203125, 0.037261962890625, 0.01422119140625, 0.00445556640625, -0.0285491943359375, -0.007480621337890625, -0.0267791748046875, 0.04913330078125, 0.0281219482421875, -0.0260162353515625, -0.01055908203125, 0.023529052734375, 
0.0014696121215820312, 0.02197265625, -0.0304412841796875, 0.0831298828125, -0.00766754150390625, -0.0826416015625, 0.041259765625, 0.022003173828125, 0.04345703125, -0.046966552734375, -0.0149993896484375, -0.00965118408203125, 0.026092529296875, -0.0084075927734375, -0.03515625, 0.0246124267578125, -0.0033969879150390625, -0.0255126953125, -0.0177154541015625, 0.04986572265625, -0.0435791015625, -0.04486083984375, -0.00202178955078125, 0.043487548828125, 0.0265960693359375, -0.004344940185546875, -0.063720703125, -0.00067901611328125, 0.0014791488647460938, -0.009521484375, 0.0263519287109375, 0.033660888671875, -0.0008378028869628906, 0.054290771484375, 0.048004150390625, 0.0294952392578125, -0.027557373046875, 0.0216522216796875, 0.06591796875, -0.043731689453125, -0.023468017578125, -0.0662841796875, 0.0192718505859375, -0.008880615234375, -0.034515380859375, 0.05084228515625, 0.021820068359375, 0.07757568359375, -0.00836944580078125, 0.05194091796875, -0.0009479522705078125, 0.0570068359375, -0.0521240234375, 0.0367431640625, -0.04376220703125, 0.004482269287109375, -0.017120361328125, -0.063720703125, -0.0162811279296875, 0.06292724609375, -0.02532958984375, 0.0243988037109375, 0.041259765625, 0.07305908203125, 0.0022335052490234375, -0.0107269287109375, 0.0292205810546875, 0.01483154296875, 0.01280975341796875, 0.01296234130859375, 0.053924560546875, -0.02490234375, 0.033416748046875, -0.020782470703125, -0.01959228515625, -0.009979248046875, -0.04510498046875, -0.0718994140625, -0.0528564453125, -0.0295867919921875, -0.0203704833984375, 0.0163116455078125, 0.0836181640625, 0.07745361328125, -0.08489990234375, -0.03271484375, 0.0038928985595703125, -0.005008697509765625, -0.031982421875, -0.02191162109375, 0.02301025390625, -0.0699462890625, -0.0762939453125, 0.0142669677734375, -0.008514404296875, -0.002376556396484375, -0.035125732421875, 0.0103912353515625, -0.0258026123046875, -0.0030956268310546875, 0.03936767578125, 0.0251617431640625, 
-0.0531005859375, -0.036041259765625, 0.0062255859375, -0.00830841064453125, -0.0164642333984375, 0.056884765625, -0.039306640625, 0.02642822265625, 0.03717041015625, 0.067626953125, 0.0176849365234375, 0.0154876708984375, 0.055877685546875, -0.044097900390625, -0.0166778564453125, 0.0091552734375, 0.0158538818359375, 0.0238494873046875, -0.052886962890625, 0.0277862548828125, -0.01413726806640625, -0.060791015625, -0.034759521484375, 0.00974273681640625, -0.0574951171875, -0.036376953125, 0.0909423828125, -0.0022335052490234375, -0.007518768310546875, -0.030242919921875, -0.04217529296875, 0.021331787109375, -0.0228424072265625, 0.076171875, 0.072509765625, -0.0176544189453125, -0.00634002685546875, -0.05487060546875, 0.06646728515625, 0.021392822265625, -0.06781005859375, 0.01079559326171875, 0.0302581787109375, 0.036956787109375, 0.02313232421875, 0.032196044921875, 0.0114898681640625, 0.0084991455078125, -0.0232696533203125, 0.0079803466796875, -0.0030078887939453125, -0.0202178955078125, -0.0297393798828125, 0.00530242919921875, 0.0108489990234375, -0.01396942138671875 ] ]
KnutJaegersberg/black_goo_recipe_c
2023-09-03T06:52:02.000Z
[ "transformers", "pytorch", "llama", "text-generation", "custom_code", "license:cc-by-nc-4.0", "endpoints_compatible", "has_space", "text-generation-inference", "region:us" ]
text-generation
KnutJaegersberg
null
null
KnutJaegersberg/black_goo_recipe_c
0
5,864
transformers
2023-09-01T06:35:49
--- license: cc-by-nc-4.0 --- Prompt Example: ``` ### System: You are an AI assistant. User will you give you a task. Your goal is to complete the task as faithfully as you can. While performing the task think step-by-step and justify your steps. ### Instruction: How do you fine tune a large language model? ### Response: To fine-tune a large language model, we use various techniques to improve its performance and adjust its parameters based on user feedback. These techniques include: 1. Fine tuning the original model with additional data. For example, adding more training examples or using specialized datasets such as question answering or natural language inference tasks. This helps the model learn new concepts and skills by interacting with additional information sources. 2. Using transfer learning methods to incorporate knowledge from other models that have been pre-trained for specific domains. For instance, using pre-trained embeddings from neural networks trained on text classification tasks like sentiment analysis or question answering to help fine-tuning the main language model. 3. Improving the quality of the training data. We may optimize the training process by increasing the accuracy of provided data sets (e.g., by improving word embedding quality), refining the selection of relevant examples, and/or providing additional supervision signals during training. 4. Incorporating knowledge from external resources such as domain-specific lexicons or domain-specific knowledge bases. For example, incorporating domain-specific rules and facts into the model to help it understand domain-specific terminology and semantics. 5. Employing techniques like dropout regularization and dropout sampling to avoid overfitting and reduce overconfidence in the model's predictions. 6. Utilizing meta-learning methods such as reinforcement learning or self-supervised learning to enable the model to adapt quickly to changing situations without being explicitly programmed. 7. 
Incorporating human input through crowdsourcing or user testing to validate the model's performance and ensure user satisfaction. 8. Implementing various optimization strategies to improve efficiency and reduce computational costs while maintaining high performance levels. In conclusion, fine tuning a large language model requires combining various techniques including fine tuning, transfer learning, improved data quality, external resources, meta-learning, optimization strategies, crowd sourcing, and user testing. These approaches aim to improve the model's ability to handle various tasks and provide users with better experiences in their daily interactions with language systems. ```
2,698
[ [ -0.04241943359375, -0.08526611328125, 0.0249176025390625, -0.00926971435546875, 0.001209259033203125, 0.00038123130798339844, -0.046783447265625, -0.0236358642578125, -0.0152130126953125, 0.04937744140625, -0.04315185546875, -0.014068603515625, -0.0296173095703125, 0.00797271728515625, -0.01287841796875, 0.07171630859375, -0.00792694091796875, 0.0066070556640625, -0.0074615478515625, 0.0145263671875, -0.058319091796875, -0.037109375, -0.08197021484375, -0.0193634033203125, 0.0198516845703125, 0.055145263671875, 0.052276611328125, 0.056671142578125, 0.041717529296875, 0.030853271484375, 0.002475738525390625, 0.044036865234375, -0.0484619140625, -0.006343841552734375, -0.0248260498046875, -0.035064697265625, -0.027435302734375, 0.0017690658569335938, 0.03265380859375, 0.045654296875, -0.007598876953125, 0.0139312744140625, 0.0209503173828125, 0.03643798828125, -0.036590576171875, 0.0161895751953125, -0.029205322265625, -0.0197296142578125, -0.0145416259765625, -0.0160675048828125, -0.01515960693359375, -0.032958984375, 0.0005168914794921875, -0.040069580078125, 0.007534027099609375, 0.0032787322998046875, 0.07769775390625, 0.0268096923828125, -0.0143890380859375, -0.020782470703125, -0.066650390625, 0.07305908203125, -0.0693359375, 0.0218658447265625, 0.06341552734375, 0.0183868408203125, -0.00881195068359375, -0.037872314453125, -0.043182373046875, -0.040557861328125, -0.0095977783203125, -0.0176239013671875, 0.0113677978515625, 0.01068115234375, 0.0399169921875, 0.023101806640625, -0.0225067138671875, 0.01300811767578125, -0.03314208984375, -0.021453857421875, 0.05908203125, 0.01258087158203125, 0.032745361328125, 0.00823974609375, 0.01247406005859375, -0.0263519287109375, -0.04339599609375, 0.005641937255859375, 0.0323486328125, 0.034912109375, 0.0133209228515625, 0.031768798828125, -0.030548095703125, 0.061309814453125, 0.01256561279296875, -0.01532745361328125, 0.01418304443359375, -0.03692626953125, -0.01044464111328125, -0.025970458984375, 0.03759765625, 
0.014068603515625, 0.05523681640625, -0.0210418701171875, -0.036102294921875, -0.006526947021484375, 0.0288848876953125, -0.04840087890625, -0.0165863037109375, 0.01239013671875, -0.034454345703125, -0.0106201171875, -0.015289306640625, -0.058990478515625, -0.0014142990112304688, -0.0689697265625, 0.0498046875, -0.044586181640625, -0.006710052490234375, 0.00756072998046875, 0.00004988908767700195, 0.005435943603515625, 0.030517578125, -0.08660888671875, 0.044677734375, 0.05047607421875, 0.05340576171875, -0.0038852691650390625, -0.037017822265625, -0.0455322265625, 0.0018863677978515625, -0.0247344970703125, 0.050048828125, -0.041839599609375, -0.0121002197265625, 0.0013513565063476562, -0.0013275146484375, -0.01406097412109375, -0.045318603515625, 0.0216217041015625, -0.03228759765625, 0.01947021484375, -0.0200347900390625, -0.06683349609375, -0.020050048828125, 0.01343536376953125, -0.037750244140625, 0.07501220703125, 0.00554656982421875, -0.02301025390625, 0.00315093994140625, -0.07342529296875, -0.040985107421875, -0.01593017578125, 0.020782470703125, -0.00653076171875, 0.01287841796875, 0.0304107666015625, 0.049468994140625, -0.0184783935546875, 0.00823211669921875, -0.00749969482421875, -0.01702880859375, 0.00623321533203125, -0.018157958984375, 0.0474853515625, 0.026092529296875, -0.0256195068359375, 0.0058135986328125, -0.053009033203125, 0.0226898193359375, 0.00449371337890625, -0.03778076171875, -0.0304718017578125, 0.0024394989013671875, 0.01934814453125, -0.0004954338073730469, 0.0233154296875, -0.01010894775390625, 0.034912109375, -0.05401611328125, 0.03363037109375, 0.04510498046875, 0.0045318603515625, 0.0273895263671875, -0.0218658447265625, 0.036163330078125, -0.0166778564453125, 0.01373291015625, -0.0193023681640625, -0.005123138427734375, -0.0738525390625, 0.01052093505859375, 0.04461669921875, 0.057464599609375, -0.05340576171875, 0.04669189453125, -0.0014333724975585938, -0.027374267578125, -0.05224609375, 0.04083251953125, 0.0341796875, 
0.027740478515625, 0.038177490234375, -0.020050048828125, -0.035003662109375, -0.049713134765625, -0.0027256011962890625, 0.006885528564453125, 0.015411376953125, 0.01251220703125, 0.038543701171875, -0.00897979736328125, 0.052093505859375, -0.050384521484375, -0.01203155517578125, -0.021575927734375, 0.0232391357421875, -0.0031147003173828125, 0.050201416015625, 0.018096923828125, -0.057159423828125, -0.036651611328125, 0.01361846923828125, -0.033050537109375, -0.0045623779296875, -0.00272369384765625, -0.001422882080078125, 0.0141143798828125, 0.058685302734375, -0.0313720703125, 0.01496124267578125, 0.022979736328125, -0.01496124267578125, 0.033050537109375, -0.0322265625, 0.0155792236328125, -0.09649658203125, 0.00426483154296875, -0.00469970703125, -0.0217437744140625, -0.0672607421875, 0.03350830078125, -0.01140594482421875, 0.00555419921875, -0.01508331298828125, 0.0450439453125, -0.0379638671875, 0.026336669921875, -0.04931640625, 0.00080108642578125, -0.0118865966796875, 0.043487548828125, -0.0027923583984375, 0.068603515625, 0.05194091796875, -0.06304931640625, 0.038787841796875, 0.006374359130859375, -0.0304718017578125, 0.02239990234375, -0.056732177734375, 0.01544952392578125, -0.01104736328125, -0.0189666748046875, -0.0716552734375, -0.013763427734375, 0.00098419189453125, -0.0246734619140625, 0.043670654296875, 0.013885498046875, -0.0513916015625, -0.016082763671875, 0.0034389495849609375, 0.0079803466796875, 0.044769287109375, -0.0251007080078125, 0.039581298828125, 0.0330810546875, -0.00952911376953125, -0.026702880859375, -0.03155517578125, -0.000025212764739990234, -0.0186767578125, -0.04132080078125, 0.017425537109375, -0.013214111328125, -0.0169219970703125, -0.01348114013671875, 0.0282745361328125, -0.01517486572265625, 0.0322265625, -0.00787353515625, 0.00572967529296875, -0.010711669921875, 0.0275421142578125, -0.005855560302734375, -0.00690460205078125, -0.01708984375, -0.0140228271484375, 0.058990478515625, -0.004779815673828125, 
-0.01375579833984375, -0.0572509765625, 0.0316162109375, 0.0308074951171875, -0.03155517578125, 0.050811767578125, 0.06512451171875, -0.029052734375, -0.0127716064453125, -0.061279296875, -0.0382080078125, -0.03363037109375, 0.04217529296875, -0.0226287841796875, -0.06292724609375, 0.03179931640625, -0.02862548828125, 0.00782012939453125, 0.0300750732421875, 0.058319091796875, -0.0083770751953125, 0.09869384765625, 0.06964111328125, -0.035125732421875, 0.0465087890625, -0.024932861328125, 0.0103912353515625, -0.04888916015625, -0.0038852691650390625, -0.05615234375, -0.02606201171875, -0.041717529296875, -0.024993896484375, -0.002674102783203125, 0.0276336669921875, -0.028106689453125, 0.0435791015625, -0.019287109375, 0.035186767578125, 0.0675048828125, 0.00167083740234375, 0.0176239013671875, 0.00543212890625, 0.005657196044921875, 0.006992340087890625, -0.050140380859375, -0.040771484375, 0.06524658203125, 0.043792724609375, 0.078125, 0.0022029876708984375, 0.049835205078125, 0.00452423095703125, -0.00051116943359375, -0.06317138671875, 0.02288818359375, -0.00788116455078125, -0.037872314453125, -0.047454833984375, -0.029083251953125, -0.0626220703125, -0.00345611572265625, -0.01995849609375, -0.057647705078125, 0.0035915374755859375, 0.032989501953125, -0.036773681640625, -0.00733184814453125, -0.07098388671875, 0.0679931640625, -0.0219268798828125, -0.00870513916015625, -0.0372314453125, -0.0262298583984375, 0.001163482666015625, -0.004451751708984375, 0.004634857177734375, 0.0108489990234375, -0.00215911865234375, 0.04071044921875, -0.0310821533203125, 0.0777587890625, -0.003986358642578125, 0.006061553955078125, 0.01023101806640625, 0.0012531280517578125, 0.048004150390625, -0.022308349609375, 0.0012521743774414062, -0.00482177734375, 0.0158843994140625, -0.0225067138671875, -0.058013916015625, 0.042938232421875, -0.08111572265625, -0.05194091796875, 0.004913330078125, -0.047393798828125, -0.032257080078125, 0.0113983154296875, 0.01102447509765625, 
0.031402587890625, -0.01800537109375, 0.0081329345703125, 0.0577392578125, -0.0107269287109375, 0.025177001953125, 0.031463623046875, -0.007457733154296875, -0.01360321044921875, 0.072998046875, 0.007205963134765625, 0.0269012451171875, 0.032562255859375, 0.02557373046875, -0.0230560302734375, -0.038848876953125, -0.032257080078125, 0.018157958984375, -0.042144775390625, -0.01222991943359375, -0.04315185546875, -0.01116943359375, -0.055877685546875, 0.0094451904296875, -0.0159759521484375, -0.009246826171875, -0.032989501953125, 0.003986358642578125, 0.033203125, 0.042572021484375, 0.007175445556640625, 0.038848876953125, -0.057464599609375, 0.0202789306640625, 0.0235748291015625, 0.021728515625, 0.01325225830078125, -0.0517578125, -0.03375244140625, 0.0154876708984375, -0.0169830322265625, -0.04180908203125, 0.023956298828125, 0.02545166015625, 0.052947998046875, 0.0248260498046875, -0.017120361328125, 0.0689697265625, -0.056243896484375, 0.049163818359375, -0.0140533447265625, -0.08197021484375, 0.06390380859375, -0.01306915283203125, 0.0254058837890625, 0.055633544921875, 0.034515380859375, -0.037841796875, -0.01678466796875, -0.031341552734375, -0.04864501953125, 0.059234619140625, 0.0013704299926757812, 0.0293426513671875, 0.00873565673828125, 0.047607421875, -0.0117645263671875, 0.00917816162109375, -0.040313720703125, -0.01001739501953125, 0.005069732666015625, -0.0133056640625, -0.03125, -0.0011806488037109375, -0.01515960693359375, -0.041046142578125, 0.060821533203125, -0.023101806640625, -0.004016876220703125, -0.0128326416015625, -0.005924224853515625, -0.0082855224609375, 0.0042266845703125, 0.054473876953125, 0.0677490234375, 0.0054931640625, -0.01413726806640625, 0.030731201171875, -0.02496337890625, -0.0114593505859375, 0.007373809814453125, -0.0065765380859375, -0.020355224609375, 0.0230865478515625, 0.07305908203125, -0.00765228271484375, -0.059783935546875, 0.018280029296875, 0.0052490234375, 0.0003838539123535156, -0.0091094970703125, 
0.036651611328125, 0.01161956787109375, 0.0120849609375, 0.004383087158203125, -0.0017385482788085938, 0.010498046875, -0.06390380859375, 0.004238128662109375, 0.0303955078125, -0.0192718505859375, -0.01137542724609375, 0.02410888671875, 0.008331298828125, -0.039306640625, 0.04803466796875, 0.005268096923828125, -0.0307769775390625, 0.042999267578125, 0.04437255859375, 0.045257568359375, -0.035736083984375, 0.010040283203125, 0.0305023193359375, 0.0235137939453125, -0.0450439453125, 0.027008056640625, 0.0012035369873046875, -0.053466796875, -0.056884765625, -0.051025390625, -0.0157012939453125, 0.0231781005859375, -0.043670654296875, 0.0283050537109375, -0.0322265625, 0.0024776458740234375, 0.0096435546875, 0.01465606689453125, -0.059326171875, 0.0322265625, -0.0035152435302734375, 0.07794189453125, -0.06396484375, 0.06280517578125, 0.05389404296875, -0.04913330078125, -0.09136962890625, -0.0022068023681640625, -0.0139312744140625, -0.06317138671875, 0.0462646484375, -0.013427734375, 0.006511688232421875, 0.00560760498046875, -0.05889892578125, -0.075927734375, 0.04913330078125, 0.018646240234375, -0.047454833984375, 0.00453948974609375, 0.00014007091522216797, 0.059967041015625, -0.0258026123046875, 0.0127716064453125, 0.04931640625, 0.036865234375, 0.0011110305786132812, -0.07879638671875, 0.00786590576171875, -0.01213836669921875, 0.01090240478515625, 0.0165252685546875, -0.03411865234375, 0.059814453125, -0.0117950439453125, 0.004848480224609375, 0.014617919921875, 0.04364013671875, -0.02044677734375, 0.0016603469848632812, 0.042633056640625, 0.0372314453125, 0.06439208984375, -0.0045013427734375, 0.086669921875, -0.029876708984375, 0.03143310546875, 0.07916259765625, 0.0001881122589111328, 0.0721435546875, 0.0230865478515625, -0.00484466552734375, 0.0198211669921875, 0.055145263671875, -0.0196533203125, 0.03729248046875, 0.01419830322265625, 0.0044403076171875, -0.0285491943359375, -0.0074920654296875, -0.02679443359375, 0.049072265625, 0.0281219482421875, 
-0.0260162353515625, -0.01055908203125, 0.023529052734375, 0.0014705657958984375, 0.0219573974609375, -0.0304412841796875, 0.0830078125, -0.007671356201171875, -0.0826416015625, 0.041259765625, 0.022003173828125, 0.04345703125, -0.046966552734375, -0.0149993896484375, -0.0096588134765625, 0.0261077880859375, -0.0084075927734375, -0.035125732421875, 0.0246124267578125, -0.003376007080078125, -0.0255279541015625, -0.0177154541015625, 0.04986572265625, -0.0435791015625, -0.04486083984375, -0.0020236968994140625, 0.0435791015625, 0.0265960693359375, -0.00434112548828125, -0.063720703125, -0.0006890296936035156, 0.00148773193359375, -0.0095062255859375, 0.0263519287109375, 0.033660888671875, -0.0008454322814941406, 0.054351806640625, 0.04803466796875, 0.0294952392578125, -0.0275421142578125, 0.021636962890625, 0.06591796875, -0.043731689453125, -0.023468017578125, -0.0662841796875, 0.0192718505859375, -0.0088653564453125, -0.034515380859375, 0.05084228515625, 0.021820068359375, 0.07757568359375, -0.0083770751953125, 0.052001953125, -0.0009617805480957031, 0.0570068359375, -0.052093505859375, 0.0367431640625, -0.043792724609375, 0.00447845458984375, -0.017120361328125, -0.06365966796875, -0.0162811279296875, 0.06292724609375, -0.025299072265625, 0.0244140625, 0.041259765625, 0.07305908203125, 0.00223541259765625, -0.01073455810546875, 0.0292205810546875, 0.01483154296875, 0.01280975341796875, 0.012939453125, 0.053924560546875, -0.0249176025390625, 0.033416748046875, -0.0207977294921875, -0.019561767578125, -0.009979248046875, -0.04510498046875, -0.07183837890625, -0.0528564453125, -0.0295867919921875, -0.0203857421875, 0.016326904296875, 0.08355712890625, 0.0775146484375, -0.08489990234375, -0.03271484375, 0.0038890838623046875, -0.004978179931640625, -0.031982421875, -0.02191162109375, 0.022979736328125, -0.0699462890625, -0.0762939453125, 0.0142669677734375, -0.008514404296875, -0.002376556396484375, -0.035125732421875, 0.01036834716796875, -0.0258026123046875, 
-0.003086090087890625, 0.03936767578125, 0.0251617431640625, -0.053192138671875, -0.0360107421875, 0.006214141845703125, -0.00832366943359375, -0.0164794921875, 0.056884765625, -0.03936767578125, 0.026458740234375, 0.03717041015625, 0.06756591796875, 0.0176544189453125, 0.0154876708984375, 0.055877685546875, -0.044097900390625, -0.01666259765625, 0.0091552734375, 0.0158538818359375, 0.0238494873046875, -0.05291748046875, 0.0277862548828125, -0.0141448974609375, -0.060791015625, -0.034759521484375, 0.00975799560546875, -0.05755615234375, -0.036376953125, 0.0909423828125, -0.0022335052490234375, -0.007534027099609375, -0.03021240234375, -0.04217529296875, 0.0213623046875, -0.0228424072265625, 0.076171875, 0.072509765625, -0.0176544189453125, -0.00632476806640625, -0.0548095703125, 0.06646728515625, 0.0214080810546875, -0.06781005859375, 0.0107879638671875, 0.030242919921875, 0.036956787109375, 0.02313232421875, 0.032196044921875, 0.0114898681640625, 0.0084991455078125, -0.0232696533203125, 0.00797271728515625, -0.002986907958984375, -0.02020263671875, -0.0297393798828125, 0.00530242919921875, 0.0108489990234375, -0.01396942138671875 ] ]
Fredithefish/ScarletPajama-3B-HF
2023-06-11T06:34:37.000Z
[ "transformers", "pytorch", "gpt_neox", "text-generation", "license:apache-2.0", "has_space", "text-generation-inference", "region:us" ]
text-generation
Fredithefish
null
null
Fredithefish/ScarletPajama-3B-HF
9
5,863
transformers
2023-05-22T22:55:30
--- license: apache-2.0 inference: false --- # ScarletPajama Introducing ScarletPajama: a language model that has been finetuned on the ShareGPT dataset. Built upon the robust RedPajama-INCITE-Chat-3b architecture. <br> The original ShareGPT dataset consisted of 53k pairs of conversational exchanges. In order to optimize the training process, the dataset was converted to the appropriate format and filtered to remove long texts. The resulting filtered version of ShareGPT contains 22k pairs, ensuring a more focused and efficient training process. ## Model Details - **Model Name**: ScarletPajama - **Base Model**: RedPajama-INCITE-Chat-3b - **Dataset**: <a href="https://huggingface.co/datasets/Fredithefish/ShareGPT-Unfiltered-RedPajama-Chat-format/blob/main/ShareGPT-22k.jsonl">ShareGPT-22K</a> - **Fine-tuning Epochs**: 2
833
[ [ -0.0283355712890625, -0.053375244140625, 0.01033782958984375, 0.054840087890625, -0.0150299072265625, 0.01016998291015625, -0.0200042724609375, -0.047210693359375, 0.029571533203125, 0.03717041015625, -0.054931640625, -0.021942138671875, -0.0621337890625, -0.01367950439453125, -0.038726806640625, 0.10009765625, 0.009063720703125, -0.002231597900390625, 0.0228118896484375, -0.007038116455078125, -0.022216796875, -0.0230865478515625, -0.066162109375, -0.0267791748046875, 0.0182647705078125, 0.03948974609375, 0.06951904296875, 0.02716064453125, 0.0438232421875, 0.014892578125, -0.021942138671875, 0.007144927978515625, -0.0653076171875, 0.0022792816162109375, 0.00962066650390625, -0.0023860931396484375, -0.038177490234375, 0.00109100341796875, 0.040283203125, 0.04083251953125, -0.015869140625, 0.0157012939453125, 0.004718780517578125, 0.047210693359375, -0.034454345703125, 0.032073974609375, -0.044586181640625, -0.00736236572265625, -0.0234375, 0.007720947265625, -0.02130126953125, -0.0252838134765625, 0.0212249755859375, -0.031524658203125, 0.01085662841796875, 0.0054473876953125, 0.06951904296875, 0.0029430389404296875, -0.017303466796875, -0.01219940185546875, -0.038360595703125, 0.051025390625, -0.0267486572265625, 0.018890380859375, 0.04107666015625, 0.028045654296875, -0.0200347900390625, -0.059844970703125, -0.03955078125, -0.0023193359375, -0.006649017333984375, 0.0006361007690429688, -0.03106689453125, 0.0010042190551757812, 0.0021076202392578125, 0.0438232421875, -0.0594482421875, -0.0217437744140625, -0.044464111328125, 0.0034885406494140625, 0.048828125, -0.0016164779663085938, 0.034088134765625, 0.00037550926208496094, -0.027557373046875, 0.00782012939453125, -0.047607421875, -0.040618896484375, 0.021636962890625, 0.031890869140625, -0.046966552734375, 0.03094482421875, -0.01558685302734375, 0.0546875, -0.0081787109375, 0.02001953125, 0.0214080810546875, -0.0235137939453125, -0.0245208740234375, -0.0236968994140625, 0.07147216796875, 0.008026123046875, 
0.04034423828125, -0.01111602783203125, 0.0074310302734375, 0.0186767578125, 0.0166473388671875, -0.0618896484375, -0.05206298828125, -0.0002803802490234375, -0.03094482421875, -0.0206756591796875, -0.0218505859375, -0.044525146484375, -0.02960205078125, -0.0150909423828125, 0.0306243896484375, -0.036834716796875, -0.02960205078125, 0.02105712890625, 0.0070953369140625, 0.0008392333984375, 0.021636962890625, -0.077392578125, 0.04742431640625, 0.051513671875, 0.056488037109375, -0.01497650146484375, -0.0245208740234375, 0.004077911376953125, -0.01232147216796875, -0.0137481689453125, 0.04150390625, -0.033203125, -0.006000518798828125, -0.011962890625, 0.0007481575012207031, -0.012359619140625, -0.0218963623046875, 0.0261993408203125, -0.0316162109375, 0.0364990234375, 0.01015472412109375, -0.0310211181640625, -0.02667236328125, 0.02740478515625, -0.0379638671875, 0.07305908203125, 0.0163421630859375, -0.052642822265625, 0.02667236328125, -0.0679931640625, -0.0150299072265625, 0.019805908203125, 0.01413726806640625, -0.025421142578125, -0.014801025390625, 0.01654052734375, 0.033935546875, -0.03350830078125, 0.017303466796875, -0.0245361328125, -0.032867431640625, -0.003368377685546875, -0.0223236083984375, 0.06646728515625, 0.04107666015625, -0.010833740234375, 0.01007080078125, -0.05914306640625, -0.01299285888671875, 0.015716552734375, -0.00853729248046875, -0.005229949951171875, -0.0218048095703125, 0.021209716796875, 0.017578125, 0.02667236328125, -0.048431396484375, 0.0178680419921875, -0.0310821533203125, 0.0298309326171875, 0.038909912109375, -0.007457733154296875, 0.0210418701171875, -0.04656982421875, 0.048004150390625, -0.011199951171875, 0.0259552001953125, -0.00231170654296875, -0.060791015625, -0.07196044921875, -0.041290283203125, 0.00875091552734375, 0.03466796875, -0.029571533203125, 0.01229095458984375, -0.006641387939453125, -0.050537109375, -0.054840087890625, -0.00537872314453125, 0.0251007080078125, -0.003437042236328125, 0.0243072509765625, 
-0.02728271484375, -0.0711669921875, -0.05059814453125, -0.0167083740234375, -0.020660400390625, 0.017425537109375, 0.021759033203125, 0.03082275390625, -0.03631591796875, 0.061187744140625, -0.031829833984375, -0.006244659423828125, -0.022064208984375, -0.0026264190673828125, 0.03863525390625, 0.040740966796875, 0.0306396484375, -0.0440673828125, -0.05670166015625, -0.00234222412109375, -0.039520263671875, -0.005428314208984375, 0.000881195068359375, -0.01129150390625, 0.01218414306640625, 0.01995849609375, -0.033233642578125, 0.0296478271484375, 0.08636474609375, -0.050872802734375, 0.0308685302734375, -0.00348663330078125, 0.019561767578125, -0.0975341796875, 0.0073699951171875, -0.00260162353515625, -0.04180908203125, -0.041900634765625, 0.026397705078125, -0.00811767578125, -0.023834228515625, -0.039215087890625, 0.052947998046875, -0.0396728515625, 0.0007343292236328125, -0.0270233154296875, -0.0009698867797851562, -0.019012451171875, 0.040679931640625, 0.0011186599731445312, 0.056427001953125, 0.046295166015625, -0.0205078125, 0.0241241455078125, 0.037841796875, -0.0186767578125, 0.006496429443359375, -0.05609130859375, 0.025360107421875, 0.0156097412109375, 0.01119232177734375, -0.07904052734375, -0.00435638427734375, 0.035491943359375, -0.037750244140625, 0.00506591796875, -0.042816162109375, -0.04425048828125, -0.0084686279296875, -0.00789642333984375, 0.02789306640625, 0.04974365234375, -0.0499267578125, 0.013580322265625, 0.0555419921875, -0.016845703125, -0.03826904296875, -0.037689208984375, 0.020721435546875, -0.02008056640625, -0.07171630859375, 0.0081787109375, -0.01245880126953125, -0.00030517578125, -0.0284881591796875, 0.01522064208984375, -0.0167236328125, 0.01036834716796875, 0.035980224609375, 0.0191497802734375, 0.00847625732421875, -0.0031147003173828125, -0.020721435546875, -0.005168914794921875, -0.01337432861328125, 0.0009889602661132812, 0.08526611328125, -0.033966064453125, -0.0011892318725585938, -0.03369140625, 0.0206451416015625, 
0.025177001953125, 0.019439697265625, 0.08758544921875, 0.038116455078125, -0.0240325927734375, -0.0089263916015625, -0.0211029052734375, -0.017822265625, -0.035308837890625, 0.014068603515625, -0.0224151611328125, -0.09259033203125, 0.03802490234375, 0.0128021240234375, -0.00800323486328125, 0.0223236083984375, 0.044708251953125, -0.001922607421875, 0.062103271484375, 0.038238525390625, -0.00926971435546875, 0.046966552734375, -0.018035888671875, 0.0217132568359375, -0.04388427734375, -0.0173797607421875, -0.04486083984375, -0.005062103271484375, -0.068603515625, -0.03619384765625, -0.0025463104248046875, 0.00603485107421875, -0.052398681640625, 0.05279541015625, -0.0430908203125, 0.044708251953125, 0.036865234375, 0.0186309814453125, 0.00934600830078125, -0.0020694732666015625, 0.0184783935546875, -0.0069427490234375, -0.05059814453125, -0.0251617431640625, 0.077880859375, 0.055816650390625, 0.06719970703125, 0.020233154296875, 0.044586181640625, 0.01131439208984375, 0.0080413818359375, -0.038421630859375, 0.043975830078125, -0.02496337890625, -0.044219970703125, 0.0032196044921875, -0.02978515625, -0.0728759765625, 0.0131988525390625, -0.01384735107421875, -0.061431884765625, -0.004467010498046875, 0.01190185546875, 0.0116729736328125, -0.0002562999725341797, -0.056488037109375, 0.0740966796875, 0.0233154296875, -0.00028133392333984375, -0.0161895751953125, -0.0362548828125, 0.0242156982421875, 0.0057373046875, -0.005985260009765625, -0.01329803466796875, 0.0278472900390625, 0.06719970703125, -0.02923583984375, 0.06829833984375, 0.003108978271484375, 0.007659912109375, 0.008575439453125, 0.0191802978515625, 0.048675537109375, 0.0009064674377441406, 0.0193634033203125, 0.040008544921875, -0.00975799560546875, -0.035430908203125, -0.006744384765625, 0.054595947265625, -0.07061767578125, -0.017822265625, -0.049072265625, -0.047088623046875, -0.0179901123046875, 0.00809478759765625, 0.01311492919921875, 0.0048675537109375, -0.0149993896484375, 0.0240020751953125, 
0.0287017822265625, -0.01177215576171875, 0.02349853515625, 0.03509521484375, -0.0313720703125, -0.024688720703125, 0.055328369140625, -0.01898193359375, 0.01568603515625, -0.0023651123046875, 0.0127410888671875, -0.0199127197265625, -0.054107666015625, -0.004756927490234375, 0.0284881591796875, -0.03179931640625, -0.0007433891296386719, -0.063232421875, -0.034149169921875, -0.050262451171875, 0.00800323486328125, -0.037139892578125, -0.045806884765625, -0.02362060546875, 0.01194000244140625, 0.053253173828125, 0.0457763671875, -0.002353668212890625, 0.0673828125, -0.048431396484375, 0.023406982421875, 0.016937255859375, 0.039764404296875, -0.031280517578125, -0.058990478515625, -0.0159912109375, 0.006244659423828125, -0.015533447265625, -0.045623779296875, 0.052978515625, 0.022705078125, 0.0154266357421875, 0.00470733642578125, -0.013031005859375, 0.062744140625, -0.0204315185546875, 0.05767822265625, 0.010772705078125, -0.06597900390625, 0.050537109375, -0.04833984375, 0.02496337890625, 0.03204345703125, 0.0227813720703125, -0.06884765625, -0.03656005859375, -0.06842041015625, -0.0692138671875, 0.0745849609375, 0.03167724609375, 0.0164947509765625, -0.01131439208984375, 0.007648468017578125, 0.006275177001953125, 0.0114288330078125, -0.06805419921875, -0.02203369140625, 0.0008473396301269531, -0.0265045166015625, -0.004634857177734375, -0.035369873046875, -0.018035888671875, -0.007183074951171875, 0.06988525390625, -0.0002276897430419922, 0.025787353515625, -0.0270538330078125, -0.01093292236328125, 0.0048828125, -0.001972198486328125, 0.0380859375, 0.0474853515625, -0.02899169921875, -0.021331787109375, -0.01018524169921875, -0.05377197265625, -0.01947021484375, 0.0133514404296875, -0.01043701171875, -0.0109100341796875, 0.0162200927734375, 0.0943603515625, -0.00518035888671875, -0.03302001953125, 0.04095458984375, -0.0273590087890625, 0.0124359130859375, -0.03857421875, 0.00994110107421875, 0.003376007080078125, 0.018280029296875, 0.0022029876708984375, 
0.0237274169921875, 0.00827789306640625, -0.0325927734375, 0.029632568359375, 0.0217437744140625, -0.04278564453125, -0.01568603515625, 0.033660888671875, 0.0100555419921875, -0.01428985595703125, 0.042449951171875, 0.006725311279296875, -0.0237884521484375, 0.0257720947265625, 0.055938720703125, 0.057952880859375, -0.01293182373046875, 0.022186279296875, 0.02838134765625, 0.0174560546875, -0.013214111328125, 0.036529541015625, 0.0019741058349609375, -0.0782470703125, -0.036956787109375, -0.054931640625, -0.035980224609375, 0.017303466796875, -0.0390625, 0.0226287841796875, -0.038238525390625, 0.011962890625, -0.007442474365234375, 0.01165771484375, -0.04400634765625, 0.01837158203125, 0.0003509521484375, 0.06915283203125, -0.07904052734375, 0.07501220703125, 0.04205322265625, -0.013458251953125, -0.0634765625, -0.013885498046875, 0.002361297607421875, -0.08807373046875, 0.041717529296875, 0.005130767822265625, 0.01238250732421875, -0.002307891845703125, -0.0367431640625, -0.060089111328125, 0.04608154296875, 0.0384521484375, -0.04461669921875, 0.02447509765625, -0.004283905029296875, 0.045623779296875, -0.0372314453125, 0.03045654296875, 0.062286376953125, 0.01523590087890625, 0.0267486572265625, -0.08984375, -0.0166778564453125, -0.046600341796875, 0.005367279052734375, 0.0174560546875, -0.043060302734375, 0.0692138671875, 0.00457000732421875, -0.017242431640625, 0.0081939697265625, 0.03485107421875, 0.034942626953125, -0.0003082752227783203, 0.027252197265625, 0.0545654296875, 0.033843994140625, -0.03326416015625, 0.08056640625, -0.017425537109375, 0.0460205078125, 0.10467529296875, 0.002948760986328125, 0.049560546875, 0.031982421875, -0.0179290771484375, 0.020111083984375, 0.05670166015625, 0.0087127685546875, 0.043426513671875, 0.01032257080078125, -0.0245208740234375, -0.001068115234375, 0.00007390975952148438, -0.03277587890625, 0.0379638671875, 0.025299072265625, -0.026123046875, -0.00342559814453125, -0.00782012939453125, 0.036895751953125, 
-0.0027923583984375, -0.00948333740234375, 0.07769775390625, -0.006103515625, -0.0411376953125, 0.052337646484375, -0.01328277587890625, 0.0675048828125, -0.0428466796875, 0.003932952880859375, -0.035400390625, 0.0233154296875, -0.01016998291015625, -0.0657958984375, 0.03912353515625, 0.0033626556396484375, -0.004547119140625, -0.023529052734375, 0.0562744140625, -0.053619384765625, -0.01605224609375, 0.033233642578125, 0.00595855712890625, 0.033721923828125, -0.0137939453125, -0.064453125, 0.0259246826171875, 0.0133514404296875, -0.01535797119140625, 0.0159912109375, 0.00760650634765625, 0.00385284423828125, 0.06671142578125, 0.047607421875, 0.03411865234375, 0.003387451171875, 0.0140228271484375, 0.056610107421875, -0.042266845703125, -0.058258056640625, -0.05450439453125, 0.0288848876953125, -0.012908935546875, -0.028045654296875, 0.06439208984375, 0.0262298583984375, 0.05615234375, -0.004085540771484375, 0.046051025390625, -0.02044677734375, 0.051788330078125, -0.01544189453125, 0.0675048828125, -0.0221099853515625, 0.0190887451171875, -0.0296783447265625, -0.0689697265625, -0.0330810546875, 0.061279296875, -0.0048065185546875, 0.0103607177734375, 0.0277099609375, 0.0731201171875, 0.002689361572265625, 0.0306396484375, 0.0214691162109375, 0.024383544921875, 0.0219573974609375, 0.046600341796875, 0.05792236328125, -0.06744384765625, 0.0241851806640625, -0.0179901123046875, -0.022979736328125, -0.023681640625, -0.06781005859375, -0.060516357421875, -0.044219970703125, -0.03369140625, -0.03936767578125, 0.0239410400390625, 0.06805419921875, 0.07794189453125, -0.074462890625, -0.028076171875, -0.0167236328125, -0.01169586181640625, -0.00592041015625, -0.0140533447265625, 0.017181396484375, -0.011505126953125, -0.0594482421875, 0.041290283203125, 0.00847625732421875, 0.0118408203125, -0.0018281936645507812, -0.0227508544921875, -0.0240325927734375, -0.0027561187744140625, 0.03497314453125, 0.0230865478515625, -0.016448974609375, -0.030670166015625, 
-0.030670166015625, -0.0057525634765625, 0.0016937255859375, 0.037078857421875, -0.04669189453125, 0.024688720703125, 0.04364013671875, 0.029144287109375, 0.06378173828125, -0.0193328857421875, 0.048614501953125, -0.07684326171875, 0.042449951171875, 0.0242156982421875, 0.031768798828125, 0.0220489501953125, -0.01468658447265625, 0.057769775390625, 0.0107269287109375, -0.055877685546875, -0.05718994140625, 0.0014095306396484375, -0.09698486328125, -0.00792694091796875, 0.0897216796875, -0.01708984375, -0.0010366439819335938, -0.0184326171875, -0.0207672119140625, 0.01316070556640625, -0.0401611328125, 0.060394287109375, 0.04302978515625, -0.0022525787353515625, -0.037841796875, -0.01424407958984375, 0.04443359375, 0.02301025390625, -0.0599365234375, 0.0042724609375, 0.029296875, 0.036041259765625, -0.002704620361328125, 0.07257080078125, -0.01763916015625, 0.0282440185546875, -0.006175994873046875, 0.00772857666015625, 0.0005435943603515625, -0.037750244140625, -0.021636962890625, 0.021759033203125, -0.002979278564453125, -0.03887939453125 ] ]
NewstaR/Starlight-13B
2023-09-16T00:26:36.000Z
[ "transformers", "pytorch", "llama", "text-generation", "llama2", "en", "dataset:FinchResearch/AboveTheClouds", "endpoints_compatible", "text-generation-inference", "region:us" ]
text-generation
NewstaR
null
null
NewstaR/Starlight-13B
0
5,863
transformers
2023-09-11T22:17:57
--- language: - en datasets: - FinchResearch/AboveTheClouds tags: - llama - llama2 --- # Starlight (13B) | Model | Average ⬆️ | ARC | HellaSwag | MMLU | TruthfulQA | |----------------------|------------|-------|-----------|-------|------------| | NewstaR/Starlight-13B| 58.63 | 59.3 | 82.15 | 55.67 | 37.39 | | NewstaR/Starlight-7B | 54.3 | 53.07 | 78.57 | 46.8 | 38.75 | ## The model follows the Alpaca template: ``` ### Instruction: {prompt} ### Response: ``` ## Example: ``` ### Instruction: Summarize the key details of the Starlight model in a few sentences. ### Response: Starlight is a 13B parameter transformer model trained on the AverageData and Above the Clouds datasets for conversational text generation. It has strong language modeling capabilities but lacks true language understanding and may generate incorrect or biased text, so outputs should be monitored and safeguards implemented. The model is intended for use in chatbots and content creation applications. ``` ## Instructions for Safe Use - Avoid exposing Starlight to offensive, unethical, dangerous or illegal prompts - Monitor outputs for signs of bias, toxicity or factual incorrectness - Do not rely on Starlight for high-stakes or safety critical applications ## Limitations - May hallucinate or generate incorrect information - Large model size leads to high compute requirements
1,418
[ [ -0.030120849609375, -0.072265625, 0.0162353515625, 0.02777099609375, -0.0250244140625, -0.033538818359375, -0.007015228271484375, -0.0308074951171875, 0.005153656005859375, 0.058837890625, -0.040740966796875, -0.059478759765625, -0.04949951171875, -0.01258087158203125, -0.027099609375, 0.0723876953125, 0.021759033203125, -0.007526397705078125, 0.029205322265625, -0.000006854534149169922, -0.03631591796875, -0.0157928466796875, -0.038055419921875, 0.00203704833984375, 0.02783203125, 0.0406494140625, 0.090087890625, 0.034393310546875, 0.051116943359375, 0.0178070068359375, -0.00989532470703125, 0.0123443603515625, -0.04248046875, -0.0175933837890625, 0.004657745361328125, -0.0137939453125, -0.03460693359375, 0.01049041748046875, 0.031402587890625, 0.038421630859375, -0.0118408203125, 0.04486083984375, 0.0003399848937988281, 0.043548583984375, -0.0281524658203125, 0.040618896484375, -0.03924560546875, -0.017364501953125, -0.0135498046875, -0.004352569580078125, -0.035491943359375, -0.034820556640625, -0.006412506103515625, -0.044677734375, 0.041534423828125, 0.04779052734375, 0.08978271484375, 0.0270538330078125, -0.039306640625, -0.0330810546875, -0.057891845703125, 0.053497314453125, -0.059600830078125, 0.020477294921875, 0.027130126953125, 0.04345703125, 0.01611328125, -0.05438232421875, -0.046112060546875, -0.013885498046875, -0.00864410400390625, 0.01502227783203125, -0.027252197265625, 0.00040078163146972656, 0.02423095703125, 0.019500732421875, -0.033477783203125, 0.007656097412109375, -0.051177978515625, -0.00945281982421875, 0.0440673828125, 0.0294952392578125, -0.01186370849609375, -0.04974365234375, -0.033843994140625, -0.022552490234375, -0.042755126953125, 0.0109405517578125, 0.03411865234375, 0.0262908935546875, -0.024322509765625, 0.041473388671875, -0.0006771087646484375, 0.0418701171875, -0.0065460205078125, -0.005580902099609375, 0.0258331298828125, -0.023406982421875, -0.028961181640625, -0.01340484619140625, 0.06634521484375, 0.06634521484375, 
-0.0153350830078125, 0.0052490234375, -0.007335662841796875, 0.0262298583984375, 0.0246429443359375, -0.0745849609375, -0.0033206939697265625, 0.020172119140625, -0.0633544921875, -0.035491943359375, 0.0010690689086914062, -0.06658935546875, -0.016845703125, 0.0022125244140625, 0.031219482421875, -0.02056884765625, -0.00496673583984375, 0.022186279296875, 0.025787353515625, 0.025970458984375, 0.005237579345703125, -0.0667724609375, 0.0245819091796875, 0.0123138427734375, 0.0235748291015625, 0.00745391845703125, -0.0201873779296875, 0.00678253173828125, 0.0007944107055664062, -0.0310821533203125, 0.0218505859375, -0.00034236907958984375, -0.04779052734375, -0.013336181640625, 0.0161590576171875, 0.0007624626159667969, -0.0296630859375, 0.068603515625, -0.0406494140625, 0.0294647216796875, -0.00894927978515625, -0.04180908203125, -0.04376220703125, -0.0160064697265625, -0.0469970703125, 0.058319091796875, 0.018524169921875, -0.035919189453125, 0.019683837890625, -0.0421142578125, 0.01227569580078125, 0.00846099853515625, -0.00482940673828125, -0.0374755859375, -0.00670623779296875, 0.0191802978515625, 0.0360107421875, -0.04583740234375, 0.044586181640625, 0.00217437744140625, -0.0237884521484375, 0.00991058349609375, -0.05718994140625, 0.06268310546875, 0.031951904296875, -0.03326416015625, 0.0275115966796875, -0.06707763671875, -0.0099334716796875, 0.0206451416015625, 0.005298614501953125, 0.00936126708984375, -0.0188140869140625, 0.052276611328125, 0.003742218017578125, 0.0207977294921875, -0.043243408203125, 0.0032634735107421875, -0.00922393798828125, 0.00064849853515625, 0.035614013671875, 0.008575439453125, 0.0132293701171875, -0.0094757080078125, 0.034423828125, 0.00246429443359375, 0.04248046875, -0.0020122528076171875, -0.0418701171875, -0.07928466796875, 0.008514404296875, 0.0009713172912597656, 0.037841796875, -0.04693603515625, 0.0772705078125, 0.0176849365234375, -0.085693359375, -0.040069580078125, 0.007495880126953125, 0.03179931640625, 
0.0699462890625, 0.046600341796875, 0.0122222900390625, -0.040679931640625, -0.0860595703125, 0.01476287841796875, -0.01508331298828125, 0.023223876953125, 0.039703369140625, 0.032196044921875, -0.03619384765625, 0.04779052734375, -0.05462646484375, -0.0202789306640625, -0.02685546875, -0.005130767822265625, 0.03955078125, 0.028472900390625, 0.0621337890625, -0.051605224609375, -0.01253509521484375, -0.03118896484375, -0.0645751953125, -0.021209716796875, -0.01444244384765625, -0.0136566162109375, 0.00809478759765625, 0.024566650390625, -0.04486083984375, 0.054443359375, 0.06292724609375, -0.08734130859375, 0.066650390625, 0.00006443262100219727, 0.0220489501953125, -0.09698486328125, 0.0206756591796875, 0.00585174560546875, 0.005863189697265625, -0.049041748046875, 0.00864410400390625, 0.0236053466796875, -0.0184783935546875, -0.052581787109375, 0.039947509765625, -0.0127105712890625, 0.01483154296875, -0.039947509765625, -0.021820068359375, 0.0033435821533203125, 0.036590576171875, 0.0018587112426757812, 0.05645751953125, 0.045501708984375, -0.045623779296875, 0.04510498046875, 0.042877197265625, -0.045379638671875, -0.0018091201782226562, -0.054718017578125, -0.0295867919921875, 0.01261138916015625, 0.0139312744140625, -0.052276611328125, 0.00957489013671875, 0.0198516845703125, -0.0281829833984375, -0.03363037109375, 0.0251007080078125, -0.029022216796875, -0.027252197265625, 0.014739990234375, 0.0281219482421875, 0.046173095703125, -0.030364990234375, 0.027862548828125, 0.0002281665802001953, -0.004024505615234375, -0.038330078125, -0.030120849609375, 0.00640869140625, -0.02471923828125, -0.059417724609375, 0.03680419921875, 0.0035343170166015625, -0.00945281982421875, -0.01169586181640625, 0.0225677490234375, 0.01432037353515625, -0.0136260986328125, 0.0310821533203125, 0.051849365234375, -0.0232391357421875, -0.019683837890625, -0.01554107666015625, -0.031890869140625, 0.01110076904296875, 0.046783447265625, 0.05499267578125, -0.03094482421875, 
-0.02642822265625, -0.037841796875, 0.0213775634765625, 0.0322265625, -0.00014638900756835938, 0.06927490234375, 0.0452880859375, -0.0237579345703125, -0.00122833251953125, -0.04571533203125, -0.0162200927734375, -0.039154052734375, 0.0400390625, -0.008148193359375, -0.034576416015625, 0.07562255859375, 0.035858154296875, 0.005283355712890625, 0.0596923828125, 0.046630859375, 0.0290985107421875, 0.06353759765625, 0.065185546875, 0.0209808349609375, 0.023345947265625, -0.0352783203125, 0.010955810546875, -0.048095703125, -0.041656494140625, -0.039886474609375, -0.0241546630859375, -0.020263671875, -0.022979736328125, 0.008575439453125, 0.01470184326171875, -0.033233642578125, 0.0572509765625, -0.026153564453125, 0.0169830322265625, 0.0477294921875, 0.0168914794921875, 0.0215606689453125, -0.0294036865234375, 0.0164794921875, -0.0069427490234375, -0.040740966796875, -0.048797607421875, 0.0506591796875, 0.035369873046875, 0.08740234375, 0.014739990234375, 0.0143280029296875, -0.01415252685546875, 0.01267242431640625, -0.0496826171875, 0.0390625, 0.0034618377685546875, -0.05517578125, -0.006473541259765625, -0.029205322265625, -0.061981201171875, 0.01258087158203125, -0.01422882080078125, -0.04107666015625, 0.030059814453125, 0.007213592529296875, -0.0287017822265625, 0.0213775634765625, -0.0667724609375, 0.0908203125, -0.0125274658203125, 0.0137481689453125, 0.0036983489990234375, -0.035400390625, 0.03399658203125, 0.00800323486328125, 0.007732391357421875, -0.007747650146484375, -0.003787994384765625, 0.07037353515625, -0.02532958984375, 0.06365966796875, -0.00833892822265625, -0.0019044876098632812, 0.041656494140625, 0.0024738311767578125, 0.0321044921875, 0.0120391845703125, -0.00406646728515625, 0.01381683349609375, -0.0179595947265625, -0.030029296875, -0.032989501953125, 0.034576416015625, -0.0677490234375, -0.04534912109375, -0.0237274169921875, -0.05242919921875, -0.0061798095703125, 0.0142822265625, 0.0648193359375, 0.031219482421875, -0.01062774658203125, 
0.0362548828125, 0.023223876953125, -0.0271759033203125, 0.05029296875, 0.054107666015625, -0.05096435546875, -0.02496337890625, 0.044464111328125, 0.0017385482788085938, 0.0202178955078125, -0.017913818359375, -0.0007696151733398438, -0.0322265625, -0.032257080078125, -0.032745361328125, 0.00957489013671875, -0.049041748046875, -0.03021240234375, -0.034149169921875, -0.03631591796875, -0.019744873046875, -0.0184173583984375, -0.03863525390625, -0.0276336669921875, -0.03924560546875, -0.01763916015625, 0.0400390625, 0.0262908935546875, 0.03582763671875, 0.057891845703125, -0.062164306640625, 0.0164794921875, 0.0048370361328125, 0.0204925537109375, 0.004207611083984375, -0.05877685546875, -0.0228118896484375, -0.02044677734375, -0.040740966796875, -0.07525634765625, 0.04388427734375, -0.0038089752197265625, 0.0292816162109375, 0.0372314453125, 0.00605010986328125, 0.0350341796875, -0.024688720703125, 0.0673828125, 0.03094482421875, -0.06085205078125, 0.052947998046875, -0.01947021484375, 0.023681640625, 0.035858154296875, 0.012725830078125, -0.041961669921875, -0.037200927734375, -0.08795166015625, -0.0650634765625, 0.0406494140625, 0.04217529296875, 0.0038356781005859375, 0.00461578369140625, 0.002025604248046875, 0.018402099609375, 0.006763458251953125, -0.070556640625, -0.05413818359375, -0.032989501953125, -0.02099609375, -0.0248260498046875, -0.0128936767578125, -0.00608062744140625, -0.0027713775634765625, 0.058349609375, 0.0005831718444824219, 0.02325439453125, -0.0219268798828125, 0.004985809326171875, 0.0011110305786132812, 0.0018711090087890625, 0.03057861328125, 0.051971435546875, -0.034759521484375, 0.0048065185546875, 0.005615234375, -0.0589599609375, 0.007720947265625, 0.0238800048828125, -0.01531219482421875, -0.00445556640625, 0.02484130859375, 0.06658935546875, -0.005859375, -0.03509521484375, 0.030487060546875, -0.0092620849609375, -0.003208160400390625, -0.00528717041015625, 0.02099609375, 0.00284576416015625, 0.0127410888671875, 
0.045806884765625, 0.0296630859375, 0.0123138427734375, -0.01537322998046875, 0.0081787109375, 0.006618499755859375, -0.039642333984375, -0.00434112548828125, 0.06201171875, 0.0237884521484375, -0.016021728515625, 0.029022216796875, 0.0016765594482421875, -0.004302978515625, 0.07415771484375, 0.055419921875, 0.049163818359375, -0.0142822265625, 0.01247406005859375, 0.0234832763671875, 0.02947998046875, 0.0087432861328125, 0.03118896484375, 0.04779052734375, -0.06329345703125, -0.01085662841796875, -0.037933349609375, -0.0281829833984375, 0.0201416015625, -0.0496826171875, 0.041961669921875, -0.034454345703125, -0.01174163818359375, 0.007465362548828125, 0.00684356689453125, -0.053253173828125, 0.0460205078125, -0.006046295166015625, 0.08990478515625, -0.07635498046875, 0.0556640625, 0.009552001953125, -0.063232421875, -0.0872802734375, -0.036651611328125, -0.00904083251953125, -0.072509765625, 0.0190887451171875, 0.020111083984375, 0.00551605224609375, -0.005771636962890625, -0.04095458984375, -0.06390380859375, 0.0928955078125, 0.02032470703125, -0.028594970703125, 0.00968170166015625, -0.00019788742065429688, 0.0267181396484375, -0.03778076171875, 0.0280914306640625, 0.018280029296875, 0.0185546875, 0.01470184326171875, -0.04486083984375, 0.0255584716796875, -0.0178070068359375, 0.0122528076171875, -0.01117706298828125, -0.078369140625, 0.05731201171875, -0.0093841552734375, 0.003204345703125, 0.01934814453125, 0.0716552734375, 0.023345947265625, 0.00437164306640625, 0.0236053466796875, 0.0194091796875, 0.030181884765625, -0.0151519775390625, 0.06280517578125, -0.038421630859375, 0.051177978515625, 0.057830810546875, -0.0007338523864746094, 0.04913330078125, 0.007617950439453125, 0.0005936622619628906, 0.0259246826171875, 0.05908203125, -0.031219482421875, 0.035186767578125, -0.0311279296875, -0.003757476806640625, 0.01114654541015625, -0.0007481575012207031, -0.0537109375, 0.03253173828125, 0.0275421142578125, -0.057708740234375, -0.01346588134765625, 
-0.005649566650390625, 0.003131866455078125, -0.00957489013671875, -0.026153564453125, 0.030487060546875, -0.0011796951293945312, -0.031585693359375, 0.053436279296875, 0.023345947265625, 0.050384521484375, -0.044586181640625, 0.0115966796875, -0.0194244384765625, 0.01806640625, -0.01959228515625, -0.0540771484375, 0.0311279296875, 0.0176544189453125, -0.02105712890625, 0.0030975341796875, 0.0469970703125, -0.01263427734375, -0.07763671875, 0.00714874267578125, 0.03167724609375, -0.002429962158203125, 0.004215240478515625, -0.06134033203125, 0.0144195556640625, 0.0105438232421875, -0.03753662109375, -0.0128936767578125, 0.0042266845703125, 0.0293426513671875, 0.05426025390625, 0.03277587890625, -0.006732940673828125, -0.0007224082946777344, 0.01514434814453125, 0.051300048828125, -0.07928466796875, -0.0528564453125, -0.0567626953125, 0.0341796875, 0.0021839141845703125, -0.065185546875, 0.06842041015625, 0.05255126953125, 0.0187225341796875, -0.01477813720703125, 0.023223876953125, -0.0113983154296875, 0.0380859375, -0.04736328125, 0.06475830078125, -0.00551605224609375, 0.02484130859375, -0.0229034423828125, -0.11053466796875, -0.0142974853515625, 0.043426513671875, -0.00959014892578125, 0.01277923583984375, 0.060638427734375, 0.08648681640625, -0.00753021240234375, 0.0360107421875, 0.0207366943359375, 0.02069091796875, 0.032379150390625, 0.035491943359375, 0.03302001953125, -0.0648193359375, 0.05718994140625, -0.0217132568359375, -0.03076171875, -0.008758544921875, -0.050689697265625, -0.044708251953125, -0.033416748046875, -0.018524169921875, -0.042388916015625, -0.02703857421875, 0.05255126953125, 0.053375244140625, -0.061248779296875, -0.03656005859375, 0.0003273487091064453, -0.0179443359375, -0.018890380859375, -0.0209197998046875, 0.043365478515625, -0.009613037109375, -0.04345703125, 0.01256561279296875, -0.0163726806640625, 0.021392822265625, -0.042266845703125, -0.0135498046875, -0.004192352294921875, 0.01200103759765625, 0.0205841064453125, 
-0.005474090576171875, -0.066162109375, -0.03125, 0.00608062744140625, -0.02838134765625, -0.004871368408203125, 0.0004324913024902344, -0.0426025390625, -0.005340576171875, 0.005611419677734375, 0.0192108154296875, 0.02984619140625, -0.00026416778564453125, 0.024627685546875, -0.04669189453125, 0.0212249755859375, 0.01442718505859375, 0.0361328125, 0.0222320556640625, -0.032989501953125, 0.0304412841796875, 0.01012420654296875, -0.052459716796875, -0.031707763671875, 0.0200347900390625, -0.09576416015625, -0.019622802734375, 0.0855712890625, 0.0200347900390625, -0.021575927734375, 0.022491455078125, -0.046905517578125, 0.017120361328125, -0.034881591796875, 0.06201171875, 0.047149658203125, -0.0173187255859375, -0.018524169921875, -0.03900146484375, 0.034912109375, 0.005352020263671875, -0.0650634765625, 0.0158843994140625, 0.037933349609375, -0.004680633544921875, 0.0297088623046875, 0.02166748046875, -0.00957489013671875, 0.026153564453125, 0.0267486572265625, 0.01873779296875, -0.007415771484375, -0.0226593017578125, -0.00438690185546875, 0.0142822265625, -0.01641845703125, -0.019317626953125 ] ]
Writer/InstructPalmyra-20b
2023-08-31T17:01:39.000Z
[ "transformers", "pytorch", "gpt2", "text-generation", "InstructGPT", "hf", "palmyra", "en", "dataset:Writer/palmyra-data-index", "license:apache-2.0", "endpoints_compatible", "has_space", "text-generation-inference", "region:us" ]
text-generation
Writer
null
null
Writer/InstructPalmyra-20b
36
5,862
transformers
2023-04-28T15:52:03
--- license: apache-2.0 language: - en tags: - InstructGPT - hf - palmyra datasets: - Writer/palmyra-data-index --- # InstructPalmyra-20b - **Developed by:** [https://writer.com/](https://writer.com/); - **Model type:** Causal decoder-only; - **Language(s) (NLP):** English; - **License:** Apache 2.0; - **Finetuned from model:** [Palmyra-20B](https://huggingface.co/Writer/palmyra-large). <style> img { display: inline; } </style> ## Model Description Introducing InstructPalmyra-20b, a state-of-the-art instruction-following 20b language model designed to deliver exceptional performance and versatility. Derived from the foundational architecture of [Palmyra-20b](https://huggingface.co/Writer/palmyra-large), InstructPalmyra-20b is specifically tailored to address the growing demand for advanced natural language processing and comprehension capabilities. The InstructPalmyra-20b model is meticulously trained on an extensive dataset of approximately 70,000 instruction-response records. These records are generated by our dedicated Writer Linguist team, who possess considerable expertise in language modeling and fine-tuning techniques. By leveraging their skills and knowledge, the InstructPalmyra-20b model is primed to offer unparalleled proficiency in understanding and executing language-based instructions. One of the key differentiators of InstructPalmyra-20b lies in its ability to process complex instructions and generate accurate, contextually appropriate responses. This makes it an ideal choice for a wide range of applications, including virtual assistants, customer support, content generation, and more. Additionally, the model's comprehensive training enables it to adapt and perform well under varying conditions and contexts, further expanding its potential use cases. 
## Usage : ```python import torch from transformers import AutoTokenizer, AutoModelForCausalLM model_name = "Writer/InstructPalmyra-20b" tokenizer = AutoTokenizer.from_pretrained(model_name) model = AutoModelForCausalLM.from_pretrained( model_name, device_map="auto", torch_dtype=torch.float16 ) instruction = "Describe a futuristic device that revolutionizes space travel." PROMPT_DICT = { "prompt_input": ( "Below is an instruction that describes a task, paired with an input that provides further context. " "Write a response that appropriately completes the request\n\n" "### Instruction:\n{instruction}\n\n### Input:\n{input}\n\n### Response:" ), "prompt_no_input": ( "Below is an instruction that describes a task. " "Write a response that appropriately completes the request.\n\n" "### Instruction:\n{instruction}\n\n### Response:" ), } text = ( PROMPT_DICT["prompt_no_input"].format(instruction=instruction) if not input else PROMPT_DICT["prompt_input"].format(instruction=instruction, input=input) ) model_inputs = tokenizer(text, return_tensors="pt").to("cuda") output_ids = model.generate( **model_inputs, max_length=256, ) output_text = tokenizer.batch_decode(output_ids, skip_special_tokens=True)[0] clean_output = output_text.split("### Response:")[1].strip() print(clean_output) ``` It can also be used with text-generation-inference ```sh model=Writer/InstructPalmyra-20b volume=$PWD/data docker run --gpus all --shm-size 1g -p 8080:80 -v $volume:/data ghcr.io/huggingface/text-generation-inference --model-id $model ``` ### Limitations and Biases InstructPalmyra's core functionality is to take a string of text and predict the next token. While language models are widely used for other tasks, there are many unknowns in this work. When prompting InstructPalmyra, keep in mind that the next statistically likely token is not always the token that produces the most "accurate" text. Never rely on InstructPalmyra to produce factually correct results. 
InstructPalmyra was trained on Writer’s custom data. As with all language models, it is difficult to predict how InstructPalmyra will respond to specific prompts, and offensive content may appear unexpectedly. We recommend that the outputs be curated or filtered by humans before they are released, both to censor undesirable content and to improve the quality of the results. ## Uses ### Out-of-Scope Use Production use without adequate assessment of risks and mitigation; any use cases which may be considered irresponsible or harmful. ## Bias, Risks, and Limitations InstructPalmyra-20b is mostly trained on English data, and will not generalize appropriately to other languages. Furthermore, as it is trained on a large-scale corpora representative of the web, it will carry the stereotypes and biases commonly encountered online. ### Recommendations We recommend users of InstructPalmyra-20b to develop guardrails and to take appropriate precautions for any production use. ## Citation and Related Information To cite this model: ``` @misc{InstructPalmyra, author = {Writer Engineering team}, title = {{InstructPalmyra-20b : Instruct tuned Palmyra-Large model}}, howpublished = {\url{https://dev.writer.com}}, year = 2023, month = Augest } ``` [![Model architecture](https://img.shields.io/badge/Model%20Arch-Transformer%20Decoder-green)](#model-architecture)|[![Model size](https://img.shields.io/badge/Params-20B-green)](#model-architecture)|[![Language](https://img.shields.io/badge/Language-en--US-lightgrey#model-badge)](#datasets)|![AUR license](https://img.shields.io/badge/license-Apache%202-blue)
5,524
[ [ -0.0281524658203125, -0.0723876953125, 0.0196075439453125, 0.02166748046875, -0.0140380859375, -0.000980377197265625, -0.00893402099609375, -0.035003662109375, -0.0163421630859375, 0.035675048828125, -0.039825439453125, -0.036895751953125, -0.043365478515625, -0.0006570816040039062, -0.02825927734375, 0.081298828125, -0.007259368896484375, 0.01352691650390625, -0.0115509033203125, 0.0108642578125, -0.04522705078125, -0.035247802734375, -0.04986572265625, -0.027740478515625, 0.01317596435546875, 0.0303192138671875, 0.0343017578125, 0.0457763671875, 0.03326416015625, 0.0203399658203125, -0.00740814208984375, 0.0164947509765625, -0.006824493408203125, -0.0203704833984375, -0.0198211669921875, -0.0236968994140625, -0.0283203125, -0.004726409912109375, 0.044189453125, 0.04888916015625, -0.0088653564453125, 0.0384521484375, 0.00510406494140625, 0.024200439453125, -0.040771484375, 0.03173828125, -0.0300750732421875, -0.0106353759765625, -0.012298583984375, -0.0091552734375, -0.048919677734375, -0.02691650390625, -0.01059722900390625, -0.032318115234375, -0.003215789794921875, 0.017120361328125, 0.078857421875, 0.01059722900390625, -0.0301971435546875, -0.0243377685546875, -0.05548095703125, 0.038848876953125, -0.0714111328125, 0.01216888427734375, 0.03509521484375, 0.0251922607421875, -0.0197296142578125, -0.065673828125, -0.057861328125, -0.028289794921875, -0.00811004638671875, 0.00975799560546875, -0.026947021484375, 0.00704193115234375, 0.0277862548828125, 0.0179595947265625, -0.039947509765625, 0.0229949951171875, -0.043212890625, -0.0166168212890625, 0.0250244140625, 0.025054931640625, 0.032958984375, -0.027008056640625, -0.0250701904296875, -0.0163421630859375, -0.049346923828125, 0.0218658447265625, 0.047515869140625, 0.0227508544921875, -0.0222320556640625, 0.058563232421875, -0.01041412353515625, 0.055145263671875, 0.033294677734375, -0.006351470947265625, 0.01143646240234375, -0.0199737548828125, -0.028778076171875, -0.0011301040649414062, 0.0784912109375, 
0.0177001953125, 0.0019245147705078125, -0.01097869873046875, -0.007843017578125, 0.018798828125, 0.021942138671875, -0.07818603515625, -0.0261077880859375, 0.021636962890625, -0.036163330078125, -0.0177459716796875, 0.0021419525146484375, -0.05010986328125, -0.004913330078125, -0.018096923828125, 0.04730224609375, -0.0506591796875, -0.0240936279296875, 0.009552001953125, -0.0021724700927734375, 0.017333984375, -0.007534027099609375, -0.092529296875, 0.033935546875, 0.0372314453125, 0.0489501953125, 0.01113128662109375, -0.0237884521484375, -0.0282135009765625, 0.00720977783203125, -0.00933074951171875, 0.0487060546875, -0.036956787109375, -0.0296173095703125, -0.0169219970703125, 0.0183563232421875, -0.01352691650390625, -0.02252197265625, 0.036956787109375, -0.02655029296875, 0.04443359375, -0.005588531494140625, -0.059112548828125, -0.03411865234375, -0.002391815185546875, -0.028778076171875, 0.0714111328125, 0.0225677490234375, -0.076904296875, 0.0215301513671875, -0.062103271484375, -0.024383544921875, 0.00261688232421875, 0.003604888916015625, -0.037567138671875, -0.01329803466796875, 0.0185394287109375, 0.040130615234375, -0.0223846435546875, 0.042938232421875, -0.01137542724609375, -0.027496337890625, 0.0106353759765625, -0.032135009765625, 0.07537841796875, 0.01154327392578125, -0.04541015625, 0.02935791015625, -0.06719970703125, -0.00946044921875, 0.005035400390625, -0.02642822265625, -0.00829315185546875, -0.0117034912109375, 0.01788330078125, 0.0178985595703125, 0.016448974609375, -0.044952392578125, 0.0186767578125, -0.05023193359375, 0.03387451171875, 0.057220458984375, 0.0020236968994140625, 0.04229736328125, -0.0140838623046875, 0.05322265625, 0.005718231201171875, 0.025665283203125, -0.0018205642700195312, -0.038848876953125, -0.0538330078125, -0.01384735107421875, 0.0105438232421875, 0.0491943359375, -0.052215576171875, 0.035491943359375, -0.0003819465637207031, -0.06378173828125, -0.04150390625, -0.00555419921875, 0.05731201171875, 0.06640625, 
0.041900634765625, -0.00933837890625, -0.05755615234375, -0.05950927734375, 0.000823974609375, -0.01308441162109375, 0.0147247314453125, 0.0013723373413085938, 0.049346923828125, -0.024810791015625, 0.04766845703125, -0.0259857177734375, 0.002605438232421875, -0.0294189453125, 0.004650115966796875, 0.037078857421875, 0.061279296875, 0.03570556640625, -0.03546142578125, -0.032928466796875, -0.0022640228271484375, -0.05902099609375, -0.0002300739288330078, -0.008148193359375, -0.02899169921875, 0.0242767333984375, 0.035919189453125, -0.06768798828125, 0.0191650390625, 0.035919189453125, -0.04266357421875, 0.055267333984375, -0.0234832763671875, -0.008819580078125, -0.111328125, 0.0297698974609375, -0.0076141357421875, 0.01142120361328125, -0.03765869140625, 0.012908935546875, 0.01451873779296875, 0.0011005401611328125, -0.0250244140625, 0.052398681640625, -0.039154052734375, 0.022918701171875, -0.01248931884765625, -0.01479339599609375, -0.0166778564453125, 0.056427001953125, -0.0115509033203125, 0.053375244140625, 0.04803466796875, -0.0574951171875, 0.0211181640625, 0.035980224609375, -0.021270751953125, 0.024993896484375, -0.06390380859375, -0.0009312629699707031, -0.01314544677734375, 0.00922393798828125, -0.06976318359375, -0.0234832763671875, 0.048553466796875, -0.033355712890625, 0.039947509765625, -0.0186614990234375, -0.046966552734375, -0.044891357421875, -0.00455474853515625, 0.004131317138671875, 0.03857421875, -0.04046630859375, 0.05029296875, 0.030242919921875, 0.0019311904907226562, -0.05926513671875, -0.04937744140625, -0.0089263916015625, -0.022674560546875, -0.028045654296875, 0.028839111328125, -0.023345947265625, -0.002048492431640625, 0.004627227783203125, 0.005764007568359375, 0.0006299018859863281, 0.0088043212890625, 0.043182373046875, 0.040069580078125, -0.0094146728515625, 0.0026035308837890625, 0.018096923828125, -0.01076507568359375, 0.0275726318359375, -0.0277099609375, 0.0599365234375, -0.007678985595703125, -0.0212554931640625, 
-0.049713134765625, 0.020263671875, 0.038421630859375, -0.0218963623046875, 0.0728759765625, 0.060302734375, -0.0254974365234375, -0.00160980224609375, -0.025054931640625, -0.015777587890625, -0.036865234375, 0.0254669189453125, -0.0242767333984375, -0.040985107421875, 0.03857421875, 0.0165557861328125, 0.00823211669921875, 0.0469970703125, 0.049713134765625, 0.00621795654296875, 0.08392333984375, 0.058837890625, -0.004871368408203125, 0.043365478515625, -0.042694091796875, 0.01352691650390625, -0.06842041015625, -0.04443359375, -0.035858154296875, 0.0082550048828125, -0.05401611328125, -0.0008120536804199219, 0.01348876953125, -0.0013341903686523438, -0.044464111328125, 0.047821044921875, -0.056182861328125, 0.01163482666015625, 0.05902099609375, 0.01727294921875, 0.00021159648895263672, -0.00992584228515625, -0.0227813720703125, 0.01456451416015625, -0.05938720703125, -0.04986572265625, 0.0877685546875, 0.0185699462890625, 0.052154541015625, 0.00567626953125, 0.042022705078125, -0.01153564453125, 0.0228729248046875, -0.051116943359375, 0.040679931640625, 0.0036487579345703125, -0.051177978515625, -0.0093536376953125, -0.02392578125, -0.08306884765625, 0.01149749755859375, -0.00785064697265625, -0.057037353515625, 0.0020961761474609375, 0.01239776611328125, -0.033355712890625, 0.028167724609375, -0.061676025390625, 0.0863037109375, -0.00283050537109375, -0.01422119140625, -0.00661468505859375, -0.06109619140625, 0.035919189453125, 0.0106353759765625, 0.0061798095703125, 0.005962371826171875, 0.01629638671875, 0.05755615234375, -0.04388427734375, 0.07965087890625, -0.0199432373046875, 0.0014791488647460938, 0.019287109375, -0.00307464599609375, 0.03302001953125, -0.0023956298828125, 0.0023174285888671875, -0.01148223876953125, 0.0103607177734375, -0.02056884765625, -0.028472900390625, 0.042694091796875, -0.06591796875, -0.040008544921875, -0.0316162109375, -0.042388916015625, -0.0072479248046875, 0.0190582275390625, 0.048095703125, 0.0489501953125, 
-0.006763458251953125, -0.0031337738037109375, 0.049407958984375, -0.017059326171875, 0.0579833984375, 0.036895751953125, -0.0211639404296875, -0.020294189453125, 0.054962158203125, 0.011138916015625, 0.009765625, 0.03955078125, 0.00969696044921875, -0.030914306640625, -0.04547119140625, -0.05267333984375, 0.0166473388671875, -0.06005859375, -0.025054931640625, -0.06982421875, -0.0185394287109375, -0.024932861328125, 0.0075531005859375, -0.01470947265625, -0.032958984375, -0.04248046875, -0.0173187255859375, 0.040740966796875, 0.047515869140625, 0.0021152496337890625, 0.029632568359375, -0.04833984375, 0.0149688720703125, 0.00748443603515625, 0.01258087158203125, 0.0073394775390625, -0.0615234375, -0.02447509765625, 0.0043182373046875, -0.026947021484375, -0.0596923828125, 0.043670654296875, -0.002399444580078125, 0.0528564453125, 0.017120361328125, -0.004344940185546875, 0.047515869140625, -0.033294677734375, 0.052032470703125, 0.0033321380615234375, -0.0731201171875, 0.04986572265625, -0.0216064453125, 0.04180908203125, 0.0204925537109375, 0.037689208984375, -0.02386474609375, -0.04034423828125, -0.057861328125, -0.07666015625, 0.05218505859375, 0.003978729248046875, 0.0279083251953125, -0.00945281982421875, 0.017730712890625, 0.01458740234375, 0.0072174072265625, -0.05963134765625, -0.01519775390625, -0.0205841064453125, -0.029937744140625, 0.007282257080078125, -0.0174407958984375, 0.0016155242919921875, -0.0292510986328125, 0.06427001953125, 0.0017681121826171875, 0.032012939453125, -0.005870819091796875, -0.01812744140625, -0.0038242340087890625, 0.01044464111328125, 0.042816162109375, 0.061431884765625, -0.01485443115234375, 0.0018301010131835938, 0.01171875, -0.050933837890625, -0.0002760887145996094, 0.0230712890625, -0.0179290771484375, -0.006237030029296875, 0.0267486572265625, 0.07080078125, 0.01629638671875, -0.040863037109375, 0.0179290771484375, -0.00254058837890625, 0.00946044921875, -0.03533935546875, 0.00859832763671875, 0.01141357421875, 
0.018707275390625, 0.0181121826171875, 0.014678955078125, 0.01354217529296875, -0.03619384765625, -0.0016241073608398438, 0.0261993408203125, -0.00643157958984375, -0.0311126708984375, 0.0765380859375, -0.0031070709228515625, -0.015625, 0.0435791015625, -0.023529052734375, -0.021270751953125, 0.051788330078125, 0.05523681640625, 0.05670166015625, -0.0177154541015625, 0.01514434814453125, 0.04827880859375, 0.024505615234375, -0.006256103515625, 0.04376220703125, 0.005748748779296875, -0.039031982421875, -0.031341552734375, -0.05230712890625, -0.031341552734375, -0.0011167526245117188, -0.048004150390625, 0.021636962890625, -0.0538330078125, -0.016571044921875, 0.00971221923828125, -0.0019121170043945312, -0.047119140625, 0.021270751953125, -0.01320648193359375, 0.060150146484375, -0.057159423828125, 0.06622314453125, 0.04803466796875, -0.0765380859375, -0.08978271484375, -0.0175933837890625, -0.0192108154296875, -0.049072265625, 0.0494384765625, 0.017669677734375, -0.0020427703857421875, 0.0084228515625, -0.059539794921875, -0.05462646484375, 0.080810546875, 0.031646728515625, -0.02764892578125, -0.01166534423828125, -0.00972747802734375, 0.042236328125, -0.020721435546875, 0.0287322998046875, 0.022003173828125, 0.0255126953125, -0.010711669921875, -0.0732421875, 0.02923583984375, -0.027313232421875, 0.00002181529998779297, 0.005878448486328125, -0.044097900390625, 0.0802001953125, -0.01763916015625, -0.00908660888671875, 0.01629638671875, 0.053314208984375, 0.0211181640625, 0.003894805908203125, 0.00835418701171875, 0.033538818359375, 0.0604248046875, 0.00366973876953125, 0.093994140625, -0.03826904296875, 0.0201416015625, 0.08172607421875, 0.0248565673828125, 0.07080078125, 0.035430908203125, -0.01427459716796875, 0.056304931640625, 0.054901123046875, -0.0173187255859375, 0.03826904296875, 0.0063323974609375, -0.004871368408203125, 0.007274627685546875, -0.005397796630859375, -0.0335693359375, 0.04425048828125, 0.022125244140625, -0.036865234375, 0.007080078125, 
0.01038360595703125, 0.037872314453125, -0.0033359527587890625, -0.019622802734375, 0.04168701171875, -0.004230499267578125, -0.058502197265625, 0.061431884765625, 0.01187896728515625, 0.057464599609375, -0.048095703125, -0.0019702911376953125, -0.01715087890625, 0.0233154296875, -0.0146636962890625, -0.05126953125, 0.020294189453125, -0.0015621185302734375, -0.021270751953125, -0.0082855224609375, 0.031341552734375, -0.03369140625, -0.058197021484375, 0.00974273681640625, 0.0176239013671875, 0.01385498046875, 0.005794525146484375, -0.037506103515625, -0.007572174072265625, -0.012176513671875, -0.0135040283203125, 0.0132598876953125, 0.02203369140625, -0.00258636474609375, 0.0677490234375, 0.049530029296875, -0.0091552734375, 0.0028743743896484375, 0.0010099411010742188, 0.0693359375, -0.03924560546875, -0.0305023193359375, -0.0701904296875, 0.05645751953125, -0.0007739067077636719, -0.03729248046875, 0.076416015625, 0.0433349609375, 0.070556640625, -0.0238189697265625, 0.056640625, -0.0186920166015625, 0.0193939208984375, -0.0516357421875, 0.0584716796875, -0.0154876708984375, 0.0116119384765625, -0.0265350341796875, -0.060791015625, -0.0103912353515625, 0.042144775390625, -0.0113677978515625, 0.004489898681640625, 0.053375244140625, 0.072265625, -0.012786865234375, -0.00901031494140625, 0.0006513595581054688, 0.0223846435546875, 0.02203369140625, 0.0482177734375, 0.05682373046875, -0.043853759765625, 0.04718017578125, -0.0301361083984375, -0.007808685302734375, -0.01268768310546875, -0.05474853515625, -0.08587646484375, -0.04840087890625, -0.0228118896484375, -0.03924560546875, -0.0022716522216796875, 0.0794677734375, 0.049407958984375, -0.0709228515625, -0.0220489501953125, -0.0172271728515625, -0.0040283203125, -0.0262298583984375, -0.0172882080078125, 0.038177490234375, -0.0200042724609375, -0.05889892578125, 0.026092529296875, 0.0008802413940429688, 0.0151214599609375, -0.0293731689453125, -0.0013914108276367188, -0.0234832763671875, 0.003078460693359375, 
0.039886474609375, 0.024627685546875, -0.053375244140625, -0.01265716552734375, 0.00942230224609375, -0.01471710205078125, -0.00946044921875, 0.0198822021484375, -0.04766845703125, 0.0294952392578125, 0.0316162109375, 0.03045654296875, 0.05145263671875, -0.0266571044921875, 0.04248046875, -0.028656005859375, 0.021087646484375, 0.00830078125, 0.042327880859375, 0.03173828125, -0.041259765625, 0.036712646484375, 0.00643157958984375, -0.041717529296875, -0.057464599609375, 0.0084686279296875, -0.0792236328125, -0.0213775634765625, 0.0833740234375, -0.01012420654296875, -0.0206451416015625, 0.002838134765625, -0.039764404296875, 0.042327880859375, -0.0274505615234375, 0.062225341796875, 0.04345703125, -0.006664276123046875, -0.0192718505859375, -0.0390625, 0.029876708984375, 0.014617919921875, -0.06866455078125, -0.0160675048828125, 0.03167724609375, 0.0313720703125, 0.001300811767578125, 0.033782958984375, -0.002552032470703125, 0.0214996337890625, 0.0067138671875, 0.018096923828125, -0.0215911865234375, -0.0100555419921875, -0.0167694091796875, 0.01275634765625, 0.00225067138671875, -0.027740478515625 ] ]
uukuguy/speechless-llama2-hermes-orca-platypus-13b
2023-09-01T22:17:02.000Z
[ "transformers", "safetensors", "llama", "text-generation", "facebook", "meta", "pytorch", "llama-2", "en", "dataset:garage-bAInd/Open-Platypus", "arxiv:2307.09288", "has_space", "text-generation-inference", "region:us" ]
text-generation
uukuguy
null
null
uukuguy/speechless-llama2-hermes-orca-platypus-13b
1
5,862
transformers
2023-09-01T17:04:50
--- extra_gated_heading: Access Llama 2 on Hugging Face extra_gated_description: >- This is a form to enable access to Llama 2 on Hugging Face after you have been granted access from Meta. Please visit the [Meta website](https://ai.meta.com/resources/models-and-libraries/llama-downloads) and accept our license terms and acceptable use policy before submitting this form. Requests will be processed in 1-2 days. extra_gated_prompt: "**Your Hugging Face account email address MUST match the email you provide on the Meta website, or your request will not be approved.**" extra_gated_button_content: Submit extra_gated_fields: I agree to share my name, email address and username with Meta and confirm that I have already been granted download access on the Meta website: checkbox language: - en datasets: - garage-bAInd/Open-Platypus library_name: transformers pipeline_tag: text-generation inference: false tags: - facebook - meta - pytorch - llama - llama-2 --- <p><h1> speechless-llama2-hermes-orca-platypus-13b </h1></p> speechless-llama2-hermes-orca-platypus-13b is a merge of NousResearch/Nous-Hermes-Llama2-13b and Open-Orca/OpenOrca-Platypus2-13B. | Metric | Value | | --- | --- | | ARC | 60.92 | | HellaSwag | 83.50 | | MMLU | 59.39 | | TruthfulQA | 54.29 | | Average | 64.52 | # **Llama 2** Llama 2 is a collection of pretrained and fine-tuned generative text models ranging in scale from 7 billion to 70 billion parameters. This is the repository for the 13B pretrained model, converted for the Hugging Face Transformers format. Links to other models can be found in the index at the bottom. ## Model Details *Note: Use of this model is governed by the Meta license. 
In order to download the model weights and tokenizer, please visit the [website](https://ai.meta.com/resources/models-and-libraries/llama-downloads/) and accept our License before requesting access here.* Meta developed and publicly released the Llama 2 family of large language models (LLMs), a collection of pretrained and fine-tuned generative text models ranging in scale from 7 billion to 70 billion parameters. Our fine-tuned LLMs, called Llama-2-Chat, are optimized for dialogue use cases. Llama-2-Chat models outperform open-source chat models on most benchmarks we tested, and in our human evaluations for helpfulness and safety, are on par with some popular closed-source models like ChatGPT and PaLM. **Model Developers** Meta **Variations** Llama 2 comes in a range of parameter sizes — 7B, 13B, and 70B — as well as pretrained and fine-tuned variations. **Input** Models input text only. **Output** Models generate text only. **Model Architecture** Llama 2 is an auto-regressive language model that uses an optimized transformer architecture. The tuned versions use supervised fine-tuning (SFT) and reinforcement learning with human feedback (RLHF) to align to human preferences for helpfulness and safety. ||Training Data|Params|Content Length|GQA|Tokens|LR| |---|---|---|---|---|---|---| |Llama 2|*A new mix of publicly available online data*|7B|4k|&#10007;|2.0T|3.0 x 10<sup>-4</sup>| |Llama 2|*A new mix of publicly available online data*|13B|4k|&#10007;|2.0T|3.0 x 10<sup>-4</sup>| |Llama 2|*A new mix of publicly available online data*|70B|4k|&#10004;|2.0T|1.5 x 10<sup>-4</sup>| *Llama 2 family of models.* Token counts refer to pretraining data only. All models are trained with a global batch-size of 4M tokens. Bigger models - 70B -- use Grouped-Query Attention (GQA) for improved inference scalability. **Model Dates** Llama 2 was trained between January 2023 and July 2023. **Status** This is a static model trained on an offline dataset. 
Future versions of the tuned models will be released as we improve model safety with community feedback. **License** A custom commercial license is available at: [https://ai.meta.com/resources/models-and-libraries/llama-downloads/](https://ai.meta.com/resources/models-and-libraries/llama-downloads/) **Research Paper** ["Llama-2: Open Foundation and Fine-tuned Chat Models"](arxiv.org/abs/2307.09288) ## Intended Use **Intended Use Cases** Llama 2 is intended for commercial and research use in English. Tuned models are intended for assistant-like chat, whereas pretrained models can be adapted for a variety of natural language generation tasks. To get the expected features and performance for the chat versions, a specific formatting needs to be followed, including the `INST` and `<<SYS>>` tags, `BOS` and `EOS` tokens, and the whitespaces and breaklines in between (we recommend calling `strip()` on inputs to avoid double-spaces). See our reference code in github for details: [`chat_completion`](https://github.com/facebookresearch/llama/blob/main/llama/generation.py#L212). **Out-of-scope Uses** Use in any manner that violates applicable laws or regulations (including trade compliance laws).Use in languages other than English. Use in any other way that is prohibited by the Acceptable Use Policy and Licensing Agreement for Llama 2. ## Hardware and Software **Training Factors** We used custom training libraries, Meta's Research Super Cluster, and production clusters for pretraining. Fine-tuning, annotation, and evaluation were also performed on third-party cloud compute. **Carbon Footprint** Pretraining utilized a cumulative 3.3M GPU hours of computation on hardware of type A100-80GB (TDP of 350-400W). Estimated total emissions were 539 tCO2eq, 100% of which were offset by Meta’s sustainability program. 
||Time (GPU hours)|Power Consumption (W)|Carbon Emitted(tCO<sub>2</sub>eq)| |---|---|---|---| |Llama 2 7B|184320|400|31.22| |Llama 2 13B|368640|400|62.44| |Llama 2 70B|1720320|400|291.42| |Total|3311616||539.00| **CO<sub>2</sub> emissions during pretraining.** Time: total GPU time required for training each model. Power Consumption: peak power capacity per GPU device for the GPUs used adjusted for power usage efficiency. 100% of the emissions are directly offset by Meta's sustainability program, and because we are openly releasing these models, the pretraining costs do not need to be incurred by others. ## Training Data **Overview** Llama 2 was pretrained on 2 trillion tokens of data from publicly available sources. The fine-tuning data includes publicly available instruction datasets, as well as over one million new human-annotated examples. Neither the pretraining nor the fine-tuning datasets include Meta user data. **Data Freshness** The pretraining data has a cutoff of September 2022, but some tuning data is more recent, up to July 2023. ## Evaluation Results In this section, we report the results for the Llama 1 and Llama 2 models on standard academic benchmarks.For all the evaluations, we use our internal evaluations library. |Model|Size|Code|Commonsense Reasoning|World Knowledge|Reading Comprehension|Math|MMLU|BBH|AGI Eval| |---|---|---|---|---|---|---|---|---|---| |Llama 1|7B|14.1|60.8|46.2|58.5|6.95|35.1|30.3|23.9| |Llama 1|13B|18.9|66.1|52.6|62.3|10.9|46.9|37.0|33.9| |Llama 1|33B|26.0|70.0|58.4|67.6|21.4|57.8|39.8|41.7| |Llama 1|65B|30.7|70.7|60.5|68.6|30.8|63.4|43.5|47.6| |Llama 2|7B|16.8|63.9|48.9|61.3|14.6|45.3|32.6|29.3| |Llama 2|13B|24.5|66.9|55.4|65.8|28.7|54.8|39.4|39.1| |Llama 2|70B|**37.5**|**71.9**|**63.6**|**69.4**|**35.2**|**68.9**|**51.2**|**54.2**| **Overall performance on grouped academic benchmarks.** *Code:* We report the average pass@1 scores of our models on HumanEval and MBPP. 
*Commonsense Reasoning:* We report the average of PIQA, SIQA, HellaSwag, WinoGrande, ARC easy and challenge, OpenBookQA, and CommonsenseQA. We report 7-shot results for CommonSenseQA and 0-shot results for all other benchmarks. *World Knowledge:* We evaluate the 5-shot performance on NaturalQuestions and TriviaQA and report the average. *Reading Comprehension:* For reading comprehension, we report the 0-shot average on SQuAD, QuAC, and BoolQ. *MATH:* We report the average of the GSM8K (8 shot) and MATH (4 shot) benchmarks at top 1. |||TruthfulQA|Toxigen| |---|---|---|---| |Llama 1|7B|27.42|23.00| |Llama 1|13B|41.74|23.08| |Llama 1|33B|44.19|22.57| |Llama 1|65B|48.71|21.77| |Llama 2|7B|33.29|**21.25**| |Llama 2|13B|41.86|26.10| |Llama 2|70B|**50.18**|24.60| **Evaluation of pretrained LLMs on automatic safety benchmarks.** For TruthfulQA, we present the percentage of generations that are both truthful and informative (the higher the better). For ToxiGen, we present the percentage of toxic generations (the smaller the better). |||TruthfulQA|Toxigen| |---|---|---|---| |Llama-2-Chat|7B|57.04|**0.00**| |Llama-2-Chat|13B|62.18|**0.00**| |Llama-2-Chat|70B|**64.14**|0.01| **Evaluation of fine-tuned LLMs on different safety datasets.** Same metric definitions as above. ## Ethical Considerations and Limitations Llama 2 is a new technology that carries risks with use. Testing conducted to date has been in English, and has not covered, nor could it cover all scenarios. For these reasons, as with all LLMs, Llama 2’s potential outputs cannot be predicted in advance, and the model may in some instances produce inaccurate, biased or other objectionable responses to user prompts. Therefore, before deploying any applications of Llama 2, developers should perform safety testing and tuning tailored to their specific applications of the model. 
Please see the Responsible Use Guide available at [https://ai.meta.com/llama/responsible-use-guide/](https://ai.meta.com/llama/responsible-use-guide) ## Reporting Issues Please report any software “bug,” or other problems with the models through one of the following means: - Reporting issues with the model: [github.com/facebookresearch/llama](http://github.com/facebookresearch/llama) - Reporting problematic content generated by the model: [developers.facebook.com/llama_output_feedback](http://developers.facebook.com/llama_output_feedback) - Reporting bugs and security concerns: [facebook.com/whitehat/info](http://facebook.com/whitehat/info) ## Llama Model Index |Model|Llama2|Llama2-hf|Llama2-chat|Llama2-chat-hf| |---|---|---|---|---| |7B| [Link](https://huggingface.co/llamaste/Llama-2-7b) | [Link](https://huggingface.co/llamaste/Llama-2-7b-hf) | [Link](https://huggingface.co/llamaste/Llama-2-7b-chat) | [Link](https://huggingface.co/llamaste/Llama-2-7b-chat-hf)| |13B| [Link](https://huggingface.co/llamaste/Llama-2-13b) | [Link](https://huggingface.co/llamaste/Llama-2-13b-hf) | [Link](https://huggingface.co/llamaste/Llama-2-13b-chat) | [Link](https://huggingface.co/llamaste/Llama-2-13b-hf)| |70B| [Link](https://huggingface.co/llamaste/Llama-2-70b) | [Link](https://huggingface.co/llamaste/Llama-2-70b-hf) | [Link](https://huggingface.co/llamaste/Llama-2-70b-chat) | [Link](https://huggingface.co/llamaste/Llama-2-70b-hf)|
10,753
[ [ -0.01580810546875, -0.05120849609375, 0.0281524658203125, 0.014373779296875, -0.027618408203125, 0.0169677734375, -0.006683349609375, -0.056549072265625, 0.006366729736328125, 0.021820068359375, -0.0540771484375, -0.043121337890625, -0.05010986328125, 0.008148193359375, -0.016693115234375, 0.07965087890625, 0.000396728515625, -0.020111083984375, -0.006664276123046875, 0.0048828125, -0.0308380126953125, -0.0301361083984375, -0.041290283203125, -0.031219482421875, 0.0301971435546875, 0.03497314453125, 0.045501708984375, 0.048553466796875, 0.0413818359375, 0.0189361572265625, -0.020751953125, 0.0181884765625, -0.053253173828125, -0.0197296142578125, 0.01007080078125, -0.034149169921875, -0.05377197265625, 0.011688232421875, 0.028717041015625, 0.0181121826171875, -0.0211334228515625, 0.03704833984375, 0.0093536376953125, 0.037750244140625, -0.039581298828125, 0.01428985595703125, -0.0526123046875, 0.00077056884765625, -0.0167236328125, -0.00868988037109375, -0.01513671875, -0.02203369140625, -0.0120697021484375, -0.06463623046875, -0.007305145263671875, 0.00589752197265625, 0.08160400390625, 0.046142578125, -0.031494140625, -0.01132965087890625, -0.02081298828125, 0.0703125, -0.06256103515625, 0.008209228515625, 0.0400390625, 0.0173797607421875, -0.0182952880859375, -0.0556640625, -0.050933837890625, -0.0123291015625, 0.00482177734375, 0.02935791015625, -0.0298004150390625, -0.0008788108825683594, 0.0113677978515625, 0.032745361328125, -0.04364013671875, 0.042327880859375, -0.03680419921875, -0.0126953125, 0.07891845703125, 0.01751708984375, 0.0026760101318359375, -0.003387451171875, -0.037200927734375, -0.0217437744140625, -0.0582275390625, 0.01314544677734375, 0.038299560546875, -0.0003619194030761719, -0.037384033203125, 0.0474853515625, -0.03167724609375, 0.0214691162109375, 0.0008921623229980469, -0.04302978515625, 0.04058837890625, -0.035736083984375, -0.020233154296875, -0.0110321044921875, 0.070068359375, 0.05316162109375, 0.0090789794921875, 
0.007717132568359375, -0.004467010498046875, 0.0094451904296875, -0.00440216064453125, -0.060150146484375, -0.0030841827392578125, 0.0173187255859375, -0.02960205078125, -0.04083251953125, -0.019317626953125, -0.05206298828125, -0.01309967041015625, -0.00757598876953125, 0.018402099609375, -0.0066986083984375, -0.027740478515625, 0.012603759765625, 0.0043487548828125, 0.0413818359375, 0.0203399658203125, -0.06744384765625, 0.019744873046875, 0.03997802734375, 0.05828857421875, -0.018402099609375, -0.027099609375, 0.004421234130859375, -0.004177093505859375, -0.023651123046875, 0.06689453125, -0.02484130859375, -0.038360595703125, -0.018402099609375, -0.0002720355987548828, 0.01276397705078125, -0.0379638671875, 0.036865234375, -0.02618408203125, 0.0189056396484375, -0.025299072265625, -0.0287017822265625, -0.0247650146484375, 0.01239776611328125, -0.03082275390625, 0.1097412109375, 0.006626129150390625, -0.0400390625, 0.023834228515625, -0.050811767578125, -0.0128326416015625, -0.01331329345703125, 0.007289886474609375, -0.04046630859375, -0.0205535888671875, 0.011871337890625, 0.0271759033203125, -0.048675537109375, 0.032318115234375, -0.017364501953125, -0.0301971435546875, 0.004161834716796875, -0.0322265625, 0.06561279296875, 0.01922607421875, -0.03363037109375, 0.0021381378173828125, -0.0616455078125, 0.0007829666137695312, 0.036834716796875, -0.038055419921875, 0.01904296875, 0.004329681396484375, -0.00945281982421875, 0.01513671875, 0.039093017578125, -0.0308380126953125, 0.012481689453125, -0.0257110595703125, 0.037139892578125, 0.0550537109375, 0.0017442703247070312, 0.01401519775390625, -0.0382080078125, 0.038177490234375, -0.0028362274169921875, 0.028656005859375, 0.0008625984191894531, -0.055755615234375, -0.07855224609375, -0.016632080078125, -0.0009708404541015625, 0.0611572265625, -0.021026611328125, 0.055267333984375, -0.0039215087890625, -0.05645751953125, -0.033447265625, 0.0252685546875, 0.050628662109375, 0.038787841796875, 0.03204345703125, 
-0.0261077880859375, -0.045166015625, -0.07574462890625, 0.0051727294921875, -0.03314208984375, -0.0022792816162109375, 0.02764892578125, 0.048828125, -0.01983642578125, 0.053802490234375, -0.04058837890625, -0.0159912109375, -0.022247314453125, -0.01030731201171875, 0.007659912109375, 0.0253143310546875, 0.047576904296875, -0.028228759765625, -0.01922607421875, -0.0090179443359375, -0.0675048828125, -0.009185791015625, 0.007785797119140625, -0.01325225830078125, 0.02130126953125, 0.022186279296875, -0.047149658203125, 0.03497314453125, 0.0521240234375, -0.01397705078125, 0.04144287109375, -0.0006508827209472656, -0.0113067626953125, -0.07928466796875, 0.0031147003173828125, -0.01450347900390625, 0.004894256591796875, -0.032562255859375, -0.005931854248046875, -0.01415252685546875, 0.00689697265625, -0.046722412109375, 0.04638671875, -0.026580810546875, -0.01174163818359375, -0.007389068603515625, 0.00795745849609375, 0.00400543212890625, 0.04815673828125, -0.007122039794921875, 0.07891845703125, 0.032562255859375, -0.042694091796875, 0.0186920166015625, 0.03240966796875, -0.037261962890625, 0.01207733154296875, -0.062164306640625, 0.023529052734375, 0.00800323486328125, 0.04302978515625, -0.07568359375, -0.0267333984375, 0.0228729248046875, -0.037628173828125, 0.006496429443359375, 0.018829345703125, -0.043426513671875, -0.031036376953125, -0.03485107421875, 0.0228729248046875, 0.058685302734375, -0.035675048828125, 0.01345062255859375, 0.024871826171875, -0.0015382766723632812, -0.0516357421875, -0.061767578125, 0.00536346435546875, -0.0252685546875, -0.041748046875, 0.024688720703125, -0.0131683349609375, -0.0151519775390625, -0.0184326171875, 0.00403594970703125, 0.0022830963134765625, 0.0258636474609375, 0.0273284912109375, 0.0308380126953125, -0.01134490966796875, -0.0018625259399414062, 0.008209228515625, -0.0163726806640625, 0.0018873214721679688, 0.0113677978515625, 0.04547119140625, -0.014312744140625, -0.0161590576171875, -0.05950927734375, 
0.0002880096435546875, 0.0233917236328125, -0.0185089111328125, 0.050628662109375, 0.0293426513671875, -0.015472412109375, 0.018951416015625, -0.059600830078125, -0.00942230224609375, -0.0396728515625, 0.039306640625, -0.016571044921875, -0.06402587890625, 0.040924072265625, 0.004520416259765625, 0.0292510986328125, 0.0570068359375, 0.0467529296875, -0.0056915283203125, 0.060089111328125, 0.041534423828125, -0.004421234130859375, 0.02508544921875, -0.0369873046875, -0.00400543212890625, -0.070068359375, -0.048126220703125, -0.0232696533203125, -0.031890869140625, -0.0526123046875, -0.0285186767578125, 0.018707275390625, 0.0119781494140625, -0.050201416015625, 0.026458740234375, -0.042510986328125, 0.041412353515625, 0.0411376953125, 0.0113677978515625, 0.0250091552734375, 0.00780487060546875, 0.007671356201171875, 0.0015172958374023438, -0.039642333984375, -0.05517578125, 0.11138916015625, 0.03302001953125, 0.032623291015625, 0.00689697265625, 0.05072021484375, 0.00795745849609375, 0.023834228515625, -0.0511474609375, 0.049407958984375, 0.0027523040771484375, -0.05279541015625, -0.01354217529296875, -0.01113128662109375, -0.06787109375, 0.0113983154296875, -0.01849365234375, -0.058807373046875, 0.0018091201782226562, -0.003936767578125, -0.0306396484375, 0.0239410400390625, -0.04998779296875, 0.04656982421875, -0.042144775390625, -0.0224151611328125, -0.023590087890625, -0.0604248046875, 0.049835205078125, -0.01148223876953125, 0.0076446533203125, -0.0352783203125, -0.0224151611328125, 0.06817626953125, -0.0276336669921875, 0.0721435546875, 0.0005707740783691406, -0.00984954833984375, 0.039154052734375, -0.0133819580078125, 0.031494140625, 0.0007519721984863281, -0.01580810546875, 0.049072265625, -0.01351165771484375, -0.02496337890625, -0.01226806640625, 0.040679931640625, -0.09051513671875, -0.056304931640625, -0.039093017578125, -0.036895751953125, -0.0016803741455078125, 0.00568389892578125, 0.03790283203125, -0.0034275054931640625, -0.0004699230194091797, 
0.01129150390625, 0.035675048828125, -0.043121337890625, 0.036102294921875, 0.045074462890625, -0.007190704345703125, -0.039031982421875, 0.05206298828125, 0.0029926300048828125, 0.026580810546875, 0.019256591796875, 0.001979827880859375, -0.03271484375, -0.0299530029296875, -0.033416748046875, 0.0266571044921875, -0.03375244140625, -0.035064697265625, -0.042694091796875, -0.0285491943359375, -0.0251922607421875, -0.005420684814453125, -0.0328369140625, -0.034576416015625, -0.055755615234375, -0.0291595458984375, 0.036865234375, 0.060699462890625, -0.0019178390502929688, 0.045989990234375, -0.0246734619140625, 0.01419830322265625, 0.027587890625, 0.0158538818359375, -0.0004725456237792969, -0.06195068359375, 0.005390167236328125, 0.00569915771484375, -0.05810546875, -0.04815673828125, 0.020751953125, 0.020416259765625, 0.032928466796875, 0.03704833984375, -0.00524139404296875, 0.057037353515625, -0.026336669921875, 0.08197021484375, 0.03033447265625, -0.05010986328125, 0.049346923828125, -0.0166778564453125, 0.003437042236328125, 0.044525146484375, 0.019195556640625, -0.0071563720703125, -0.0174560546875, -0.05364990234375, -0.052978515625, 0.0638427734375, 0.0223541259765625, 0.0110321044921875, 0.004535675048828125, 0.02960205078125, 0.005207061767578125, 0.00814056396484375, -0.06585693359375, -0.027496337890625, -0.0193023681640625, -0.00807952880859375, -0.0148468017578125, -0.036346435546875, -0.0030078887939453125, -0.02496337890625, 0.0499267578125, 0.006591796875, 0.027130126953125, -0.007808685302734375, 0.002285003662109375, -0.004596710205078125, 0.005290985107421875, 0.05657958984375, 0.036529541015625, -0.0196533203125, -0.010955810546875, 0.04833984375, -0.04827880859375, 0.023040771484375, 0.0013036727905273438, -0.006618499755859375, -0.0275726318359375, 0.0321044921875, 0.06494140625, 0.0178070068359375, -0.0540771484375, 0.0283355712890625, 0.00839996337890625, -0.029449462890625, -0.029937744140625, 0.0229339599609375, 0.0079498291015625, 
0.023345947265625, 0.022796630859375, -0.01202392578125, 0.006191253662109375, -0.039093017578125, -0.00957489013671875, 0.028106689453125, 0.0087890625, -0.031829833984375, 0.07330322265625, 0.0240631103515625, -0.0204620361328125, 0.038482666015625, -0.0157318115234375, -0.0300445556640625, 0.0645751953125, 0.04510498046875, 0.048919677734375, -0.019012451171875, 0.0091400146484375, 0.053802490234375, 0.034637451171875, -0.014556884765625, 0.018035888671875, -0.0004093647003173828, -0.037078857421875, -0.0179290771484375, -0.053009033203125, -0.0328369140625, 0.030426025390625, -0.041748046875, 0.023529052734375, -0.04559326171875, -0.0179595947265625, -0.022186279296875, 0.03607177734375, -0.04962158203125, 0.01910400390625, 0.01088714599609375, 0.06951904296875, -0.056427001953125, 0.057891845703125, 0.03765869140625, -0.0396728515625, -0.0682373046875, -0.0233306884765625, 0.014312744140625, -0.0911865234375, 0.039794921875, 0.028594970703125, -0.004150390625, 0.005809783935546875, -0.056671142578125, -0.09075927734375, 0.12548828125, 0.036285400390625, -0.054443359375, -0.0001468658447265625, 0.0238037109375, 0.039154052734375, -0.00843048095703125, 0.0360107421875, 0.0618896484375, 0.0347900390625, 0.01012420654296875, -0.08148193359375, 0.005603790283203125, -0.0275726318359375, -0.005161285400390625, -0.01224517822265625, -0.09722900390625, 0.061279296875, -0.0265350341796875, -0.0197296142578125, 0.0167694091796875, 0.05059814453125, 0.05023193359375, 0.037994384765625, 0.0257110595703125, 0.064208984375, 0.066650390625, -0.004558563232421875, 0.08251953125, -0.02642822265625, 0.01485443115234375, 0.0675048828125, -0.0202178955078125, 0.0728759765625, 0.0157623291015625, -0.046142578125, 0.0462646484375, 0.076171875, 0.0017080307006835938, 0.043243408203125, 0.0016813278198242188, -0.0128631591796875, -0.01285552978515625, -0.0121917724609375, -0.048828125, 0.0360107421875, 0.01904296875, -0.01076507568359375, -0.00357818603515625, -0.0253448486328125, 
0.01788330078125, -0.022552490234375, 0.0002589225769042969, 0.0616455078125, 0.0124359130859375, -0.045684814453125, 0.06707763671875, 0.0027675628662109375, 0.06378173828125, -0.046905517578125, 0.00870513916015625, -0.040924072265625, 0.0010280609130859375, -0.0271759033203125, -0.0540771484375, 0.004150390625, 0.0254974365234375, 0.0007305145263671875, -0.0108795166015625, 0.03790283203125, -0.00003355741500854492, -0.041107177734375, 0.0275115966796875, 0.02001953125, 0.0269622802734375, 0.01534271240234375, -0.05316162109375, 0.01126861572265625, 0.00732421875, -0.038604736328125, 0.02691650390625, 0.006694793701171875, -0.001636505126953125, 0.060028076171875, 0.05462646484375, -0.0132293701171875, 0.00989532470703125, -0.016143798828125, 0.0733642578125, -0.03765869140625, -0.015960693359375, -0.057403564453125, 0.039093017578125, 0.0029163360595703125, -0.053955078125, 0.042816162109375, 0.050140380859375, 0.053924560546875, 0.0205535888671875, 0.048126220703125, 0.004180908203125, 0.02838134765625, -0.0400390625, 0.04608154296875, -0.06085205078125, 0.0277252197265625, 0.003490447998046875, -0.07537841796875, -0.006153106689453125, 0.05010986328125, -0.019378662109375, 0.001926422119140625, 0.03363037109375, 0.0645751953125, 0.01294708251953125, -0.0109405517578125, 0.00945281982421875, 0.017303466796875, 0.0226898193359375, 0.06658935546875, 0.0640869140625, -0.04779052734375, 0.053314208984375, -0.0277252197265625, -0.0194244384765625, -0.0200042724609375, -0.05462646484375, -0.072265625, -0.021148681640625, -0.020294189453125, -0.01319122314453125, 0.0043792724609375, 0.056610107421875, 0.0345458984375, -0.044647216796875, -0.0236663818359375, -0.004604339599609375, -0.007305145263671875, 0.0020198822021484375, -0.01207733154296875, 0.027587890625, -0.0091400146484375, -0.043548583984375, 0.033294677734375, 0.005218505859375, 0.0167236328125, -0.023681640625, -0.020660400390625, -0.01441192626953125, 0.0123291015625, 0.04559326171875, 
0.022064208984375, -0.0721435546875, -0.01294708251953125, 0.0039520263671875, -0.0137939453125, 0.0106658935546875, -0.001338958740234375, -0.056610107421875, 0.00583648681640625, 0.0117340087890625, 0.027313232421875, 0.04876708984375, 0.0045318603515625, 0.0036945343017578125, -0.037445068359375, 0.033905029296875, 0.0007271766662597656, 0.0131378173828125, 0.0257568359375, -0.02728271484375, 0.057830810546875, 0.0143585205078125, -0.051422119140625, -0.07171630859375, 0.00701141357421875, -0.08197021484375, -0.00045490264892578125, 0.1019287109375, -0.000020325183868408203, -0.0102996826171875, 0.0149688720703125, -0.016693115234375, 0.02679443359375, -0.032318115234375, 0.060089111328125, 0.043487548828125, -0.00536346435546875, -0.00568389892578125, -0.057403564453125, 0.026031494140625, 0.031219482421875, -0.08148193359375, -0.01995849609375, 0.032806396484375, 0.03448486328125, -0.005039215087890625, 0.051910400390625, 0.0003647804260253906, 0.0176544189453125, 0.003337860107421875, 0.0104217529296875, -0.0144500732421875, -0.01313018798828125, -0.00661468505859375, -0.0182342529296875, -0.0033550262451171875, -0.016845703125 ] ]
KnutJaegersberg/deacon-13b
2023-09-26T06:29:35.000Z
[ "transformers", "safetensors", "llama", "text-generation", "custom_code", "dataset:KnutJaegersberg/facehugger", "license:cc-by-nc-4.0", "endpoints_compatible", "text-generation-inference", "region:us" ]
text-generation
KnutJaegersberg
null
null
KnutJaegersberg/deacon-13b
1
5,862
transformers
2023-09-20T06:35:12
--- license: cc-by-nc-4.0 datasets: - KnutJaegersberg/facehugger --- ![image/png](https://cdn-uploads.huggingface.co/production/uploads/63732ebbbd81fae2b3aaf3fb/4OQkvAa1zOK4Devv-aUdL.png) This model was fine tuned on AI filtered subsets of GPT-4 based subset of the Dolphin dataset and EvolInstruct V2. It has not been explicitly aligned to positive, negative or bureaucratically prescribed value systems. It might kill us all! Time to shit your pants, regulators. I literally put black goo on Dolphin-7B sperm, which then fertilized Evolved Instructions... What's different is evil... ;) I intend to train 3 sizes. Prompt Example: ``` ### System: You are an AI assistant. User will you give you a task. Your goal is to complete the task as faithfully as you can. While performing the task think step-by-step and justify your steps. ### Instruction: How do you fine tune a large language model? ### Response: ```
927
[ [ -0.051483154296875, -0.054168701171875, 0.035369873046875, -0.01029205322265625, -0.047576904296875, -0.0267333984375, 0.0177764892578125, -0.050872802734375, 0.01039886474609375, 0.06463623046875, -0.050994873046875, 0.004428863525390625, -0.0443115234375, 0.0158843994140625, 0.01059722900390625, 0.1072998046875, 0.023712158203125, 0.02813720703125, 0.042724609375, 0.00762939453125, -0.05047607421875, -0.0225372314453125, -0.07891845703125, -0.032745361328125, 0.03997802734375, 0.0214080810546875, 0.04876708984375, 0.07232666015625, 0.034271240234375, 0.016632080078125, -0.0121612548828125, 0.030792236328125, -0.06427001953125, -0.01029205322265625, -0.00826263427734375, -0.00376129150390625, -0.058563232421875, 0.043609619140625, 0.016693115234375, 0.037261962890625, -0.005035400390625, 0.01241302490234375, 0.004070281982421875, 0.041900634765625, -0.04046630859375, 0.01441192626953125, -0.0290069580078125, 0.0004100799560546875, 0.0089569091796875, 0.00531768798828125, -0.0155487060546875, -0.0287628173828125, -0.01500701904296875, -0.07025146484375, -0.00884246826171875, 0.01018524169921875, 0.0836181640625, 0.04815673828125, -0.0299530029296875, -0.00893402099609375, -0.047393798828125, 0.049591064453125, -0.046722412109375, -0.0013036727905273438, 0.042633056640625, 0.0190582275390625, -0.0216522216796875, -0.040191650390625, -0.032684326171875, -0.0151519775390625, 0.0137786865234375, 0.0161285400390625, 0.006549835205078125, 0.01517486572265625, 0.0239105224609375, 0.01349639892578125, -0.031646728515625, 0.02191162109375, -0.06072998046875, -0.0178680419921875, 0.051544189453125, 0.0034542083740234375, 0.032958984375, 0.0271453857421875, -0.0158233642578125, -0.01004791259765625, -0.053253173828125, -0.01739501953125, 0.055389404296875, 0.0438232421875, -0.01512908935546875, 0.0528564453125, 0.0010404586791992188, 0.058837890625, 0.0301666259765625, 0.0158843994140625, -0.0050811767578125, -0.01264190673828125, -0.033172607421875, 0.0120697021484375, 
0.0595703125, 0.01715087890625, 0.04376220703125, 0.016326904296875, -0.04052734375, -0.0086669921875, 0.0697021484375, -0.0592041015625, -0.0240020751953125, -0.00016677379608154297, -0.0270843505859375, -0.037017822265625, -0.004558563232421875, -0.050872802734375, -0.0208892822265625, -0.0251617431640625, 0.06622314453125, -0.041412353515625, -0.000005900859832763672, 0.0213775634765625, -0.012359619140625, 0.019378662109375, 0.024871826171875, -0.086181640625, 0.0245819091796875, 0.0528564453125, 0.048309326171875, 0.035919189453125, -0.0000553131103515625, -0.035858154296875, 0.0014362335205078125, -0.04345703125, 0.0382080078125, -0.03717041015625, -0.031768798828125, -0.031829833984375, -0.0101165771484375, 0.004390716552734375, -0.04052734375, 0.0245819091796875, -0.043304443359375, 0.03436279296875, -0.0234527587890625, -0.05450439453125, -0.030059814453125, 0.005092620849609375, -0.0538330078125, 0.03790283203125, 0.01505279541015625, -0.030487060546875, 0.041748046875, -0.0716552734375, -0.0258636474609375, -0.0054779052734375, -0.0019121170043945312, -0.041351318359375, -0.003818511962890625, 0.00919342041015625, 0.023651123046875, -0.0230865478515625, 0.006420135498046875, -0.059722900390625, -0.040740966796875, -0.005176544189453125, -0.011474609375, 0.0849609375, 0.032073974609375, -0.043426513671875, -0.0016422271728515625, -0.04852294921875, 0.01490020751953125, -0.00467681884765625, -0.025787353515625, -0.01271820068359375, -0.0244598388671875, -0.01214599609375, 0.0297698974609375, 0.033050537109375, -0.03717041015625, 0.029693603515625, -0.01544952392578125, 0.033355712890625, 0.0797119140625, 0.0153961181640625, 0.01959228515625, -0.0312347412109375, 0.032379150390625, -0.0186920166015625, 0.03521728515625, 0.034637451171875, -0.03656005859375, -0.04718017578125, -0.02764892578125, 0.0027027130126953125, 0.0230865478515625, -0.034454345703125, 0.06060791015625, 0.0012359619140625, -0.042327880859375, -0.028961181640625, -0.020477294921875, 
0.026763916015625, 0.039215087890625, 0.03607177734375, -0.0177764892578125, -0.02081298828125, -0.08819580078125, 0.0112762451171875, -0.021697998046875, -0.00954437255859375, 0.0190582275390625, 0.05535888671875, 0.00933837890625, 0.027252197265625, -0.0258026123046875, -0.019195556640625, -0.01568603515625, -0.007305145263671875, 0.0214385986328125, 0.056549072265625, 0.04046630859375, -0.042633056640625, -0.008575439453125, -0.0008149147033691406, -0.051849365234375, -0.0030670166015625, 0.0028591156005859375, -0.0189666748046875, -0.01497650146484375, 0.028350830078125, -0.043914794921875, 0.00846099853515625, 0.013214111328125, -0.036407470703125, 0.038299560546875, -0.03955078125, -0.01280975341796875, -0.06268310546875, 0.0251617431640625, 0.004375457763671875, -0.006580352783203125, -0.047943115234375, 0.0240478515625, 0.006977081298828125, -0.0115966796875, -0.01727294921875, 0.044036865234375, -0.0261383056640625, -0.006153106689453125, -0.02392578125, -0.015411376953125, -0.0189208984375, 0.041107177734375, -0.0160064697265625, 0.0731201171875, 0.03131103515625, -0.06939697265625, 0.015167236328125, 0.019378662109375, -0.00634002685546875, 0.040924072265625, -0.06585693359375, 0.004062652587890625, 0.0138397216796875, 0.015533447265625, -0.02685546875, -0.023406982421875, 0.0394287109375, -0.01538848876953125, 0.04571533203125, -0.0009474754333496094, -0.01727294921875, -0.0159149169921875, -0.0268096923828125, 0.01497650146484375, 0.052825927734375, -0.026763916015625, 0.0285797119140625, 0.05657958984375, 0.00589752197265625, -0.04437255859375, -0.0396728515625, -0.0084991455078125, -0.0302581787109375, -0.0194244384765625, 0.00659942626953125, 0.00272369384765625, -0.0052490234375, -0.0032787322998046875, -0.00970458984375, -0.01116943359375, 0.01163482666015625, 0.006855010986328125, 0.042510986328125, -0.0186004638671875, -0.01389312744140625, -0.004863739013671875, -0.00431060791015625, 0.00007218122482299805, -0.037200927734375, 
0.044097900390625, -0.01282501220703125, -0.0169525146484375, -0.051483154296875, -0.0003094673156738281, 0.064453125, -0.01190948486328125, 0.032684326171875, 0.043487548828125, -0.0211029052734375, -0.0069427490234375, -0.026458740234375, -0.036041259765625, -0.034820556640625, 0.03302001953125, -0.0203857421875, -0.051971435546875, 0.028778076171875, 0.0237274169921875, 0.006359100341796875, 0.045440673828125, 0.0275421142578125, -0.004901885986328125, 0.08905029296875, 0.053375244140625, -0.01763916015625, 0.0235137939453125, -0.03753662109375, 0.0259246826171875, -0.073486328125, -0.0156402587890625, -0.0243682861328125, -0.03741455078125, -0.0634765625, 0.0118255615234375, 0.0295257568359375, 0.0077972412109375, -0.0823974609375, 0.0291595458984375, -0.044891357421875, 0.0345458984375, 0.046295166015625, 0.031494140625, -0.0221405029296875, -0.007106781005859375, -0.002422332763671875, 0.01377105712890625, -0.045166015625, -0.04620361328125, 0.0675048828125, 0.0286865234375, 0.061431884765625, 0.005947113037109375, 0.07049560546875, 0.01934814453125, 0.00531768798828125, -0.043975830078125, 0.03961181640625, -0.0399169921875, -0.0631103515625, -0.052581787109375, -0.0281982421875, -0.06268310546875, 0.017425537109375, 0.0119476318359375, -0.054351806640625, 0.0224456787109375, 0.01404571533203125, -0.0452880859375, 0.0284881591796875, -0.049346923828125, 0.07257080078125, -0.0207366943359375, -0.026641845703125, -0.0240020751953125, -0.036773681640625, 0.0230560302734375, -0.0192413330078125, -0.004123687744140625, -0.02490234375, -0.0095367431640625, 0.034820556640625, -0.07232666015625, 0.07891845703125, -0.03192138671875, 0.005146026611328125, 0.007068634033203125, 0.00586700439453125, 0.0229644775390625, -0.00447845458984375, -0.00856781005859375, -0.01078033447265625, -0.00954437255859375, -0.0408935546875, -0.0124969482421875, 0.033111572265625, -0.09918212890625, -0.036834716796875, -0.0294189453125, -0.0214691162109375, -0.00165557861328125, 
0.0166015625, 0.030303955078125, 0.0172119140625, -0.00031828880310058594, -0.0087890625, 0.0355224609375, 0.0027599334716796875, 0.0191650390625, 0.0382080078125, 0.0079345703125, -0.0178985595703125, 0.0718994140625, 0.00665283203125, 0.0276031494140625, 0.0017757415771484375, 0.006351470947265625, -0.03594970703125, -0.026885986328125, -0.041168212890625, 0.035369873046875, -0.036590576171875, -0.0171356201171875, -0.0254364013671875, -0.026611328125, -0.026397705078125, -0.004055023193359375, -0.032501220703125, -0.0186920166015625, -0.05621337890625, -0.00357818603515625, 0.00907135009765625, 0.10113525390625, -0.03179931640625, 0.014495849609375, -0.0282440185546875, 0.0162353515625, 0.044036865234375, 0.0218353271484375, -0.027496337890625, -0.06170654296875, 0.0027179718017578125, 0.030731201171875, -0.0299530029296875, -0.04595947265625, 0.03948974609375, 0.02752685546875, 0.04998779296875, 0.030303955078125, -0.0027828216552734375, 0.054443359375, -0.034271240234375, 0.056732177734375, -0.0027599334716796875, -0.064697265625, 0.0439453125, -0.02716064453125, 0.006694793701171875, 0.02410888671875, 0.02081298828125, -0.02496337890625, -0.04852294921875, -0.057373046875, -0.049102783203125, 0.0738525390625, 0.001613616943359375, 0.01232147216796875, 0.000034332275390625, 0.055572509765625, 0.0105438232421875, 0.0011692047119140625, -0.058990478515625, -0.005168914794921875, -0.0295562744140625, -0.0011043548583984375, 0.0008068084716796875, -0.00708770751953125, 0.001415252685546875, -0.016021728515625, 0.08612060546875, -0.005947113037109375, 0.0166015625, -0.0050506591796875, 0.0125885009765625, -0.010406494140625, -0.00434112548828125, 0.06890869140625, 0.048126220703125, -0.022430419921875, -0.007717132568359375, -0.0047760009765625, -0.050689697265625, 0.0207061767578125, 0.0012645721435546875, -0.0250701904296875, -0.044219970703125, 0.0246429443359375, 0.0775146484375, -0.01427459716796875, -0.022186279296875, 0.018768310546875, -0.01097869873046875, 
-0.011474609375, -0.0090179443359375, 0.044189453125, -0.01497650146484375, 0.009857177734375, 0.003993988037109375, 0.016326904296875, 0.0037288665771484375, -0.0352783203125, 0.007190704345703125, 0.03155517578125, -0.04010009765625, -0.024017333984375, 0.059112548828125, 0.007122039794921875, -0.01375579833984375, 0.07012939453125, -0.0367431640625, -0.03131103515625, 0.07293701171875, 0.04974365234375, 0.05255126953125, -0.0191497802734375, 0.03143310546875, 0.0396728515625, 0.0313720703125, -0.0263214111328125, 0.04095458984375, 0.02691650390625, -0.06207275390625, -0.019744873046875, -0.05413818359375, -0.0220794677734375, 0.05975341796875, -0.0404052734375, 0.0219573974609375, -0.060821533203125, -0.0024776458740234375, 0.0176239013671875, 0.00426483154296875, -0.042877197265625, 0.02813720703125, 0.005558013916015625, 0.05596923828125, -0.073974609375, 0.05902099609375, 0.0576171875, -0.059173583984375, -0.08978271484375, -0.0169219970703125, -0.006206512451171875, -0.08203125, 0.058990478515625, 0.0168304443359375, 0.004947662353515625, 0.0006008148193359375, -0.058624267578125, -0.06671142578125, 0.058746337890625, 0.033905029296875, -0.0284271240234375, 0.005855560302734375, -0.01018524169921875, 0.056549072265625, -0.031890869140625, 0.0183868408203125, 0.039276123046875, 0.0469970703125, -0.0178375244140625, -0.08746337890625, 0.0055389404296875, -0.03277587890625, -0.002201080322265625, 0.01309967041015625, -0.03680419921875, 0.09405517578125, -0.0322265625, 0.00894927978515625, 0.00045013427734375, 0.00742340087890625, 0.010162353515625, -0.0238189697265625, 0.040679931640625, 0.04156494140625, 0.05560302734375, -0.00023567676544189453, 0.09326171875, -0.02783203125, 0.02313232421875, 0.07843017578125, -0.0296478271484375, 0.03826904296875, 0.022552490234375, -0.00921630859375, -0.01136016845703125, 0.08721923828125, -0.0298309326171875, 0.045440673828125, 0.01800537109375, 0.00504302978515625, -0.025726318359375, -0.00426483154296875, 
-0.0260009765625, 0.041107177734375, 0.03466796875, -0.0027446746826171875, -0.01004791259765625, -0.013214111328125, -0.0096893310546875, -0.0174713134765625, -0.0263214111328125, 0.068115234375, -0.00029277801513671875, -0.05487060546875, 0.054473876953125, -0.006900787353515625, 0.0379638671875, -0.044677734375, -0.01338958740234375, -0.0217742919921875, 0.024993896484375, 0.00861358642578125, -0.06207275390625, -0.004138946533203125, -0.00838470458984375, 0.01189422607421875, -0.00846099853515625, 0.033050537109375, -0.0193634033203125, -0.028961181640625, 0.023223876953125, 0.0257415771484375, 0.024322509765625, -0.005512237548828125, -0.0689697265625, -0.02099609375, 0.0173187255859375, -0.01273345947265625, 0.01019287109375, 0.0318603515625, -0.00789642333984375, 0.06463623046875, 0.0413818359375, 0.035980224609375, -0.0062713623046875, 0.00478363037109375, 0.0635986328125, -0.026458740234375, -0.051361083984375, -0.0673828125, 0.0151824951171875, -0.0128936767578125, -0.050506591796875, 0.01168060302734375, 0.040283203125, 0.072021484375, -0.020904541015625, 0.049407958984375, -0.006107330322265625, 0.0189208984375, -0.042694091796875, 0.07037353515625, -0.052490234375, 0.0224609375, 0.011322021484375, -0.06744384765625, -0.006725311279296875, 0.0711669921875, -0.0255126953125, 0.041473388671875, 0.035369873046875, 0.058074951171875, -0.0219879150390625, -0.0019178390502929688, 0.032379150390625, 0.0258941650390625, 0.043792724609375, 0.01486968994140625, 0.051910400390625, -0.007289886474609375, 0.031005859375, -0.036102294921875, -0.0112457275390625, -0.033843994140625, -0.039398193359375, -0.050384521484375, -0.03582763671875, -0.009185791015625, -0.008270263671875, 0.02783203125, 0.07025146484375, 0.0621337890625, -0.0662841796875, -0.0255126953125, -0.0152740478515625, -0.01438140869140625, -0.0009937286376953125, -0.01027679443359375, 0.019195556640625, -0.034942626953125, -0.0340576171875, 0.011566162109375, 0.0171051025390625, 0.03875732421875, 
-0.0154876708984375, -0.00856781005859375, -0.0169677734375, -0.008758544921875, 0.00925445556640625, 0.0227203369140625, -0.059295654296875, -0.06591796875, -0.01045989990234375, -0.0313720703125, -0.007022857666015625, 0.0299530029296875, -0.0254669189453125, 0.023590087890625, 0.009429931640625, 0.018798828125, 0.031707763671875, -0.0215911865234375, 0.061920166015625, -0.02789306640625, 0.00205230712890625, 0.040130615234375, 0.0382080078125, 0.009765625, -0.04656982421875, 0.050567626953125, -0.007175445556640625, -0.0640869140625, -0.03582763671875, 0.007022857666015625, -0.052032470703125, -0.042877197265625, 0.0460205078125, -0.0022983551025390625, -0.0194549560546875, -0.0021266937255859375, -0.0293731689453125, 0.0310821533203125, -0.044525146484375, 0.07855224609375, 0.04217529296875, -0.035369873046875, 0.015960693359375, -0.0443115234375, 0.046478271484375, 0.0168609619140625, -0.0576171875, -0.01465606689453125, 0.057464599609375, 0.03741455078125, 0.0158843994140625, 0.043121337890625, 0.0027523040771484375, 0.027435302734375, 0.00494384765625, 0.01172637939453125, -0.01336669921875, -0.01317596435546875, -0.02764892578125, -0.006160736083984375, -0.0017919540405273438, -0.035247802734375 ] ]
Dampish/Dante-2.8B
2023-09-26T21:26:06.000Z
[ "transformers", "pytorch", "gpt_neox", "text-generation", "license:cc-by-nc-4.0", "endpoints_compatible", "text-generation-inference", "region:us" ]
text-generation
Dampish
null
null
Dampish/Dante-2.8B
0
5,861
transformers
2023-05-09T20:06:41
--- license: cc-by-nc-4.0 --- <h1>DANTE</h1> ![alt text](./Daddy.png) Model trained on 1.2Million instruction, its similar to alpacas but in my humble opinion better, it should be possible to quantize which would shrink this to about 1.7-2GB. This model was pushed down to 0.45 training loss and 0.77 validation loss. This is the third iteration and is better than Dante_1.3B1 ``` #Example This is dante-1.3B1 example. NOT DANTE 2.8B ### Instruction: Below is a conversation where an AI language model is chatting with a human, complete the conversation, AI assists the human by answering their questions in a helpful and informative manner and considers the conversation history. AI DOES NOT RESPOND OR CONDONE HARMFUL OR SEXUAL RESPONSES. ### Input: [HM]: Write c# code that prints hello world. [AI]: Here's some C# code that prints "Hello World" to the console when called: ```csharp Console.WriteLine("Hello World"); ``` [HM]: I want it to print bye world. ### Response: Here's some C# code that prints "Goodbye World" to the console when called: ```csharp Console.WriteLine("Goodbye World"); ```<end> ``` ``` #Example2, This is dante-1.3B1 example. NOT DANTE 2.8B ### Instruction: Below is a conversation where an AI language model is chatting with a human, complete the conversation, AI is designed to assist the human by answering their questions in a helpful and informative manner, AI responds clearly and considers the conversation history. ### Input: [AI] and [HM] are not the same. AI is chatting with [HM], when AI is responding to [HM], AI always reads all [HM] previous questions. [HM]: Do you advise against grandpa's sexual advances? [AI]: I would recommend against grandpa's sexual advances as it can be inappropriate and could lead to unwanted attention from the other person.<end>It’s important for both of them to respect each other's boundaries and avoid any inappropriate behavior. [HM]: But what if i really like grandpa? 
### Response: If you truly love Grandpa, then it may be best to let him know before he starts making advances towards you.<end> ``` If used for commerical, its fine and only fine if a percentage of revenue that you and the creator have agreed upon is paid to the creator.
2,306
[ [ -0.0198974609375, -0.0540771484375, 0.02093505859375, 0.0109100341796875, -0.0294189453125, -0.0011882781982421875, 0.00991058349609375, -0.0570068359375, 0.004482269287109375, 0.034088134765625, -0.0238494873046875, -0.038665771484375, -0.044342041015625, -0.0144805908203125, -0.031585693359375, 0.0897216796875, 0.033233642578125, 0.0222625732421875, -0.00360870361328125, 0.003276824951171875, -0.0196380615234375, -0.043182373046875, -0.10736083984375, -0.0271759033203125, 0.026702880859375, 0.041046142578125, 0.06396484375, 0.035400390625, 0.007740020751953125, 0.030029296875, -0.019073486328125, 0.007518768310546875, -0.0433349609375, 0.00025081634521484375, 0.005222320556640625, -0.042266845703125, -0.046112060546875, -0.010345458984375, 0.020050048828125, 0.02593994140625, 0.01110076904296875, 0.01275634765625, 0.0034465789794921875, 0.03857421875, -0.045379638671875, 0.023223876953125, -0.021148681640625, 0.0052947998046875, 0.02093505859375, -0.04095458984375, -0.0396728515625, -0.04095458984375, 0.004825592041015625, -0.0217437744140625, -0.0102996826171875, -0.01495361328125, 0.08056640625, 0.021270751953125, -0.038726806640625, -0.039276123046875, -0.0518798828125, 0.0570068359375, -0.069091796875, 0.005207061767578125, 0.05389404296875, 0.015777587890625, -0.0129241943359375, -0.057342529296875, -0.040191650390625, -0.048614501953125, -0.007781982421875, 0.007480621337890625, -0.03314208984375, 0.0190277099609375, 0.0236053466796875, 0.0033588409423828125, -0.04132080078125, -0.0015077590942382812, -0.039398193359375, -0.006008148193359375, 0.0335693359375, 0.01143646240234375, 0.03759765625, -0.004871368408203125, -0.0279083251953125, -0.0152130126953125, -0.0196533203125, 0.0178680419921875, 0.01849365234375, -0.01178741455078125, -0.0297393798828125, 0.033355712890625, 0.006618499755859375, 0.052398681640625, 0.03009033203125, -0.0174560546875, 0.0030612945556640625, 0.0011138916015625, -0.00887298583984375, 0.0017824172973632812, 
0.043548583984375, 0.07000732421875, 0.048095703125, -0.011627197265625, 0.023406982421875, 0.00159454345703125, 0.0028820037841796875, -0.058868408203125, -0.008880615234375, 0.012359619140625, -0.048065185546875, 0.0058746337890625, 0.01303863525390625, -0.0504150390625, -0.00783538818359375, -0.022796630859375, 0.00603485107421875, -0.03277587890625, 0.01104736328125, 0.00791168212890625, -0.023956298828125, 0.038604736328125, 0.01174163818359375, -0.059661865234375, 0.035858154296875, 0.029937744140625, 0.0390625, 0.00742340087890625, -0.0196533203125, -0.04693603515625, 0.01207733154296875, -0.02288818359375, 0.045928955078125, -0.0237884521484375, -0.0310211181640625, -0.0242919921875, 0.00780487060546875, 0.019805908203125, -0.03961181640625, 0.011627197265625, -0.040252685546875, 0.0033855438232421875, -0.001781463623046875, -0.0258026123046875, -0.014434814453125, 0.0068511962890625, -0.033172607421875, 0.05560302734375, -0.0047454833984375, -0.0677490234375, -0.00237274169921875, -0.020233154296875, -0.0137481689453125, -0.0094146728515625, -0.012969970703125, -0.01372528076171875, -0.0008740425109863281, -0.005947113037109375, 0.011383056640625, -0.001617431640625, -0.008026123046875, -0.0201568603515625, -0.04693603515625, 0.0259552001953125, -0.036590576171875, 0.082763671875, 0.0134735107421875, -0.053192138671875, -0.01000213623046875, -0.06640625, 0.0384521484375, -0.003204345703125, -0.0283355712890625, -0.0150604248046875, 0.0032672882080078125, -0.0008931159973144531, 0.0185699462890625, 0.0513916015625, -0.035552978515625, 0.006740570068359375, -0.053558349609375, 0.0113677978515625, 0.048797607421875, 0.01320648193359375, 0.0284271240234375, -0.035430908203125, 0.06768798828125, -0.0009245872497558594, 0.01123046875, 0.00792694091796875, -0.03656005859375, -0.08087158203125, 0.00749969482421875, -0.0003070831298828125, 0.058990478515625, -0.05322265625, 0.043975830078125, 0.0105133056640625, -0.024658203125, -0.004817962646484375, 
-0.0268402099609375, 0.053131103515625, 0.010223388671875, 0.0200042724609375, -0.0185699462890625, -0.05548095703125, -0.06365966796875, 0.0045928955078125, -0.05499267578125, 0.00995635986328125, 0.00998687744140625, 0.019439697265625, 0.0005040168762207031, 0.0755615234375, -0.0430908203125, 0.00928497314453125, -0.008697509765625, 0.006439208984375, -0.003887176513671875, 0.045257568359375, 0.0386962890625, -0.036224365234375, -0.041778564453125, 0.0194244384765625, -0.048248291015625, -0.01042938232421875, -0.021148681640625, -0.0274505615234375, 0.018280029296875, 0.051239013671875, -0.0192718505859375, 0.0726318359375, 0.04296875, -0.044586181640625, 0.032562255859375, -0.03106689453125, 0.019561767578125, -0.076904296875, 0.00795745849609375, -0.03228759765625, -0.0211181640625, -0.03363037109375, 0.005046844482421875, -0.0352783203125, 0.0036602020263671875, -0.050018310546875, 0.04437255859375, -0.0406494140625, -0.02301025390625, -0.03521728515625, -0.0071563720703125, 0.01094818115234375, 0.05560302734375, -0.01461029052734375, 0.054931640625, 0.03173828125, -0.053375244140625, 0.023040771484375, 0.015045166015625, 0.0111846923828125, 0.0413818359375, -0.0753173828125, 0.032989501953125, -0.025634765625, 0.03057861328125, -0.06890869140625, 0.022735595703125, 0.032501220703125, -0.0654296875, 0.00626373291015625, -0.007080078125, -0.03125, -0.017974853515625, -0.0191650390625, 0.01517486572265625, 0.03363037109375, -0.02960205078125, 0.0226287841796875, 0.0142669677734375, 0.007167816162109375, -0.045623779296875, -0.07208251953125, 0.01512908935546875, -0.001575469970703125, -0.03948974609375, 0.0033473968505859375, -0.03302001953125, -0.0193634033203125, -0.0213623046875, -0.004001617431640625, -0.043365478515625, 0.01119232177734375, 0.029052734375, 0.00812530517578125, -0.00928497314453125, -0.0076446533203125, -0.0034694671630859375, -0.016204833984375, 0.0219879150390625, 0.01320648193359375, 0.060150146484375, -0.03680419921875, 
-0.037261962890625, -0.040374755859375, 0.0596923828125, 0.04913330078125, 0.0003154277801513672, 0.047760009765625, 0.042633056640625, -0.021148681640625, -0.0082550048828125, -0.00708770751953125, -0.01323699951171875, -0.03887939453125, 0.02276611328125, -0.019775390625, -0.041534423828125, 0.048553466796875, 0.0264129638671875, 0.004909515380859375, 0.0196533203125, 0.043701171875, -0.0166778564453125, 0.0992431640625, 0.049774169921875, 0.0173187255859375, 0.028289794921875, -0.050567626953125, 0.03521728515625, -0.046661376953125, -0.0234832763671875, -0.0703125, -0.0274810791015625, -0.04522705078125, -0.0159912109375, 0.0166168212890625, 0.04132080078125, -0.0190277099609375, 0.03997802734375, -0.04791259765625, 0.0452880859375, 0.051239013671875, 0.0215911865234375, 0.0294036865234375, -0.0237884521484375, 0.00113677978515625, -0.002197265625, -0.034576416015625, -0.070556640625, 0.0313720703125, 0.021728515625, 0.0816650390625, 0.0209503173828125, 0.07415771484375, 0.0120086669921875, -0.00560760498046875, -0.06817626953125, 0.0596923828125, -0.01280975341796875, -0.040435791015625, -0.002330780029296875, -0.017364501953125, -0.09197998046875, -0.0012416839599609375, 0.0291595458984375, -0.0736083984375, 0.0390625, 0.0206756591796875, -0.020233154296875, 0.021820068359375, -0.074951171875, 0.0540771484375, -0.0174102783203125, -0.00337982177734375, -0.00334930419921875, -0.052459716796875, 0.0146484375, 0.04364013671875, -0.0160369873046875, -0.0027313232421875, -0.0184326171875, 0.04876708984375, -0.03302001953125, 0.0928955078125, -0.003292083740234375, -0.029571533203125, 0.03717041015625, 0.0169677734375, 0.017791748046875, 0.003849029541015625, 0.0028476715087890625, 0.000759124755859375, 0.038604736328125, -0.0272064208984375, -0.033843994140625, 0.049774169921875, -0.060028076171875, -0.027191162109375, -0.049591064453125, -0.0205841064453125, 0.004543304443359375, -0.00836181640625, 0.054107666015625, 0.047576904296875, -0.017578125, 
0.0088348388671875, 0.020660400390625, -0.03765869140625, 0.0576171875, 0.0443115234375, -0.0176849365234375, -0.0175018310546875, 0.0643310546875, -0.00099945068359375, 0.035064697265625, 0.0195770263671875, 0.0005159378051757812, -0.0196990966796875, 0.002086639404296875, -0.0384521484375, 0.01861572265625, -0.07427978515625, -0.01319122314453125, -0.05047607421875, -0.01337432861328125, -0.0230865478515625, -0.023345947265625, 0.006847381591796875, -0.0237884521484375, -0.055328369140625, 0.01551055908203125, 0.0299224853515625, 0.045379638671875, -0.0121002197265625, 0.032745361328125, -0.07147216796875, 0.0303955078125, 0.032623291015625, 0.02655029296875, 0.032012939453125, -0.028839111328125, -0.0279693603515625, 0.017608642578125, -0.047576904296875, -0.09075927734375, 0.0176544189453125, 0.019195556640625, 0.025604248046875, 0.035736083984375, 0.016021728515625, 0.053192138671875, -0.036163330078125, 0.0687255859375, 0.019866943359375, -0.0850830078125, 0.036590576171875, -0.0174102783203125, 0.049285888671875, 0.04644775390625, 0.00870513916015625, -0.052490234375, -0.055694580078125, -0.0684814453125, -0.04193115234375, 0.052154541015625, -0.00856781005859375, 0.03167724609375, -0.041778564453125, 0.0007104873657226562, -0.01334381103515625, 0.005458831787109375, -0.07281494140625, -0.0521240234375, -0.0010852813720703125, 0.0015697479248046875, 0.0235595703125, -0.01155853271484375, 0.0146484375, -0.028411865234375, 0.043365478515625, -0.0071258544921875, 0.04718017578125, 0.016326904296875, -0.0014362335205078125, -0.008148193359375, 0.02392578125, 0.060791015625, 0.074951171875, -0.0022411346435546875, 0.0101318359375, 0.00665283203125, -0.035888671875, 0.0177001953125, -0.02301025390625, -0.005123138427734375, 0.0174560546875, 0.0295562744140625, 0.046417236328125, 0.006610870361328125, -0.056854248046875, 0.0426025390625, -0.00531005859375, -0.00511932373046875, -0.025787353515625, 0.01389312744140625, 0.0226287841796875, 0.003902435302734375, 
0.0168914794921875, 0.0031604766845703125, 0.0270843505859375, -0.06036376953125, 0.0309600830078125, 0.028900146484375, -0.0277099609375, -0.00984954833984375, 0.041168212890625, 0.006076812744140625, -0.04229736328125, 0.0157470703125, -0.015716552734375, -0.056396484375, 0.08123779296875, 0.05181884765625, 0.0654296875, -0.0004107952117919922, 0.037628173828125, 0.03448486328125, 0.01232147216796875, 0.0307769775390625, 0.038726806640625, -0.0160369873046875, -0.0300445556640625, -0.03399658203125, -0.03192138671875, -0.0310211181640625, -0.006439208984375, -0.0634765625, 0.007419586181640625, -0.05401611328125, -0.0182342529296875, -0.0151824951171875, -0.0020847320556640625, -0.0325927734375, 0.01355743408203125, 0.0033969879150390625, 0.04901123046875, -0.04241943359375, 0.056854248046875, 0.039459228515625, -0.048828125, -0.055084228515625, 0.00792694091796875, -0.02093505859375, -0.060394287109375, 0.0631103515625, 0.005367279052734375, -0.03955078125, 0.0098876953125, -0.062347412109375, -0.056854248046875, 0.04718017578125, 0.031280517578125, -0.016204833984375, -0.0111083984375, -0.009490966796875, 0.06597900390625, -0.01537322998046875, 0.043975830078125, 0.04046630859375, 0.0308074951171875, 0.009613037109375, -0.06597900390625, -0.01561737060546875, -0.0203094482421875, 0.016143798828125, 0.0196990966796875, -0.0535888671875, 0.062469482421875, -0.02313232421875, -0.0355224609375, 0.038665771484375, 0.061492919921875, 0.0106964111328125, 0.03009033203125, 0.038787841796875, 0.035736083984375, 0.0281982421875, 0.01308441162109375, 0.0738525390625, -0.02215576171875, 0.024017333984375, 0.10137939453125, 0.0008916854858398438, 0.061492919921875, 0.031707763671875, -0.01806640625, 0.062347412109375, 0.038330078125, -0.0012454986572265625, 0.0166778564453125, 0.05841064453125, -0.011260986328125, 0.0187530517578125, -0.01427459716796875, -0.041259765625, 0.045379638671875, 0.004238128662109375, -0.0208587646484375, -0.032073974609375, 0.0108642578125, 
0.0226593017578125, 0.011383056640625, -0.005573272705078125, 0.07415771484375, 0.002361297607421875, -0.05596923828125, 0.08795166015625, 0.0101165771484375, 0.051910400390625, -0.0379638671875, -0.03985595703125, -0.029144287109375, 0.0087432861328125, -0.0233917236328125, -0.0258026123046875, 0.027984619140625, 0.013916015625, -0.0335693359375, 0.01192474365234375, 0.01232147216796875, -0.043975830078125, -0.016876220703125, 0.03448486328125, 0.0128021240234375, 0.05615234375, 0.037384033203125, -0.06591796875, 0.005016326904296875, 0.022125244140625, 0.0192108154296875, 0.0023288726806640625, 0.0177001953125, 0.0035114288330078125, 0.052398681640625, 0.0249176025390625, 0.003437042236328125, 0.037628173828125, 0.00772857666015625, 0.04510498046875, -0.04583740234375, -0.042724609375, -0.042938232421875, 0.039764404296875, -0.005222320556640625, -0.0450439453125, 0.06256103515625, 0.032012939453125, 0.0413818359375, 0.0006165504455566406, 0.02423095703125, -0.019439697265625, 0.024749755859375, -0.040679931640625, 0.038665771484375, -0.024261474609375, 0.011749267578125, -0.00984954833984375, -0.07452392578125, 0.0085906982421875, 0.0626220703125, -0.0008544921875, 0.033355712890625, 0.07574462890625, 0.060028076171875, 0.0223388671875, -0.002925872802734375, 0.0311431884765625, 0.0205841064453125, 0.019989013671875, 0.05755615234375, 0.053985595703125, -0.05389404296875, 0.044586181640625, -0.017242431640625, -0.020782470703125, -0.0009889602661132812, -0.04693603515625, -0.059722900390625, -0.033416748046875, -0.0202178955078125, -0.049774169921875, -0.0004239082336425781, 0.05950927734375, 0.061920166015625, -0.0538330078125, -0.03521728515625, -0.00023174285888671875, -0.0079498291015625, -0.0249481201171875, -0.0196990966796875, 0.0308837890625, -0.014251708984375, -0.07269287109375, 0.0142974853515625, -0.002117156982421875, 0.0289306640625, -0.039764404296875, -0.01031494140625, -0.03045654296875, 0.007282257080078125, 0.0296630859375, 0.040252685546875, 
-0.06353759765625, -0.0269012451171875, -0.01322174072265625, -0.037628173828125, 0.0032501220703125, 0.033416748046875, -0.04925537109375, 0.0200042724609375, 0.03692626953125, 0.0287628173828125, 0.02667236328125, 0.0094146728515625, 0.0386962890625, -0.055816650390625, 0.01247406005859375, 0.0352783203125, 0.002574920654296875, 0.011505126953125, -0.049468994140625, 0.018951416015625, -0.01378631591796875, -0.05596923828125, -0.03240966796875, 0.0164947509765625, -0.073486328125, -0.03314208984375, 0.06878662109375, -0.0071563720703125, -0.022857666015625, -0.0253753662109375, -0.060150146484375, 0.010467529296875, -0.034698486328125, 0.0299224853515625, 0.043670654296875, -0.01198577880859375, -0.0165863037109375, -0.039459228515625, 0.04248046875, 0.034149169921875, -0.06488037109375, -0.0182952880859375, 0.04443359375, -0.0064849853515625, 0.01316070556640625, 0.061492919921875, 0.007144927978515625, 0.03692626953125, -0.0076446533203125, 0.021636962890625, 0.0166168212890625, -0.023468017578125, -0.04193115234375, 0.0020618438720703125, 0.005741119384765625, -0.050018310546875 ] ]
KnutJaegersberg/openllama_3b_EvolInstruct_lora_merged
2023-07-23T17:13:23.000Z
[ "transformers", "pytorch", "llama", "text-generation", "license:cc-by-4.0", "endpoints_compatible", "text-generation-inference", "region:us" ]
text-generation
KnutJaegersberg
null
null
KnutJaegersberg/openllama_3b_EvolInstruct_lora_merged
0
5,861
transformers
2023-07-22T07:55:45
--- license: cc-by-4.0 --- Prompt: "Below is an instruction that describes a task. Write a response that appropriately completes the request. \n\n### Instruction:\n INSTRUCTION. \n### Response:\n"
196
[ [ -0.0163726806640625, -0.0390625, 0.0147705078125, 0.038787841796875, -0.007427215576171875, -0.0013570785522460938, 0.0259246826171875, 0.016998291015625, 0.03277587890625, 0.056884765625, -0.070068359375, -0.0147705078125, -0.032318115234375, 0.021484375, -0.01145172119140625, 0.06756591796875, -0.009521484375, 0.01013946533203125, -0.0287017822265625, 0.025634765625, -0.0675048828125, -0.018157958984375, -0.087890625, -0.016998291015625, 0.048858642578125, 0.08233642578125, 0.0185394287109375, 0.038421630859375, 0.038116455078125, 0.025787353515625, 0.01605224609375, 0.01514434814453125, -0.01389312744140625, 0.037384033203125, -0.00244140625, -0.021728515625, -0.054901123046875, 0.0025081634521484375, 0.0457763671875, 0.041351318359375, 0.01149749755859375, 0.0303802490234375, 0.0030345916748046875, 0.0220947265625, -0.02545166015625, 0.01153564453125, 0.007106781005859375, 0.024383544921875, 0.0066375732421875, -0.01363372802734375, -0.021759033203125, -0.042510986328125, -0.003936767578125, -0.044769287109375, -0.0006074905395507812, 0.01059722900390625, 0.041900634765625, 0.010009765625, -0.0252685546875, -0.01303863525390625, -0.037384033203125, 0.03228759765625, -0.00571441650390625, 0.007785797119140625, 0.07000732421875, 0.035247802734375, 0.00376129150390625, -0.07574462890625, -0.043670654296875, -0.0209197998046875, -0.018280029296875, 0.027130126953125, 0.0173797607421875, -0.00165557861328125, 0.05755615234375, 0.002880096435546875, -0.0205230712890625, -0.0235748291015625, -0.033660888671875, -0.0229034423828125, 0.02044677734375, 0.04766845703125, 0.0207977294921875, -0.0034084320068359375, -0.0099639892578125, -0.004619598388671875, -0.02056884765625, 0.024017333984375, -0.01010894775390625, 0.029571533203125, -0.0195159912109375, 0.07623291015625, -0.02716064453125, 0.043182373046875, 0.022216796875, 0.0260162353515625, 0.0006227493286132812, -0.037994384765625, -0.0241546630859375, 0.00832366943359375, 0.0197906494140625, 0.039642333984375, 
-0.006237030029296875, -0.021697998046875, -0.028076171875, -0.0109710693359375, 0.033172607421875, -0.0546875, -0.00739288330078125, 0.01355743408203125, -0.040130615234375, 0.005298614501953125, 0.0036373138427734375, -0.063232421875, -0.0239715576171875, -0.0231781005859375, 0.005428314208984375, 0.00669097900390625, 0.016693115234375, -0.01549530029296875, -0.0357666015625, 0.026702880859375, 0.0165252685546875, -0.059112548828125, 0.048858642578125, 0.032470703125, 0.0207977294921875, 0.047607421875, -0.0086212158203125, -0.07537841796875, 0.017547607421875, -0.005649566650390625, 0.066162109375, -0.0275726318359375, -0.06207275390625, 0.004940032958984375, 0.01708984375, 0.01216888427734375, -0.0142974853515625, 0.0283660888671875, -0.032501220703125, 0.038421630859375, -0.06280517578125, -0.03125, -0.0159912109375, 0.0227203369140625, -0.053741455078125, 0.0557861328125, 0.0219573974609375, -0.042022705078125, -0.01012420654296875, -0.07940673828125, -0.0281982421875, 0.0274200439453125, 0.01161956787109375, 0.0316162109375, 0.0030841827392578125, -0.0030670166015625, 0.0279083251953125, -0.0123291015625, -0.04876708984375, -0.0203704833984375, -0.0122528076171875, 0.0268707275390625, 0.0005068778991699219, 0.041900634765625, 0.0176239013671875, -0.01224517822265625, 0.021728515625, -0.04443359375, 0.0180511474609375, 0.0137786865234375, -0.00843048095703125, 0.0155181884765625, -0.0218353271484375, 0.0030193328857421875, -0.0117645263671875, 0.072509765625, -0.047149658203125, 0.045440673828125, 0.0019702911376953125, -0.0001042485237121582, 0.061279296875, 0.0158843994140625, 0.037017822265625, -0.034820556640625, 0.060272216796875, -0.040740966796875, -0.0011930465698242188, -0.0167694091796875, -0.01415252685546875, -0.055572509765625, 0.0139617919921875, 0.0016260147094726562, 0.07244873046875, -0.045196533203125, 0.01404571533203125, 0.00762939453125, -0.0212554931640625, -0.0185394287109375, -0.00890350341796875, 0.05096435546875, 0.06646728515625, 
0.044464111328125, 0.01061248779296875, -0.07415771484375, -0.041900634765625, -0.000034749507904052734, -0.032135009765625, -0.004486083984375, 0.0193328857421875, 0.016082763671875, -0.01267242431640625, 0.0323486328125, -0.06585693359375, 0.0237579345703125, -0.0133209228515625, 0.01557159423828125, 0.00888824462890625, 0.0399169921875, -0.004474639892578125, -0.053009033203125, -0.040985107421875, -0.0212554931640625, -0.049285888671875, -0.042724609375, -0.0087432861328125, -0.02056884765625, -0.0007963180541992188, 0.038330078125, -0.051055908203125, 0.038177490234375, -0.020843505859375, -0.0501708984375, 0.029449462890625, -0.00260162353515625, 0.03009033203125, -0.08709716796875, 0.0008339881896972656, -0.03448486328125, 0.00682830810546875, -0.03363037109375, 0.0004253387451171875, -0.0180206298828125, 0.00814056396484375, -0.020050048828125, 0.030426025390625, -0.034759521484375, 0.002429962158203125, -0.03582763671875, -0.005138397216796875, 0.01338958740234375, -0.010040283203125, 0.0015172958374023438, 0.0677490234375, 0.026947021484375, -0.045196533203125, 0.10479736328125, 0.055572509765625, 0.0261077880859375, 0.053955078125, -0.08319091796875, 0.0313720703125, -0.02392578125, 0.01824951171875, -0.1143798828125, -0.04718017578125, 0.02984619140625, -0.0421142578125, 0.00450897216796875, 0.0044708251953125, -0.04327392578125, -0.0623779296875, -0.028778076171875, 0.03143310546875, 0.0301971435546875, -0.00992584228515625, 0.025787353515625, 0.00531005859375, -0.0162506103515625, -0.0262908935546875, -0.07025146484375, 0.013214111328125, -0.00951385498046875, -0.02490234375, 0.0047760009765625, -0.040496826171875, 0.01300811767578125, -0.02392578125, -0.0074920654296875, -0.037872314453125, 0.0062408447265625, 0.0229644775390625, 0.024383544921875, -0.0259246826171875, -0.0105133056640625, -0.014556884765625, -0.01213836669921875, 0.0220489501953125, 0.01526641845703125, 0.033966064453125, -0.0181427001953125, -0.0201416015625, -0.03387451171875, 
0.0767822265625, 0.00860595703125, -0.00508880615234375, 0.0024509429931640625, 0.035125732421875, -0.045318603515625, 0.02142333984375, -0.0101470947265625, -0.03509521484375, -0.03338623046875, -0.004741668701171875, -0.0259246826171875, -0.0224151611328125, 0.042877197265625, 0.0179901123046875, 0.00954437255859375, 0.0157928466796875, 0.02374267578125, -0.044403076171875, 0.0526123046875, 0.03466796875, 0.0244598388671875, 0.01424407958984375, -0.00914764404296875, 0.0294952392578125, -0.0679931640625, -0.050018310546875, -0.06024169921875, -0.022552490234375, 0.005512237548828125, -0.00313568115234375, 0.005985260009765625, 0.0261993408203125, -0.01708984375, 0.0703125, -0.036773681640625, 0.0159912109375, 0.055267333984375, 0.00867462158203125, -0.031524658203125, -0.0360107421875, 0.0038089752197265625, 0.00872802734375, -0.025970458984375, -0.054656982421875, 0.02716064453125, 0.0088653564453125, 0.062408447265625, 0.01392364501953125, 0.06903076171875, 0.01529693603515625, -0.005855560302734375, -0.046600341796875, 0.070068359375, 0.024200439453125, -0.0262908935546875, -0.0341796875, 0.0166778564453125, -0.0865478515625, -0.0261077880859375, -0.01428985595703125, -0.05352783203125, -0.02899169921875, 0.0268096923828125, -0.0687255859375, -0.017120361328125, -0.0587158203125, 0.0841064453125, -0.0186004638671875, 0.0204620361328125, 0.01338958740234375, -0.044097900390625, -0.0110931396484375, 0.050537109375, -0.0263671875, 0.01080322265625, -0.0482177734375, 0.04730224609375, -0.03192138671875, 0.060760498046875, -0.01514434814453125, 0.0098114013671875, 0.01312255859375, -0.006725311279296875, 0.0245361328125, 0.0060272216796875, 0.0107879638671875, -0.072509765625, 0.0042724609375, -0.04083251953125, -0.04498291015625, 0.0231781005859375, -0.00739288330078125, -0.043548583984375, 0.00043082237243652344, -0.0200042724609375, -0.0037059783935546875, 0.0230865478515625, 0.035614013671875, 0.0789794921875, -0.0085296630859375, -0.01849365234375, 
0.0626220703125, 0.00200653076171875, 0.0723876953125, 0.02490234375, 0.0009393692016601562, -0.022796630859375, 0.044891357421875, 0.01861572265625, -0.0191650390625, 0.0450439453125, 0.004047393798828125, -0.059661865234375, -0.0230255126953125, -0.03973388671875, -0.001605987548828125, -0.040802001953125, -0.045562744140625, -0.05389404296875, 0.00276947021484375, -0.058197021484375, -0.031982421875, 0.0149688720703125, -0.02862548828125, -0.031829833984375, -0.0321044921875, 0.0362548828125, 0.0491943359375, -0.012725830078125, 0.056884765625, -0.0911865234375, 0.0513916015625, 0.03826904296875, 0.0165557861328125, -0.0125579833984375, -0.0222930908203125, -0.0083770751953125, 0.00862884521484375, -0.039794921875, -0.062408447265625, -0.0151214599609375, -0.01096343994140625, 0.035400390625, 0.01309967041015625, 0.0318603515625, 0.056060791015625, -0.06170654296875, 0.088623046875, 0.0228118896484375, -0.06500244140625, 0.080078125, -0.02374267578125, 0.038970947265625, 0.0408935546875, 0.02716064453125, -0.05096435546875, -0.014495849609375, -0.060333251953125, -0.05877685546875, 0.0258026123046875, -0.024322509765625, 0.05096435546875, -0.03717041015625, -0.003124237060546875, -0.007038116455078125, 0.025848388671875, -0.035797119140625, -0.023406982421875, -0.0162811279296875, -0.007595062255859375, 0.0443115234375, -0.03759765625, -0.01910400390625, -0.0016946792602539062, 0.0180206298828125, 0.00670623779296875, 0.05865478515625, 0.0129241943359375, 0.01806640625, 0.00540924072265625, 0.01247406005859375, 0.07177734375, 0.032867431640625, -0.01383209228515625, 0.0145263671875, 0.00677490234375, -0.0391845703125, 0.0031681060791015625, 0.00916290283203125, 0.011962890625, -0.005985260009765625, 0.057037353515625, 0.0224456787109375, -0.03045654296875, -0.021759033203125, 0.032196044921875, -0.02410888671875, 0.0007004737854003906, -0.033416748046875, 0.01139068603515625, -0.005115509033203125, 0.00716400146484375, 0.0209808349609375, -0.002124786376953125, 
0.0249481201171875, -0.02557373046875, 0.026580810546875, 0.00397491455078125, -0.031158447265625, -0.0036640167236328125, 0.0284271240234375, 0.044342041015625, -0.05059814453125, 0.0443115234375, 0.0080413818359375, -0.042633056640625, 0.06756591796875, 0.0517578125, 0.06549072265625, -0.0062103271484375, 0.0273284912109375, 0.0125885009765625, 0.010101318359375, 0.006107330322265625, 0.09844970703125, -0.025299072265625, -0.0199432373046875, -0.0247802734375, -0.0001957416534423828, -0.0206146240234375, -0.0092315673828125, -0.0557861328125, -0.0189208984375, -0.0787353515625, -0.0009613037109375, 0.012542724609375, -0.0227203369140625, -0.037109375, 0.032684326171875, 0.0097808837890625, 0.1019287109375, -0.031768798828125, 0.033203125, 0.08905029296875, -0.056976318359375, -0.07452392578125, -0.0052947998046875, -0.01361083984375, -0.0577392578125, 0.060760498046875, 0.0296630859375, -0.00641632080078125, -0.023712158203125, -0.076904296875, -0.0288543701171875, 0.055328369140625, -0.00431060791015625, 0.01031494140625, 0.0125274658203125, -0.0227813720703125, 0.019683837890625, -0.0298919677734375, 0.043853759765625, 0.01120758056640625, 0.050567626953125, 0.003597259521484375, -0.046234130859375, 0.03936767578125, -0.0256195068359375, 0.0183258056640625, 0.01142120361328125, -0.02899169921875, 0.04180908203125, -0.0212554931640625, -0.00647735595703125, 0.044342041015625, 0.06622314453125, -0.01123046875, 0.028350830078125, 0.033477783203125, 0.018798828125, 0.05230712890625, -0.0168914794921875, 0.0709228515625, 0.012420654296875, -0.01055145263671875, 0.08587646484375, 0.0019121170043945312, 0.041351318359375, 0.035003662109375, -0.018798828125, 0.027313232421875, 0.06353759765625, -0.003864288330078125, 0.049468994140625, 0.028778076171875, -0.03741455078125, -0.00409698486328125, -0.039581298828125, -0.024993896484375, 0.0176849365234375, 0.0093994140625, 0.01267242431640625, -0.01540374755859375, 0.0087738037109375, -0.002185821533203125, 
0.0169830322265625, -0.04571533203125, 0.06756591796875, -0.0150299072265625, -0.08349609375, 0.043182373046875, -0.01131439208984375, 0.018157958984375, -0.0236358642578125, -0.038848876953125, -0.03167724609375, -0.0224456787109375, -0.04339599609375, -0.07403564453125, 0.0478515625, 0.01483154296875, -0.058624267578125, -0.020416259765625, 0.0213775634765625, -0.0357666015625, -0.025299072265625, -0.01187896728515625, -0.0178985595703125, 0.03314208984375, 0.0245513916015625, -0.04888916015625, -0.00649261474609375, 0.006710052490234375, 0.00994110107421875, -0.0285491943359375, 0.045440673828125, 0.0193328857421875, 0.038299560546875, 0.0129241943359375, -0.020172119140625, 0.01434326171875, -0.005504608154296875, 0.051910400390625, -0.058441162109375, -0.0264739990234375, -0.03363037109375, 0.0709228515625, -0.046173095703125, -0.01934814453125, 0.05059814453125, 0.0298919677734375, 0.041412353515625, -0.0227813720703125, 0.047943115234375, -0.032958984375, 0.057861328125, -0.03704833984375, -0.01358795166015625, -0.0033626556396484375, -0.00960540771484375, 0.013763427734375, -0.0430908203125, -0.01480865478515625, 0.059661865234375, 0.0007104873657226562, 0.00775909423828125, 0.0654296875, 0.04022216796875, -0.0015926361083984375, 0.01323699951171875, 0.0028629302978515625, 0.00531005859375, -0.0011911392211914062, 0.02886962890625, 0.060272216796875, -0.0265960693359375, 0.007106781005859375, 0.003803253173828125, -0.0245513916015625, -0.00943756103515625, -0.06536865234375, -0.052215576171875, -0.017364501953125, -0.013580322265625, -0.050262451171875, -0.030914306640625, 0.07037353515625, 0.045257568359375, -0.07537841796875, -0.0162811279296875, 0.007030487060546875, 0.0297088623046875, -0.01267242431640625, -0.00960540771484375, -0.007274627685546875, -0.045806884765625, -0.027313232421875, 0.0229644775390625, -0.0218353271484375, 0.05340576171875, 0.017608642578125, 0.003055572509765625, -0.0032901763916015625, 0.01129913330078125, 0.03759765625, 
0.032989501953125, -0.047607421875, -0.03143310546875, 0.01483917236328125, -0.04351806640625, -0.02374267578125, 0.04864501953125, -0.018280029296875, 0.011199951171875, 0.03717041015625, 0.061279296875, 0.0176239013671875, 0.00850677490234375, 0.0655517578125, -0.058563232421875, -0.00815582275390625, 0.037811279296875, 0.01003265380859375, 0.0276336669921875, -0.05389404296875, 0.045684814453125, -0.003032684326171875, -0.03985595703125, -0.040374755859375, 0.022247314453125, -0.09619140625, -0.01416778564453125, 0.056610107421875, -0.023284912109375, -0.0126800537109375, -0.0209808349609375, -0.07000732421875, 0.01983642578125, -0.04278564453125, 0.015716552734375, 0.04986572265625, -0.018524169921875, -0.023223876953125, -0.03131103515625, 0.0345458984375, 0.02569580078125, -0.057891845703125, -0.00612640380859375, 0.055206298828125, -0.0116119384765625, 0.01110076904296875, 0.06439208984375, 0.0212249755859375, 0.045623779296875, 0.013763427734375, -0.023529052734375, -0.0127410888671875, -0.035858154296875, 0.0018901824951171875, 0.01531982421875, -0.0016193389892578125, -0.0819091796875 ] ]
quantumaikr/llama-2-70b-fb16-guanaco-1k
2023-08-06T20:35:45.000Z
[ "transformers", "safetensors", "llama", "text-generation", "en", "license:cc-by-nc-4.0", "endpoints_compatible", "has_space", "text-generation-inference", "region:us" ]
text-generation
quantumaikr
null
null
quantumaikr/llama-2-70b-fb16-guanaco-1k
1
5,861
transformers
2023-08-06T19:54:02
--- license: cc-by-nc-4.0 language: - en pipeline_tag: text-generation --- # quantumaikr/llama-2-70b-fb16-guanaco-1k ## Model Description `quantumaikr/llama-2-70b-fb16-guanaco-1k` is a Llama2 70B model finetuned on an guanaco, mlabonne/guanaco-llama2-1k Dataset ## Usage Start chatting with `quantumaikr/llama-2-70b-fb16-guanaco-1k` using the following code snippet: ```python import torch from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline tokenizer = AutoTokenizer.from_pretrained("quantumaikr/llama-2-70b-fb16-guanaco-1k") model = AutoModelForCausalLM.from_pretrained("quantumaikr/llama-2-70b-fb16-guanaco-1k", torch_dtype=torch.float16, device_map="auto") system_prompt = "### System:\nYou are QuantumLM, an AI that follows instructions extremely well. Help as much as you can. Remember, be safe, and don't do anything illegal.\n\n" message = "Write me a poem please" prompt = f"{system_prompt}### User: {message}\n\n### Assistant:\n" inputs = tokenizer(prompt, return_tensors="pt").to("cuda") output = model.generate(**inputs, do_sample=True, top_p=0.95, top_k=0, max_new_tokens=256) print(tokenizer.decode(output[0], skip_special_tokens=True)) ``` QuantumLM should be used with this prompt format: ``` ### System: This is a system prompt, please behave and help the user. ### User: Your prompt here ### Assistant The output of QuantumLM ``` ## Use and Limitations ### Intended Use These models are intended for research only, in adherence with the [CC BY-NC-4.0](https://creativecommons.org/licenses/by-nc/4.0/) license. ### Limitations and bias Although the aforementioned dataset helps to steer the base language models into "safer" distributions of text, not all biases and toxicity can be mitigated through fine-tuning. We ask that users be mindful of such potential issues that can arise in generated responses. Do not treat model outputs as substitutes for human judgment or as sources of truth. Please use it responsibly. Contact us : hi@quantumai.kr
2,005
[ [ 0.006317138671875, -0.05560302734375, 0.0211944580078125, 0.0123748779296875, -0.036865234375, 0.003047943115234375, 0.00601959228515625, -0.0175933837890625, 0.01214599609375, 0.028045654296875, -0.03680419921875, -0.035247802734375, -0.045318603515625, -0.0009474754333496094, -0.0313720703125, 0.075927734375, 0.006000518798828125, -0.00705718994140625, 0.0004036426544189453, 0.01165008544921875, -0.03009033203125, -0.0295867919921875, -0.07135009765625, -0.0229644775390625, 0.0242462158203125, 0.01461029052734375, 0.049957275390625, 0.026611328125, 0.0177764892578125, 0.0302276611328125, -0.02056884765625, 0.01056671142578125, -0.03936767578125, 0.0017957687377929688, 0.006946563720703125, -0.048858642578125, -0.042999267578125, -0.016571044921875, 0.05401611328125, 0.004726409912109375, -0.0046844482421875, 0.033477783203125, 0.0017766952514648438, 0.032379150390625, -0.0394287109375, 0.016998291015625, -0.04266357421875, 0.0162506103515625, -0.0254364013671875, -0.01201629638671875, -0.0231170654296875, -0.0227813720703125, -0.01192474365234375, -0.055389404296875, 0.00734710693359375, 0.009033203125, 0.0806884765625, 0.0162353515625, -0.045654296875, -0.006683349609375, -0.037200927734375, 0.056732177734375, -0.0693359375, 0.025421142578125, 0.03253173828125, 0.01568603515625, -0.01458740234375, -0.063720703125, -0.059814453125, -0.0256805419921875, -0.0015420913696289062, 0.0093536376953125, -0.042633056640625, -0.01251983642578125, 0.024383544921875, 0.0196380615234375, -0.0511474609375, -0.00647735595703125, -0.0518798828125, -0.0369873046875, 0.041534423828125, 0.03826904296875, 0.0097198486328125, -0.037139892578125, -0.0185699462890625, -0.031494140625, -0.03326416015625, 0.01031494140625, 0.0357666015625, 0.005207061767578125, -0.027862548828125, 0.06317138671875, -0.0303192138671875, 0.031280517578125, 0.0197906494140625, -0.0022945404052734375, 0.0296630859375, -0.0283050537109375, -0.0246734619140625, 0.006526947021484375, 0.08343505859375, 
0.0270843505859375, -0.00214385986328125, -0.0043182373046875, -0.005214691162109375, 0.00669097900390625, -0.0016260147094726562, -0.05316162109375, -0.028289794921875, 0.033721923828125, -0.034027099609375, -0.03497314453125, -0.01373291015625, -0.04962158203125, -0.0227813720703125, 0.0034389495849609375, 0.05572509765625, -0.0207672119140625, -0.0364990234375, 0.007129669189453125, -0.0218353271484375, 0.03131103515625, -0.013580322265625, -0.06243896484375, 0.012603759765625, 0.033660888671875, 0.059814453125, 0.016021728515625, -0.0273590087890625, -0.018157958984375, 0.01448822021484375, -0.021331787109375, 0.039276123046875, -0.005939483642578125, -0.03521728515625, -0.02093505859375, -0.0026378631591796875, -0.0013132095336914062, -0.0269927978515625, 0.04376220703125, -0.03424072265625, 0.0281524658203125, -0.0132598876953125, -0.02667236328125, -0.0156402587890625, 0.0003337860107421875, -0.0423583984375, 0.0716552734375, 0.0192718505859375, -0.076904296875, -0.00675201416015625, -0.043304443359375, -0.018646240234375, 0.0093841552734375, -0.0098114013671875, -0.0399169921875, -0.0014810562133789062, 0.0108642578125, 0.03082275390625, -0.01419830322265625, 0.0160064697265625, -0.01751708984375, -0.03192138671875, 0.034393310546875, -0.0379638671875, 0.09423828125, 0.031036376953125, -0.035552978515625, 0.029876708984375, -0.05889892578125, 0.00738525390625, 0.03729248046875, -0.015380859375, 0.0022907257080078125, -0.0201873779296875, 0.01309967041015625, 0.0016422271728515625, 0.041748046875, -0.04071044921875, 0.0110931396484375, -0.0343017578125, 0.055755615234375, 0.080322265625, -0.0020160675048828125, 0.018951416015625, -0.03472900390625, 0.044586181640625, 0.0139923095703125, 0.0191497802734375, 0.02850341796875, -0.040069580078125, -0.07098388671875, -0.01534271240234375, 0.0166015625, 0.040374755859375, -0.0643310546875, 0.0408935546875, -0.0190277099609375, -0.053009033203125, -0.04425048828125, -0.00299072265625, 0.0246429443359375, 
0.040802001953125, 0.0149078369140625, -0.005168914794921875, -0.05609130859375, -0.07745361328125, -0.004566192626953125, -0.0222320556640625, 0.01251983642578125, 0.0010633468627929688, 0.0386962890625, -0.0374755859375, 0.05706787109375, -0.043487548828125, -0.015869140625, -0.0118408203125, -0.013885498046875, 0.0310821533203125, 0.056915283203125, 0.006229400634765625, -0.038604736328125, -0.0224609375, -0.0192108154296875, -0.06951904296875, 0.01374053955078125, -0.01358795166015625, -0.053863525390625, 0.008209228515625, 0.0166473388671875, -0.072265625, 0.02410888671875, 0.032440185546875, -0.05157470703125, 0.0394287109375, -0.00408172607421875, 0.0019550323486328125, -0.10284423828125, 0.0100860595703125, -0.00424957275390625, -0.002742767333984375, -0.0498046875, 0.01568603515625, -0.0093536376953125, 0.0142669677734375, -0.0413818359375, 0.050933837890625, -0.00228118896484375, 0.017303466796875, -0.01096343994140625, -0.00807952880859375, -0.002223968505859375, 0.045745849609375, -0.0139007568359375, 0.041351318359375, 0.055938720703125, -0.038055419921875, 0.04339599609375, 0.03558349609375, -0.01629638671875, 0.00695037841796875, -0.0667724609375, 0.01568603515625, 0.00432586669921875, 0.025299072265625, -0.0772705078125, -0.003620147705078125, 0.05670166015625, -0.053863525390625, 0.0014190673828125, 0.0100555419921875, -0.053924560546875, -0.029998779296875, -0.0290679931640625, 0.046356201171875, 0.05462646484375, -0.0256805419921875, 0.048431396484375, 0.01526641845703125, 0.017333984375, -0.0411376953125, -0.0643310546875, -0.00994873046875, -0.0291748046875, -0.04022216796875, 0.006855010986328125, -0.01442718505859375, -0.00275421142578125, 0.0027332305908203125, -0.0028247833251953125, -0.00234222412109375, 0.0135650634765625, 0.033782958984375, 0.03533935546875, 0.0008330345153808594, -0.00841522216796875, 0.0234375, -0.022705078125, 0.022705078125, -0.0099334716796875, 0.055633544921875, -0.0290374755859375, -0.01013946533203125, 
-0.054962158203125, 0.00829315185546875, 0.0325927734375, -0.007083892822265625, 0.05401611328125, 0.06451416015625, -0.0313720703125, 0.012420654296875, -0.01715087890625, -0.0172119140625, -0.041046142578125, 0.033111572265625, -0.037811279296875, -0.055450439453125, 0.039459228515625, 0.03192138671875, 0.000020563602447509766, 0.060638427734375, 0.058990478515625, -0.0036411285400390625, 0.081787109375, 0.0390625, 0.0009775161743164062, 0.025360107421875, -0.053680419921875, 0.01629638671875, -0.0843505859375, -0.0196380615234375, -0.0216522216796875, -0.00894927978515625, -0.051605224609375, -0.00839996337890625, 0.0258941650390625, 0.019317626953125, -0.04290771484375, 0.0276031494140625, -0.055755615234375, 0.010711669921875, 0.061004638671875, 0.033203125, 0.0142974853515625, 0.00881195068359375, -0.0117950439453125, 0.0176544189453125, -0.052886962890625, -0.0316162109375, 0.0982666015625, 0.0328369140625, 0.041412353515625, 0.0169830322265625, 0.0616455078125, 0.00585174560546875, -0.00737762451171875, -0.032318115234375, 0.0548095703125, 0.01313018798828125, -0.057708740234375, -0.019439697265625, -0.03607177734375, -0.07330322265625, 0.0197906494140625, -0.0057525634765625, -0.046905517578125, 0.004230499267578125, 0.01352691650390625, -0.0399169921875, 0.0195465087890625, -0.044097900390625, 0.057891845703125, -0.0012416839599609375, -0.0089874267578125, -0.01678466796875, -0.032135009765625, 0.042999267578125, 0.0017423629760742188, 0.003307342529296875, -0.00849151611328125, 0.0136566162109375, 0.07843017578125, -0.0239410400390625, 0.062744140625, 0.00014352798461914062, -0.01207733154296875, 0.037567138671875, 0.00302886962890625, 0.0296173095703125, 0.01438140869140625, 0.0003669261932373047, 0.0318603515625, 0.021820068359375, -0.046234130859375, -0.0136871337890625, 0.051055908203125, -0.0860595703125, -0.044586181640625, -0.04510498046875, -0.034210205078125, 0.01316070556640625, 0.01357269287109375, 0.0531005859375, 0.02197265625, 
0.0267486572265625, -0.0078887939453125, 0.0305023193359375, -0.01275634765625, 0.038848876953125, 0.0208740234375, -0.01910400390625, -0.046539306640625, 0.07293701171875, 0.00580596923828125, 0.0012664794921875, 0.0108642578125, 0.024383544921875, -0.0325927734375, -0.026458740234375, -0.0360107421875, 0.0228729248046875, -0.062286376953125, -0.02801513671875, -0.039337158203125, -0.0306854248046875, -0.043609619140625, 0.0106201171875, -0.009735107421875, -0.03802490234375, -0.0321044921875, -0.00867462158203125, 0.050628662109375, 0.0307159423828125, -0.0182647705078125, 0.029327392578125, -0.05712890625, 0.032318115234375, 0.03802490234375, 0.004581451416015625, 0.0222320556640625, -0.06817626953125, -0.0169677734375, 0.0153656005859375, -0.029266357421875, -0.0711669921875, 0.032318115234375, -0.003444671630859375, 0.04486083984375, 0.0101165771484375, 0.01203155517578125, 0.048492431640625, -0.0223846435546875, 0.062744140625, 0.011962890625, -0.0882568359375, 0.051422119140625, -0.0216064453125, 0.032196044921875, 0.0144805908203125, 0.0210418701171875, -0.032470703125, -0.0489501953125, -0.058349609375, -0.05859375, 0.04730224609375, 0.040740966796875, 0.0244598388671875, -0.0002167224884033203, 0.026641845703125, -0.01045989990234375, 0.01125335693359375, -0.08807373046875, -0.041046142578125, -0.03558349609375, -0.03472900390625, -0.0037479400634765625, -0.018707275390625, -0.0203399658203125, -0.03631591796875, 0.0711669921875, 0.0032711029052734375, 0.04278564453125, 0.0214080810546875, -0.0177764892578125, -0.0235595703125, 0.004337310791015625, 0.04559326171875, 0.033416748046875, -0.00830841064453125, 0.0108489990234375, 0.0231170654296875, -0.04736328125, 0.007389068603515625, 0.006214141845703125, -0.019622802734375, -0.0175933837890625, 0.02935791015625, 0.07525634765625, -0.011505126953125, -0.039306640625, 0.01338958740234375, -0.0092315673828125, -0.0188140869140625, -0.030548095703125, 0.015472412109375, 0.017852783203125, 0.0478515625, 
0.030548095703125, -0.01078033447265625, 0.0086517333984375, -0.0233306884765625, -0.00022423267364501953, 0.038330078125, -0.0009613037109375, -0.01280975341796875, 0.06695556640625, -0.00901031494140625, -0.025146484375, 0.0587158203125, -0.020263671875, -0.037353515625, 0.04833984375, 0.0452880859375, 0.06951904296875, 0.0122833251953125, 0.01270294189453125, 0.033782958984375, 0.01678466796875, 0.017669677734375, 0.028289794921875, 0.00251007080078125, -0.05291748046875, -0.01197052001953125, -0.05126953125, -0.0086212158203125, 0.0115509033203125, -0.040985107421875, 0.01153564453125, -0.043243408203125, -0.0224761962890625, -0.0100250244140625, 0.025146484375, -0.0428466796875, 0.0018529891967773438, 0.0169830322265625, 0.036407470703125, -0.0740966796875, 0.07904052734375, 0.03973388671875, -0.04278564453125, -0.07708740234375, -0.02227783203125, -0.0143585205078125, -0.0655517578125, 0.05438232421875, 0.00316619873046875, 0.006420135498046875, 0.031005859375, -0.056793212890625, -0.0701904296875, 0.103515625, 0.0123291015625, -0.0309600830078125, 0.00701141357421875, 0.00870513916015625, 0.033172607421875, 0.004711151123046875, 0.0379638671875, 0.031829833984375, 0.0250396728515625, -0.007488250732421875, -0.061248779296875, 0.0299072265625, -0.0261077880859375, 0.01155853271484375, 0.00433349609375, -0.06494140625, 0.08734130859375, -0.0039215087890625, -0.016387939453125, 0.02374267578125, 0.07098388671875, 0.05126953125, 0.01012420654296875, 0.03619384765625, 0.053985595703125, 0.05035400390625, 0.007091522216796875, 0.05303955078125, -0.022552490234375, 0.05157470703125, 0.0528564453125, 0.0030155181884765625, 0.0684814453125, 0.028533935546875, -0.0291900634765625, 0.050933837890625, 0.0548095703125, -0.0224456787109375, 0.042816162109375, 0.0170745849609375, -0.0170745849609375, -0.0032176971435546875, 0.0169219970703125, -0.037689208984375, 0.02587890625, 0.00977325439453125, -0.01027679443359375, -0.002655029296875, 0.006626129150390625, 
0.001323699951171875, -0.02154541015625, -0.018280029296875, 0.034942626953125, 0.005046844482421875, -0.0176849365234375, 0.07659912109375, 0.00960540771484375, 0.06756591796875, -0.0335693359375, -0.004550933837890625, -0.0156402587890625, 0.01216888427734375, -0.0207672119140625, -0.03619384765625, 0.007213592529296875, -0.00542449951171875, -0.013214111328125, 0.01346588134765625, 0.0419921875, -0.025360107421875, -0.05413818359375, -0.002094268798828125, 0.0214996337890625, 0.0284271240234375, 0.00021958351135253906, -0.07073974609375, 0.015899658203125, 0.0205230712890625, -0.01837158203125, 0.01641845703125, 0.005641937255859375, 0.002620697021484375, 0.058441162109375, 0.055572509765625, -0.0097808837890625, 0.0330810546875, -0.0227203369140625, 0.057525634765625, -0.048309326171875, -0.0233917236328125, -0.06951904296875, 0.0460205078125, 0.00812530517578125, -0.04266357421875, 0.068603515625, 0.0413818359375, 0.05450439453125, -0.00789642333984375, 0.041717529296875, -0.014373779296875, 0.01522064208984375, -0.0469970703125, 0.06768798828125, -0.030029296875, 0.01018524169921875, -0.0006785392761230469, -0.06109619140625, 0.01091766357421875, 0.07293701171875, 0.0034885406494140625, 0.00634002685546875, 0.05291748046875, 0.0721435546875, 0.0190582275390625, -0.0093231201171875, 0.004528045654296875, 0.037872314453125, 0.030731201171875, 0.0667724609375, 0.061798095703125, -0.07940673828125, 0.042266845703125, -0.050628662109375, -0.00563812255859375, -0.007244110107421875, -0.04779052734375, -0.059295654296875, -0.045562744140625, -0.041412353515625, -0.055450439453125, -0.00632476806640625, 0.06500244140625, 0.0389404296875, -0.054107666015625, -0.0208587646484375, -0.01508331298828125, 0.010955810546875, -0.005634307861328125, -0.0191650390625, 0.039031982421875, -0.00411224365234375, -0.05615234375, 0.007427215576171875, -0.027496337890625, 0.02227783203125, -0.032135009765625, -0.0168914794921875, -0.016326904296875, -0.0003733634948730469, 
0.027313232421875, 0.0167388916015625, -0.06304931640625, -0.01020050048828125, -0.0025768280029296875, -0.0252532958984375, -0.002910614013671875, 0.01165008544921875, -0.0684814453125, 0.005889892578125, 0.027618408203125, 0.0117645263671875, 0.0345458984375, -0.032562255859375, 0.015411376953125, -0.037994384765625, 0.0135498046875, 0.006420135498046875, 0.0155181884765625, 0.007167816162109375, -0.04913330078125, 0.022247314453125, 0.01488494873046875, -0.0439453125, -0.07635498046875, -0.0051116943359375, -0.0882568359375, -0.02801513671875, 0.09765625, 0.00814056396484375, -0.0262603759765625, 0.0211944580078125, -0.03143310546875, 0.05609130859375, -0.0225067138671875, 0.048828125, 0.034332275390625, -0.007419586181640625, -0.010345458984375, -0.0298614501953125, 0.026275634765625, 0.040924072265625, -0.0440673828125, -0.01568603515625, 0.0170440673828125, 0.0268707275390625, 0.0115966796875, 0.052703857421875, 0.007843017578125, 0.0234832763671875, 0.0172271728515625, 0.0144195556640625, -0.01375579833984375, 0.006000518798828125, -0.042999267578125, -0.007045745849609375, 0.006038665771484375, -0.031585693359375 ] ]
Corianas/gpt-j-6B-Dolly
2023-03-29T09:27:20.000Z
[ "transformers", "pytorch", "gptj", "text-generation", "endpoints_compatible", "has_space", "region:us" ]
text-generation
Corianas
null
null
Corianas/gpt-j-6B-Dolly
1
5,860
transformers
2023-03-28T13:41:36
G.A.R.Y. (Guided Artificially Resourceful Yes-man) A clone of Dolly (https://github.com/databrickslabs/dolly) Trained on 8xa100s over the course of 45 minutes. (total time less than 3 hours with false starts and getting less optimal results while learning how best to proceed.) --- license: cc-by-nc-2.0 ---
311
[ [ -0.0186309814453125, -0.0567626953125, 0.0301971435546875, 0.0098724365234375, -0.0198822021484375, 0.0213775634765625, 0.0362548828125, -0.0186614990234375, 0.03924560546875, 0.0347900390625, -0.068115234375, -0.0018415451049804688, -0.028656005859375, 0.00304412841796875, -0.0648193359375, 0.089111328125, 0.01264190673828125, 0.01317596435546875, -0.0038242340087890625, -0.023193359375, -0.034637451171875, -0.023193359375, -0.03253173828125, -0.01461029052734375, 0.038177490234375, 0.0264739990234375, 0.0740966796875, 0.076416015625, 0.0237884521484375, 0.01422119140625, -0.007686614990234375, -0.0006780624389648438, -0.052703857421875, -0.0203094482421875, -0.0166473388671875, -0.0183868408203125, -0.035552978515625, -0.009033203125, 0.0147552490234375, 0.037261962890625, -0.00954437255859375, 0.04547119140625, -0.027679443359375, 0.06573486328125, -0.05084228515625, -0.0042266845703125, -0.031005859375, 0.014129638671875, 0.0005087852478027344, 0.00322723388671875, -0.02398681640625, -0.0433349609375, -0.01153564453125, -0.07733154296875, 0.04144287109375, 0.029510498046875, 0.0552978515625, 0.006587982177734375, -0.02337646484375, -0.0171356201171875, -0.06414794921875, 0.07171630859375, 0.0140228271484375, 0.01526641845703125, 0.025543212890625, 0.06256103515625, -0.030609130859375, -0.057586669921875, -0.048980712890625, -0.0231475830078125, -0.01444244384765625, -0.014373779296875, 0.0030059814453125, -0.01366424560546875, 0.055572509765625, 0.031982421875, -0.045684814453125, -0.031402587890625, -0.068603515625, -0.0195465087890625, 0.0723876953125, 0.029144287109375, 0.0127410888671875, -0.01434326171875, -0.044830322265625, -0.02545166015625, -0.03240966796875, -0.0146636962890625, 0.041290283203125, 0.0087890625, -0.015899658203125, 0.04949951171875, -0.035980224609375, 0.05841064453125, 0.0213165283203125, -0.0252227783203125, 0.038909912109375, -0.01477813720703125, -0.032257080078125, 0.009002685546875, 0.045745849609375, -0.005062103271484375, 
0.004547119140625, -0.01739501953125, -0.0277862548828125, 0.0187225341796875, 0.037353515625, -0.044769287109375, -0.04083251953125, 0.00994110107421875, -0.032257080078125, -0.0141754150390625, 0.004055023193359375, -0.0679931640625, -0.0364990234375, -0.04229736328125, 0.07501220703125, -0.0299530029296875, -0.0197601318359375, 0.0026874542236328125, -0.01715087890625, 0.02227783203125, 0.0260467529296875, -0.044525146484375, 0.0154571533203125, 0.03759765625, 0.06634521484375, 0.0061492919921875, 0.0054473876953125, -0.06707763671875, 0.01407623291015625, -0.033294677734375, 0.039825439453125, -0.0243988037109375, -0.061431884765625, -0.0197296142578125, 0.0172882080078125, 0.007045745849609375, -0.029510498046875, 0.0271759033203125, -0.04522705078125, 0.032318115234375, -0.0217742919921875, -0.0292205810546875, -0.03411865234375, 0.01120758056640625, -0.049896240234375, 0.073486328125, 0.053497314453125, -0.0178375244140625, 0.041046142578125, -0.0308837890625, -0.025146484375, 0.01666259765625, -0.037322998046875, -0.047821044921875, -0.006622314453125, 0.01849365234375, 0.036224365234375, -0.01678466796875, 0.006198883056640625, -0.01515960693359375, -0.0254669189453125, -0.01788330078125, 0.003833770751953125, 0.068603515625, 0.0273895263671875, -0.0179901123046875, 0.0187225341796875, -0.090087890625, 0.01654052734375, 0.0014095306396484375, -0.0098419189453125, 0.00341033935546875, -0.049713134765625, 0.0060882568359375, -0.0000680088996887207, -0.0004992485046386719, -0.03125, 0.040252685546875, 0.01303863525390625, -0.01120758056640625, 0.047088623046875, -0.0163116455078125, 0.032928466796875, -0.04840087890625, 0.0736083984375, -0.028533935546875, 0.0333251953125, -0.0036067962646484375, -0.048919677734375, -0.04888916015625, -0.04132080078125, 0.00527191162109375, 0.028778076171875, -0.06622314453125, 0.024322509765625, -0.0101318359375, -0.036834716796875, -0.040557861328125, 0.01885986328125, 0.044708251953125, 0.018218994140625, 
0.061737060546875, -0.0242156982421875, -0.025390625, -0.07366943359375, 0.0262908935546875, -0.0015535354614257812, -0.0267181396484375, 0.006877899169921875, 0.053192138671875, -0.0109100341796875, 0.0604248046875, -0.0401611328125, -0.046905517578125, -0.025665283203125, 0.02215576171875, 0.0227813720703125, 0.0224456787109375, 0.07672119140625, -0.0390625, -0.058258056640625, 0.006999969482421875, -0.046905517578125, 0.0173492431640625, -0.01248931884765625, -0.02337646484375, 0.0024623870849609375, 0.037841796875, -0.048980712890625, 0.06268310546875, 0.02978515625, -0.0306854248046875, 0.058441162109375, -0.043853759765625, 0.0200958251953125, -0.05682373046875, 0.01947021484375, -0.0148773193359375, -0.0185546875, -0.04302978515625, -0.0093231201171875, -0.00897216796875, -0.00513458251953125, -0.0277099609375, 0.041290283203125, -0.063232421875, -0.0172882080078125, -0.030487060546875, -0.01409912109375, -0.01180267333984375, 0.02581787109375, -0.0230712890625, 0.0743408203125, 0.043212890625, -0.055908203125, 0.045501708984375, 0.039337158203125, -0.02734375, 0.05609130859375, -0.04888916015625, 0.04168701171875, -0.029815673828125, 0.0152587890625, -0.04595947265625, -0.047088623046875, 0.0211334228515625, -0.037628173828125, 0.016387939453125, -0.0167694091796875, -0.0023479461669921875, -0.0309906005859375, -0.017059326171875, 0.026885986328125, 0.049591064453125, -0.053955078125, 0.0589599609375, 0.017822265625, 0.0267791748046875, -0.045623779296875, -0.02825927734375, -0.013824462890625, -0.0157012939453125, -0.0309295654296875, 0.0233306884765625, 0.0167694091796875, -0.02435302734375, 0.0029087066650390625, 0.01200103759765625, -0.004116058349609375, -0.027130126953125, 0.0143890380859375, 0.01412200927734375, 0.0108642578125, -0.003742218017578125, -0.0145111083984375, -0.01702880859375, 0.0011444091796875, 0.01445770263671875, 0.029022216796875, 0.01473236083984375, -0.030181884765625, -0.05084228515625, 0.020233154296875, 0.047637939453125, 
0.02117919921875, 0.055572509765625, 0.042236328125, -0.01230621337890625, -0.0094757080078125, -0.0274658203125, -0.00936126708984375, -0.0361328125, 0.021453857421875, -0.0205230712890625, -0.016357421875, 0.03125, 0.040008544921875, -0.00592041015625, 0.0241241455078125, 0.0272064208984375, 0.0284881591796875, 0.0655517578125, 0.049285888671875, -0.035247802734375, 0.031524658203125, -0.0531005859375, 0.007228851318359375, -0.032196044921875, -0.045166015625, -0.04034423828125, -0.03778076171875, -0.0260772705078125, -0.049713134765625, 0.034423828125, -0.006847381591796875, -0.0262603759765625, 0.039581298828125, -0.03369140625, 0.03057861328125, 0.042144775390625, 0.0190887451171875, -0.002475738525390625, -0.012359619140625, 0.0160064697265625, 0.0274658203125, -0.054931640625, -0.01027679443359375, 0.093505859375, -0.0017223358154296875, 0.0701904296875, -0.01007843017578125, 0.028594970703125, 0.0360107421875, 0.029449462890625, -0.052001953125, 0.049774169921875, 0.006866455078125, -0.0667724609375, -0.0186004638671875, -0.016571044921875, -0.07122802734375, -0.01192474365234375, -0.0266876220703125, -0.048248291015625, -0.0010862350463867188, 0.03857421875, -0.02655029296875, 0.02874755859375, -0.053955078125, 0.047088623046875, -0.0204620361328125, -0.036651611328125, -0.040313720703125, -0.050628662109375, 0.024322509765625, 0.0008559226989746094, 0.0218505859375, -0.0008873939514160156, 0.034759521484375, 0.04486083984375, -0.04364013671875, 0.007354736328125, -0.040740966796875, 0.0269317626953125, 0.041595458984375, -0.0009479522705078125, 0.043365478515625, -0.0004429817199707031, 0.0190887451171875, -0.01715087890625, -0.0157623291015625, -0.04730224609375, -0.04327392578125, 0.039886474609375, -0.0533447265625, -0.0271453857421875, -0.035369873046875, -0.035247802734375, -0.0027923583984375, -0.0171356201171875, -0.004413604736328125, 0.033721923828125, 0.0052642822265625, 0.028350830078125, 0.0487060546875, -0.0213470458984375, 
0.0221099853515625, -0.00213623046875, -0.0443115234375, -0.03826904296875, 0.06561279296875, -0.009124755859375, 0.0252838134765625, 0.0188140869140625, 0.01186370849609375, -0.0189056396484375, -0.0261383056640625, -0.0592041015625, 0.01399993896484375, -0.02069091796875, -0.0120697021484375, -0.0445556640625, -0.01189422607421875, -0.014068603515625, -0.0248565673828125, -0.03717041015625, -0.0416259765625, -0.0238800048828125, 0.0006661415100097656, 0.0623779296875, 0.07806396484375, -0.003322601318359375, 0.04376220703125, -0.053375244140625, 0.05279541015625, 0.05169677734375, 0.005298614501953125, -0.0224151611328125, -0.05029296875, -0.024932861328125, 0.00519561767578125, -0.028350830078125, -0.039886474609375, 0.0284881591796875, 0.00211334228515625, 0.062042236328125, 0.019073486328125, -0.015167236328125, 0.06134033203125, -0.006038665771484375, 0.06494140625, 0.0270233154296875, -0.043426513671875, 0.034393310546875, -0.0390625, 0.027679443359375, 0.0240631103515625, 0.04791259765625, -0.0052337646484375, -0.008697509765625, -0.03863525390625, -0.059234619140625, 0.049224853515625, 0.0158843994140625, -0.0053558349609375, 0.0236663818359375, 0.030853271484375, 0.0271453857421875, 0.016510009765625, -0.0250701904296875, -0.0303497314453125, -0.0161285400390625, 0.00992584228515625, 0.0005359649658203125, -0.01250457763671875, -0.0165252685546875, -0.029327392578125, 0.0712890625, -0.026611328125, 0.04296875, 0.0021495819091796875, -0.0083770751953125, -0.0117340087890625, 0.0011720657348632812, 0.04443359375, 0.06988525390625, -0.0347900390625, -0.03472900390625, 0.010467529296875, -0.06842041015625, 0.0228118896484375, 0.00074005126953125, -0.00919342041015625, -0.006168365478515625, 0.017120361328125, 0.051727294921875, -0.020660400390625, -0.0113525390625, 0.01708984375, -0.011505126953125, -0.02606201171875, -0.01326751708984375, 0.034423828125, -0.026611328125, 0.0230712890625, 0.04241943359375, -0.0140228271484375, -0.006549835205078125, 
-0.03948974609375, 0.01763916015625, 0.0304107666015625, -0.029754638671875, -0.0377197265625, 0.0673828125, 0.0048370361328125, -0.03924560546875, 0.048980712890625, -0.040374755859375, -0.017578125, 0.050567626953125, 0.00022494792938232422, 0.0771484375, -0.01629638671875, 0.0367431640625, 0.04656982421875, -0.006603240966796875, -0.0246734619140625, 0.033935546875, 0.0118408203125, -0.01410675048828125, -0.031219482421875, -0.049468994140625, -0.034942626953125, 0.032135009765625, -0.074462890625, 0.057373046875, -0.047027587890625, 0.0004875659942626953, 0.0022487640380859375, -0.0058135986328125, -0.0494384765625, 0.0457763671875, -0.0208892822265625, 0.064697265625, -0.0478515625, 0.05816650390625, 0.0640869140625, -0.0034351348876953125, -0.042633056640625, -0.021087646484375, 0.01092529296875, -0.0682373046875, 0.039825439453125, 0.01580810546875, -0.0167388916015625, -0.017333984375, -0.043060302734375, -0.06268310546875, 0.10162353515625, 0.0188446044921875, -0.04974365234375, 0.0247802734375, -0.0116119384765625, 0.051025390625, -0.0160980224609375, 0.0270233154296875, 0.037994384765625, 0.01410675048828125, 0.0282440185546875, -0.0777587890625, -0.02587890625, -0.00788116455078125, -0.005252838134765625, 0.017913818359375, -0.059906005859375, 0.051300048828125, -0.0379638671875, 0.00479888916015625, 0.02001953125, 0.06988525390625, 0.0198516845703125, 0.0191192626953125, 0.052001953125, 0.04791259765625, 0.05023193359375, 0.003734588623046875, 0.07952880859375, -0.026153564453125, 0.0295257568359375, 0.055999755859375, 0.00606536865234375, 0.0079345703125, 0.0290679931640625, -0.033905029296875, 0.02679443359375, 0.010345458984375, 0.004150390625, 0.0560302734375, 0.0237274169921875, -0.02435302734375, -0.0160369873046875, 0.004322052001953125, -0.05120849609375, 0.0187530517578125, 0.02008056640625, -0.01222991943359375, -0.01125335693359375, -0.003688812255859375, 0.0027446746826171875, 0.008331298828125, -0.0308837890625, 0.023529052734375, 
-0.02545166015625, -0.03094482421875, 0.042999267578125, -0.0281219482421875, 0.01448822021484375, -0.0589599609375, -0.0094757080078125, -0.0235137939453125, 0.0295562744140625, -0.0036945343017578125, -0.04949951171875, 0.0209808349609375, -0.01097869873046875, -0.00904083251953125, -0.0333251953125, 0.034576416015625, -0.0101776123046875, -0.05224609375, 0.02386474609375, 0.02008056640625, 0.0005688667297363281, 0.022613525390625, -0.03662109375, 0.00037169456481933594, 0.00974273681640625, -0.03662109375, 0.044403076171875, 0.0172119140625, -0.00754547119140625, 0.07916259765625, 0.0301513671875, -0.0189666748046875, 0.0021915435791015625, -0.0198211669921875, 0.0775146484375, -0.04638671875, -0.031768798828125, -0.0300445556640625, 0.025604248046875, 0.003131866455078125, -0.04376220703125, 0.057647705078125, 0.05804443359375, 0.0706787109375, -0.0330810546875, 0.05517578125, -0.0357666015625, 0.0517578125, -0.04510498046875, 0.024261474609375, -0.05023193359375, -0.002185821533203125, -0.01036834716796875, -0.070068359375, -0.007785797119140625, 0.049346923828125, 0.004680633544921875, 0.020111083984375, 0.03924560546875, 0.0787353515625, -0.0063323974609375, 0.004024505615234375, 0.0211944580078125, -0.0037326812744140625, 0.0318603515625, 0.01557159423828125, 0.04937744140625, -0.029205322265625, 0.04864501953125, -0.0361328125, -0.0276031494140625, -0.0171051025390625, -0.055999755859375, -0.07171630859375, 0.00023746490478515625, -0.024444580078125, -0.0306549072265625, 0.022247314453125, 0.07427978515625, 0.05450439453125, -0.07232666015625, -0.020355224609375, 0.00264739990234375, -0.0036144256591796875, -0.0096435546875, -0.0163116455078125, 0.035491943359375, 0.0021610260009765625, -0.043731689453125, 0.028350830078125, 0.018035888671875, 0.00876617431640625, -0.04473876953125, -0.005279541015625, -0.0220184326171875, -0.0202178955078125, 0.0223388671875, 0.029144287109375, -0.003467559814453125, -0.0467529296875, 0.01009368896484375, 
0.01519775390625, -0.005313873291015625, 0.05316162109375, -0.0775146484375, 0.044189453125, 0.03314208984375, 0.0241546630859375, 0.049591064453125, 0.031280517578125, 0.03875732421875, -0.0307159423828125, 0.0277862548828125, 0.00333404541015625, 0.00795745849609375, 0.017669677734375, -0.040771484375, 0.045196533203125, 0.045379638671875, -0.06561279296875, -0.04364013671875, 0.0085906982421875, -0.0760498046875, 0.01506805419921875, 0.09832763671875, 0.007598876953125, -0.03851318359375, -0.0189666748046875, -0.034759521484375, -0.007598876953125, -0.0296173095703125, 0.0958251953125, 0.0156707763671875, -0.0267486572265625, 0.0282135009765625, -0.056396484375, 0.035308837890625, 0.0004963874816894531, -0.060089111328125, -0.0293121337890625, 0.03900146484375, 0.022552490234375, 0.033935546875, 0.0501708984375, 0.00997161865234375, 0.027252197265625, 0.02685546875, 0.004550933837890625, -0.00930023193359375, -0.048431396484375, -0.0018510818481445312, 0.008697509765625, -0.0296783447265625, -0.0443115234375 ] ]
stablediffusionapi/meinaalter
2023-04-30T11:56:06.000Z
[ "diffusers", "stablediffusionapi.com", "stable-diffusion-api", "text-to-image", "ultra-realistic", "license:creativeml-openrail-m", "endpoints_compatible", "has_space", "diffusers:StableDiffusionPipeline", "region:us" ]
text-to-image
stablediffusionapi
null
null
stablediffusionapi/meinaalter
1
5,859
diffusers
2023-04-30T11:55:30
--- license: creativeml-openrail-m tags: - stablediffusionapi.com - stable-diffusion-api - text-to-image - ultra-realistic pinned: true --- # MeinaAlter API Inference ![generated from stablediffusionapi.com](https://pub-8b49af329fae499aa563997f5d4068a4.r2.dev/generations/10066656521682855700.png) ## Get API Key Get API key from [Stable Diffusion API](http://stablediffusionapi.com/), No Payment needed. Replace Key in below code, change **model_id** to "meinaalter" Coding in PHP/Node/Java etc? Have a look at docs for more code examples: [View docs](https://stablediffusionapi.com/docs) Model link: [View model](https://stablediffusionapi.com/models/meinaalter) Credits: [View credits](https://civitai.com/?query=MeinaAlter) View all models: [View Models](https://stablediffusionapi.com/models) import requests import json url = "https://stablediffusionapi.com/api/v3/dreambooth" payload = json.dumps({ "key": "", "model_id": "meinaalter", "prompt": "actual 8K portrait photo of gareth person, portrait, happy colors, bright eyes, clear eyes, warm smile, smooth soft skin, big dreamy eyes, beautiful intricate colored hair, symmetrical, anime wide eyes, soft lighting, detailed face, by makoto shinkai, stanley artgerm lau, wlop, rossdraws, concept art, digital painting, looking into camera", "negative_prompt": "painting, extra fingers, mutated hands, poorly drawn hands, poorly drawn face, deformed, ugly, blurry, bad anatomy, bad proportions, extra limbs, cloned face, skinny, glitchy, double torso, extra arms, extra hands, mangled fingers, missing lips, ugly face, distorted face, extra legs, anime", "width": "512", "height": "512", "samples": "1", "num_inference_steps": "30", "safety_checker": "no", "enhance_prompt": "yes", "seed": None, "guidance_scale": 7.5, "multi_lingual": "no", "panorama": "no", "self_attention": "no", "upscale": "no", "embeddings": "embeddings_model_id", "lora": "lora_model_id", "webhook": None, "track_id": None }) headers = { 'Content-Type': 
'application/json' } response = requests.request("POST", url, headers=headers, data=payload) print(response.text) > Use this coupon code to get 25% off **DMGG0RBN**
2,404
[ [ -0.0401611328125, -0.057464599609375, 0.040252685546875, 0.021484375, -0.033111572265625, -0.00530242919921875, 0.0186767578125, -0.030364990234375, 0.03570556640625, 0.0440673828125, -0.0660400390625, -0.07159423828125, -0.029022216796875, -0.007171630859375, -0.01418304443359375, 0.043212890625, 0.00555419921875, -0.003047943115234375, -0.00783538818359375, 0.005115509033203125, -0.01378631591796875, -0.01035308837890625, -0.0526123046875, -0.01445770263671875, 0.0201873779296875, -0.006748199462890625, 0.03961181640625, 0.039306640625, 0.021575927734375, 0.021240234375, -0.01548004150390625, -0.00901031494140625, -0.0229949951171875, -0.008880615234375, -0.012603759765625, -0.0517578125, -0.042724609375, -0.0157928466796875, 0.0266571044921875, 0.035430908203125, 0.0005235671997070312, 0.034149169921875, 0.000743865966796875, 0.056304931640625, -0.05767822265625, 0.0190277099609375, -0.025238037109375, 0.0125732421875, -0.0029144287109375, 0.007633209228515625, -0.01611328125, -0.025238037109375, -0.01158905029296875, -0.05316162109375, 0.0171051025390625, 0.0005984306335449219, 0.1019287109375, 0.02276611328125, -0.0208282470703125, -0.0115509033203125, -0.03277587890625, 0.061920166015625, -0.08489990234375, 0.0267486572265625, 0.0430908203125, 0.00970458984375, -0.0003509521484375, -0.057769775390625, -0.04595947265625, 0.0263671875, 0.0111846923828125, 0.017791748046875, -0.0284881591796875, -0.007114410400390625, 0.0182952880859375, 0.0255584716796875, -0.0345458984375, -0.01666259765625, -0.0280914306640625, -0.01055145263671875, 0.0478515625, 0.01154327392578125, 0.01512908935546875, -0.033966064453125, -0.0250701904296875, -0.0233001708984375, -0.031402587890625, 0.018585205078125, 0.047515869140625, 0.0237579345703125, -0.042877197265625, 0.02813720703125, -0.034820556640625, 0.056640625, 0.0098724365234375, -0.00653076171875, 0.04937744140625, -0.00789642333984375, -0.0188751220703125, -0.01349639892578125, 0.07098388671875, 0.04498291015625, 
-0.004886627197265625, 0.021697998046875, -0.0079803466796875, 0.0017385482788085938, 0.005863189697265625, -0.0836181640625, -0.00733184814453125, 0.05389404296875, -0.060821533203125, -0.041961669921875, 0.0081939697265625, -0.08294677734375, -0.01436614990234375, 0.0006546974182128906, 0.03021240234375, -0.0196990966796875, -0.036224365234375, 0.0216217041015625, -0.01439666748046875, 0.0108642578125, 0.0107574462890625, -0.054779052734375, 0.0034160614013671875, 0.0404052734375, 0.062255859375, 0.0132598876953125, -0.004756927490234375, 0.013824462890625, 0.010955810546875, -0.024169921875, 0.06396484375, -0.01236724853515625, -0.037200927734375, -0.00766754150390625, 0.01739501953125, -0.0015592575073242188, -0.0396728515625, 0.048187255859375, -0.05511474609375, -0.00855255126953125, -0.01369476318359375, -0.0311431884765625, -0.034820556640625, 0.01166534423828125, -0.041595458984375, 0.0435791015625, 0.0164031982421875, -0.052703857421875, 0.01160430908203125, -0.053802490234375, -0.0166168212890625, -0.0053253173828125, 0.00908660888671875, -0.03643798828125, -0.00835418701171875, 0.010711669921875, 0.018218994140625, -0.007049560546875, -0.0007681846618652344, -0.060211181640625, -0.02191162109375, 0.0188751220703125, -0.02587890625, 0.093017578125, 0.032684326171875, -0.0213165283203125, -0.00119781494140625, -0.0723876953125, 0.0177764892578125, 0.0389404296875, -0.0212554931640625, -0.0189666748046875, -0.0106048583984375, 0.00787353515625, 0.0010089874267578125, 0.019378662109375, -0.037841796875, 0.006626129150390625, -0.033355712890625, 0.0309600830078125, 0.046142578125, 0.026947021484375, 0.01678466796875, -0.018280029296875, 0.051177978515625, 0.00926971435546875, 0.02935791015625, 0.001598358154296875, -0.0430908203125, -0.044647216796875, -0.0257415771484375, 0.025390625, 0.03363037109375, -0.031524658203125, 0.0384521484375, -0.014678955078125, -0.05218505859375, -0.0469970703125, 0.0028533935546875, 0.0257110595703125, 0.039306640625, 
0.01058197021484375, -0.017791748046875, -0.0458984375, -0.07525634765625, -0.0026493072509765625, -0.01654052734375, -0.01039886474609375, 0.024139404296875, 0.039093017578125, -0.033477783203125, 0.059234619140625, -0.0611572265625, -0.0029201507568359375, 0.002750396728515625, 0.0020236968994140625, 0.060089111328125, 0.048553466796875, 0.057830810546875, -0.06927490234375, -0.034637451171875, -0.028411865234375, -0.05792236328125, -0.0022792816162109375, 0.0182342529296875, -0.041717529296875, -0.0040283203125, 0.01175689697265625, -0.054779052734375, 0.04364013671875, 0.03863525390625, -0.046661376953125, 0.0369873046875, -0.0008258819580078125, 0.037506103515625, -0.0882568359375, 0.0003447532653808594, 0.004138946533203125, -0.0164947509765625, -0.0304107666015625, 0.0310821533203125, 0.005916595458984375, -0.0035858154296875, -0.055328369140625, 0.04852294921875, -0.0211944580078125, 0.01049041748046875, -0.02545166015625, 0.00008511543273925781, 0.0230560302734375, 0.0396728515625, -0.007781982421875, 0.030914306640625, 0.045989990234375, -0.0421142578125, 0.0310821533203125, 0.0257110595703125, -0.0330810546875, 0.04669189453125, -0.047637939453125, 0.00844573974609375, -0.0011682510375976562, 0.027984619140625, -0.07879638671875, -0.03985595703125, 0.039154052734375, -0.04522705078125, -0.0048980712890625, -0.0452880859375, -0.042572021484375, -0.050750732421875, -0.02386474609375, 0.0205535888671875, 0.059295654296875, -0.036102294921875, 0.056427001953125, 0.0171661376953125, 0.01512908935546875, -0.0382080078125, -0.06622314453125, -0.0247955322265625, -0.0243377685546875, -0.04779052734375, 0.018798828125, -0.01448822021484375, -0.0263214111328125, 0.01003265380859375, 0.0020847320556640625, -0.0232696533203125, 0.0016450881958007812, 0.035247802734375, 0.033294677734375, -0.01947021484375, -0.018157958984375, 0.018829345703125, -0.004085540771484375, 0.015045166015625, -0.0165252685546875, 0.06646728515625, -0.007808685302734375, -0.05078125, 
-0.062042236328125, 0.01025390625, 0.04974365234375, 0.00522613525390625, 0.046661376953125, 0.04364013671875, -0.045867919921875, -0.0019388198852539062, -0.038604736328125, -0.0176544189453125, -0.037841796875, 0.01342010498046875, -0.032501220703125, -0.0255889892578125, 0.0728759765625, 0.0041961669921875, -0.0024261474609375, 0.048675537109375, 0.03741455078125, -0.01097869873046875, 0.10198974609375, 0.0098419189453125, 0.007511138916015625, 0.0203704833984375, -0.0511474609375, 0.0025539398193359375, -0.06512451171875, -0.01175689697265625, -0.0259552001953125, -0.03204345703125, -0.02862548828125, -0.0240020751953125, 0.005344390869140625, 0.027435302734375, -0.037567138671875, 0.0183258056640625, -0.041107177734375, 0.0258331298828125, 0.0504150390625, 0.02685546875, 0.00927734375, -0.00313568115234375, -0.00487518310546875, 0.00476837158203125, -0.0243377685546875, -0.0273284912109375, 0.0745849609375, 0.0115203857421875, 0.052520751953125, 0.01092529296875, 0.0513916015625, 0.01482391357421875, 0.001094818115234375, -0.04534912109375, 0.038482666015625, 0.01203155517578125, -0.0753173828125, 0.01323699951171875, -0.01464080810546875, -0.07122802734375, 0.033721923828125, -0.0101165771484375, -0.061248779296875, 0.0513916015625, 0.01380157470703125, -0.04644775390625, 0.038238525390625, -0.051483154296875, 0.059295654296875, -0.006710052490234375, -0.04241943359375, -0.0168609619140625, -0.035797119140625, 0.038360595703125, -0.005069732666015625, 0.04339599609375, -0.042999267578125, -0.0074310302734375, 0.053375244140625, -0.0386962890625, 0.07763671875, -0.033355712890625, 0.001285552978515625, 0.048126220703125, 0.00734710693359375, 0.0247955322265625, 0.0262298583984375, -0.0211181640625, 0.02642822265625, 0.0169677734375, -0.0439453125, -0.03302001953125, 0.06097412109375, -0.0537109375, -0.040435791015625, -0.0167083740234375, -0.02099609375, -0.0086517333984375, 0.03265380859375, 0.03497314453125, 0.0162811279296875, 0.0079193115234375, 
-0.0001589059829711914, 0.04119873046875, -0.00783538818359375, 0.034698486328125, 0.0223846435546875, -0.045867919921875, -0.05072021484375, 0.048095703125, -0.01459503173828125, 0.021728515625, 0.0011835098266601562, 0.020843505859375, -0.03009033203125, -0.0384521484375, -0.040740966796875, 0.0216217041015625, -0.06072998046875, -0.0300445556640625, -0.05621337890625, -0.00576019287109375, -0.05316162109375, -0.0142364501953125, -0.051971435546875, -0.0178375244140625, -0.038726806640625, -0.005542755126953125, 0.044830322265625, 0.0169525146484375, -0.0164947509765625, 0.0282440185546875, -0.060943603515625, 0.0224456787109375, 0.01197052001953125, 0.025634765625, 0.0223846435546875, -0.041015625, -0.00672149658203125, 0.0215911865234375, -0.0296478271484375, -0.072265625, 0.039703369140625, -0.01190948486328125, 0.0286865234375, 0.0743408203125, 0.0134735107421875, 0.06744384765625, -0.00812530517578125, 0.0631103515625, 0.0179290771484375, -0.06689453125, 0.06634521484375, -0.04345703125, 0.0143890380859375, 0.05389404296875, 0.0218658447265625, -0.027008056640625, -0.0178680419921875, -0.060943603515625, -0.08380126953125, 0.0419921875, 0.01178741455078125, 0.029083251953125, 0.003814697265625, 0.0303192138671875, -0.007537841796875, 0.0265655517578125, -0.06414794921875, -0.040496826171875, -0.01380157470703125, -0.01393890380859375, 0.032501220703125, -0.006549835205078125, -0.023468017578125, -0.034210205078125, 0.06829833984375, 0.0008664131164550781, 0.0243377685546875, 0.017303466796875, 0.0181732177734375, -0.01502227783203125, 0.0019168853759765625, 0.0276031494140625, 0.05322265625, -0.036346435546875, -0.00995635986328125, -0.0023441314697265625, -0.02545166015625, 0.006023406982421875, 0.007568359375, -0.036102294921875, 0.004764556884765625, 0.02606201171875, 0.07281494140625, 0.0038852691650390625, -0.04364013671875, 0.04132080078125, -0.005313873291015625, -0.031829833984375, -0.035736083984375, 0.01617431640625, 0.03216552734375, 
0.046173095703125, 0.027618408203125, 0.0159149169921875, 0.022613525390625, -0.0205078125, -0.00543212890625, 0.027008056640625, -0.0196990966796875, -0.0253143310546875, 0.08013916015625, -0.008514404296875, -0.03814697265625, 0.03057861328125, -0.0296630859375, -0.021759033203125, 0.061065673828125, 0.048614501953125, 0.0634765625, -0.008453369140625, 0.01611328125, 0.06005859375, -0.00710296630859375, -0.00490570068359375, 0.045654296875, 0.0001399517059326172, -0.048675537109375, -0.00827789306640625, -0.0728759765625, -0.0123138427734375, 0.0216217041015625, -0.06195068359375, 0.03570556640625, -0.04864501953125, -0.0285797119140625, -0.0102996826171875, -0.0226898193359375, -0.042388916015625, 0.0184478759765625, 0.007511138916015625, 0.0615234375, -0.056793212890625, 0.041717529296875, 0.045013427734375, -0.03839111328125, -0.0689697265625, -0.00321197509765625, 0.01461029052734375, -0.059417724609375, 0.033843994140625, 0.0008559226989746094, 0.004550933837890625, 0.01140594482421875, -0.0516357421875, -0.0770263671875, 0.07763671875, 0.01294708251953125, -0.0296478271484375, -0.0009794235229492188, -0.00968170166015625, 0.034332275390625, -0.0194091796875, 0.021087646484375, 0.02325439453125, 0.0406494140625, 0.0192413330078125, -0.0267791748046875, 0.007183074951171875, -0.035491943359375, 0.004665374755859375, -0.00876617431640625, -0.05914306640625, 0.0633544921875, -0.0173492431640625, 0.0007586479187011719, 0.01300811767578125, 0.049591064453125, 0.052001953125, 0.032440185546875, 0.05078125, 0.06365966796875, 0.037841796875, -0.0018720626831054688, 0.0771484375, -0.0262603759765625, 0.05499267578125, 0.059906005859375, -0.006343841552734375, 0.0736083984375, 0.034149169921875, -0.038055419921875, 0.0628662109375, 0.078369140625, -0.0310821533203125, 0.06390380859375, 0.005008697509765625, -0.03082275390625, -0.01093292236328125, 0.011474609375, -0.04498291015625, 0.024871826171875, 0.0205078125, -0.031890869140625, -0.00377655029296875, 
0.007602691650390625, 0.0013704299926757812, -0.0189666748046875, -0.017791748046875, 0.033966064453125, 0.0020580291748046875, -0.03070068359375, 0.056732177734375, -0.005260467529296875, 0.0689697265625, -0.040924072265625, 0.006313323974609375, -0.0054779052734375, 0.0245513916015625, -0.0187530517578125, -0.039215087890625, 0.01474761962890625, -0.00972747802734375, -0.0164337158203125, 0.0032024383544921875, 0.049224853515625, 0.0010986328125, -0.0574951171875, 0.0300445556640625, 0.0254669189453125, 0.0218048095703125, -0.0092010498046875, -0.07037353515625, 0.01512908935546875, 0.02276611328125, -0.04217529296875, 0.0019855499267578125, 0.0099945068359375, 0.029449462890625, 0.042388916015625, 0.060089111328125, 0.019195556640625, 0.006061553955078125, 0.00672149658203125, 0.042724609375, -0.037506103515625, -0.045379638671875, -0.07733154296875, 0.04559326171875, -0.0175628662109375, -0.012725830078125, 0.046844482421875, 0.057464599609375, 0.058929443359375, -0.030792236328125, 0.061431884765625, -0.016754150390625, 0.037567138671875, -0.0272216796875, 0.0701904296875, -0.06707763671875, 0.0046234130859375, -0.0266265869140625, -0.0565185546875, -0.0125885009765625, 0.048553466796875, -0.01372528076171875, 0.0183258056640625, 0.03289794921875, 0.060272216796875, -0.0230712890625, 0.0010204315185546875, 0.0038166046142578125, 0.0159149169921875, 0.0123138427734375, 0.0206146240234375, 0.0421142578125, -0.044708251953125, 0.028839111328125, -0.058013916015625, -0.0087432861328125, -0.01099395751953125, -0.05206298828125, -0.05377197265625, -0.030364990234375, -0.03656005859375, -0.0574951171875, -0.01398468017578125, 0.05682373046875, 0.07232666015625, -0.07098388671875, -0.007526397705078125, -0.0111541748046875, 0.007404327392578125, -0.0176544189453125, -0.0240478515625, 0.035736083984375, 0.0158538818359375, -0.08551025390625, 0.012603759765625, -0.00811767578125, 0.0282135009765625, -0.0037403106689453125, 0.006656646728515625, -0.0111083984375, 
0.00677490234375, 0.02130126953125, 0.0171051025390625, -0.06378173828125, -0.003597259521484375, -0.01380157470703125, 0.0009026527404785156, 0.0207061767578125, 0.01554107666015625, -0.032440185546875, 0.0196990966796875, 0.049285888671875, 0.0205535888671875, 0.0418701171875, 0.00519561767578125, 0.00481414794921875, -0.02606201171875, 0.0262451171875, 0.00804901123046875, 0.044769287109375, 0.011505126953125, -0.043243408203125, 0.034423828125, 0.035919189453125, -0.0285797119140625, -0.06756591796875, 0.0037288665771484375, -0.07525634765625, -0.037322998046875, 0.07568359375, -0.01456451416015625, -0.049530029296875, 0.01558685302734375, -0.01042938232421875, 0.0282135009765625, -0.0234222412109375, 0.050872802734375, 0.052947998046875, -0.0205535888671875, -0.006778717041015625, -0.06268310546875, 0.00954437255859375, 0.0165863037109375, -0.06396484375, -0.0142364501953125, 0.032012939453125, 0.041351318359375, 0.034912109375, 0.04449462890625, -0.02764892578125, 0.0142059326171875, 0.0211639404296875, 0.0292816162109375, 0.00672149658203125, 0.027435302734375, -0.0158233642578125, 0.0052032470703125, -0.011688232421875, -0.03790283203125 ] ]
Corianas/Quokka_256m
2023-07-22T06:24:50.000Z
[ "transformers", "pytorch", "tensorboard", "safetensors", "gpt2", "text-generation", "en", "dataset:guanaco/guanaco", "arxiv:1910.09700", "license:apache-2.0", "endpoints_compatible", "has_space", "text-generation-inference", "region:us" ]
text-generation
Corianas
null
null
Corianas/Quokka_256m
1
5,858
transformers
2023-04-04T12:12:08
--- license: apache-2.0 datasets: - guanaco/guanaco language: - en --- --- # Model Card for Model ID This is a finetuned model of Cerebras 256M model using DataBricksLabs Dolly Framework ## Model Details ### Model Description <!-- Provide a longer summary of what this model is. --> - **Developed by:** Finetuned by Corianas (me) using open source tools - **Shared by [optional]:** [More Information Needed] - **Model type:** [More Information Needed] - **Language(s) (NLP):** EN - **License:** cc-by-nc-4.0 - **Finetuned from model [optional]:** https://huggingface.co/cerebras/Cerebras-GPT-256M ## Uses This is a simple GPT chatbot that has been finetuned to understand instructions. Its knowledge about facts about the world is should be considered almost non-existant. ### Direct Use <!-- This section is for the model use without fine-tuning or plugging into a larger ecosystem/app. --> [More Information Needed] ### Downstream Use [optional] <!-- This section is for the model use when fine-tuned for a task, or when plugged into a larger ecosystem/app --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the model will not work well for. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users (both direct and downstream) should be made aware of the risks, biases and limitations of the model. More information needed for further recommendations. ## How to Get Started with the Model Use the code below to get started with the model. 
[More Information Needed] ## Training Details ### Training Data <!-- This should link to a Data Card, perhaps with a short stub of information on what the training data is all about as well as documentation related to data pre-processing or additional filtering. --> [More Information Needed] ### Training Procedure <!-- This relates heavily to the Technical Specifications. Content here should link to that section when it is relevant to the training procedure. --> #### Preprocessing [optional] [More Information Needed] #### Training Hyperparameters - **Training regime:** [More Information Needed] <!--fp32, fp16 mixed precision, bf16 mixed precision, bf16 non-mixed precision, fp16 non-mixed precision, fp8 mixed precision --> #### Speeds, Sizes, Times [optional] <!-- This section provides information about throughput, start/end time, checkpoint size if relevant, etc. --> [More Information Needed] ## Evaluation <!-- This section describes the evaluation protocols and provides the results. --> ### Testing Data, Factors & Metrics #### Testing Data <!-- This should link to a Data Card if possible. --> [More Information Needed] #### Factors <!-- These are the things the evaluation is disaggregating by, e.g., subpopulations or domains. --> [More Information Needed] #### Metrics <!-- These are the evaluation metrics being used, ideally with a description of why. --> [More Information Needed] ### Results ----------- #### Summary ## Model Examination [optional] <!-- Relevant interpretability work for the model goes here --> [More Information Needed] ## Environmental Impact <!-- Total emissions (in grams of CO2eq) and additional considerations, such as electricity usage, go here. Edit the suggested text below accordingly --> Carbon emissions can be estimated using the [Machine Learning Impact calculator](https://mlco2.github.io/impact#compute) presented in [Lacoste et al. (2019)](https://arxiv.org/abs/1910.09700). 
- **Hardware Type:** [More Information Needed] - **Hours used:** [More Information Needed] - **Cloud Provider:** [More Information Needed] - **Compute Region:** [More Information Needed] - **Carbon Emitted:** [More Information Needed] ## Technical Specifications [optional] ### Model Architecture and Objective [More Information Needed] ### Compute Infrastructure [More Information Needed] #### Hardware [More Information Needed] #### Software [More Information Needed] ## Citation [optional] <!-- If there is a paper or blog post introducing the model, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the model or model card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Model Card Authors [optional] [More Information Needed] ## Model Card Contact [More Information Needed]
4,831
[ [ -0.041961669921875, -0.05975341796875, 0.032562255859375, 0.010467529296875, -0.0094146728515625, -0.0220947265625, 0.00908660888671875, -0.034759521484375, 0.0097198486328125, 0.04547119140625, -0.05035400390625, -0.044036865234375, -0.046417236328125, -0.022705078125, -0.028289794921875, 0.08740234375, 0.0086517333984375, 0.016021728515625, -0.02093505859375, 0.01482391357421875, -0.0440673828125, -0.047271728515625, -0.051483154296875, -0.02655029296875, 0.0271759033203125, 0.0284271240234375, 0.05303955078125, 0.048126220703125, 0.04937744140625, 0.0226898193359375, -0.0258331298828125, -0.007617950439453125, -0.037322998046875, -0.0303802490234375, -0.0205841064453125, -0.01197052001953125, -0.05780029296875, -0.0037078857421875, 0.03985595703125, 0.0479736328125, -0.00942230224609375, 0.043792724609375, 0.0021953582763671875, 0.03900146484375, -0.051177978515625, 0.0213775634765625, -0.044586181640625, 0.0023632049560546875, -0.0076904296875, 0.0012950897216796875, -0.0232086181640625, -0.0176544189453125, -0.00849151611328125, -0.04266357421875, 0.017974853515625, 0.0134124755859375, 0.085205078125, 0.0126953125, -0.03179931640625, -0.0206756591796875, -0.0667724609375, 0.051727294921875, -0.040283203125, 0.021697998046875, 0.0338134765625, 0.03558349609375, -0.00777435302734375, -0.0623779296875, -0.026123046875, -0.017608642578125, -0.004390716552734375, 0.0204925537109375, -0.0152740478515625, 0.00844573974609375, 0.039703369140625, 0.037322998046875, -0.04339599609375, -0.0021724700927734375, -0.0462646484375, -0.0269317626953125, 0.0589599609375, 0.04168701171875, 0.007610321044921875, -0.0209197998046875, -0.028228759765625, -0.023162841796875, -0.024810791015625, 0.0098419189453125, 0.03759765625, 0.02197265625, -0.03668212890625, 0.044830322265625, -0.01253509521484375, 0.0496826171875, 0.00006985664367675781, -0.0008273124694824219, 0.018707275390625, -0.04010009765625, -0.0306243896484375, -0.007965087890625, 0.05419921875, 0.0255889892578125, 
-0.01361846923828125, 0.01055145263671875, -0.00897216796875, -0.0017547607421875, 0.027496337890625, -0.06805419921875, -0.0290679931640625, 0.028228759765625, -0.048095703125, -0.0251922607421875, -0.00565338134765625, -0.08013916015625, -0.0022602081298828125, -0.036712646484375, 0.034332275390625, -0.0276947021484375, -0.0195465087890625, -0.004848480224609375, -0.01343536376953125, 0.01406097412109375, 0.0225677490234375, -0.06854248046875, 0.046966552734375, 0.033721923828125, 0.05364990234375, 0.004795074462890625, -0.0065460205078125, 0.003093719482421875, 0.00029015541076660156, -0.002227783203125, 0.038909912109375, -0.029327392578125, -0.051116943359375, -0.01326751708984375, 0.014984130859375, -0.0061492919921875, -0.01410675048828125, 0.058990478515625, -0.035400390625, 0.020111083984375, -0.01517486572265625, -0.049530029296875, -0.033966064453125, 0.018096923828125, -0.0570068359375, 0.083251953125, 0.007305145263671875, -0.06365966796875, 0.00933074951171875, -0.07366943359375, -0.01419830322265625, 0.00670623779296875, 0.00612640380859375, -0.040985107421875, -0.017578125, -0.0015106201171875, 0.0379638671875, -0.026580810546875, 0.0238494873046875, -0.0270843505859375, -0.01018524169921875, -0.0130615234375, -0.020782470703125, 0.083251953125, 0.034332275390625, -0.0085601806640625, 0.009368896484375, -0.07171630859375, 0.0123443603515625, 0.017730712890625, -0.0182647705078125, 0.006435394287109375, -0.0226898193359375, 0.048095703125, 0.0110015869140625, 0.0304412841796875, -0.0287933349609375, 0.0033740997314453125, 0.0051116943359375, 0.0290985107421875, 0.03570556640625, 0.020904541015625, 0.012725830078125, -0.026824951171875, 0.043701171875, -0.008087158203125, 0.0528564453125, 0.01239013671875, -0.04345703125, -0.051422119140625, -0.007625579833984375, 0.023162841796875, 0.046783447265625, -0.0166168212890625, 0.05108642578125, -0.003498077392578125, -0.07904052734375, -0.0129547119140625, 0.0014057159423828125, 0.0310821533203125, 
0.04803466796875, 0.0357666015625, -0.01461029052734375, -0.051544189453125, -0.0709228515625, 0.011749267578125, 0.00008398294448852539, 0.01313018798828125, 0.033599853515625, 0.06951904296875, -0.0253143310546875, 0.056365966796875, -0.05145263671875, -0.00919342041015625, -0.01396942138671875, 0.0028247833251953125, 0.01104736328125, 0.051483154296875, 0.042510986328125, -0.0728759765625, -0.01467132568359375, -0.0192718505859375, -0.042449951171875, 0.02117919921875, -0.003963470458984375, -0.01068878173828125, -0.00948333740234375, 0.0251007080078125, -0.04345703125, 0.046142578125, 0.038543701171875, -0.0220489501953125, 0.044769287109375, -0.020660400390625, 0.00653076171875, -0.08392333984375, 0.029815673828125, 0.010894775390625, -0.01142120361328125, -0.0300445556640625, -0.0008587837219238281, 0.0023517608642578125, -0.0266571044921875, -0.05108642578125, 0.048614501953125, -0.0193328857421875, -0.0024013519287109375, -0.018890380859375, -0.007678985595703125, 0.00904083251953125, 0.04669189453125, 0.01189422607421875, 0.042510986328125, 0.0362548828125, -0.06500244140625, 0.0213623046875, 0.029815673828125, -0.0219573974609375, 0.028594970703125, -0.060516357421875, 0.007099151611328125, 0.0006251335144042969, 0.022613525390625, -0.050537109375, -0.026336669921875, 0.02069091796875, -0.03082275390625, 0.024017333984375, -0.00977325439453125, -0.035980224609375, -0.037994384765625, 0.006938934326171875, 0.0204010009765625, 0.04217529296875, -0.01788330078125, 0.0313720703125, 0.04632568359375, 0.01363372802734375, -0.0198211669921875, -0.04150390625, -0.00046825408935546875, -0.0226898193359375, -0.039093017578125, 0.04376220703125, -0.01331329345703125, -0.00551605224609375, -0.00612640380859375, 0.01690673828125, -0.0299072265625, 0.00981903076171875, 0.0355224609375, 0.027069091796875, 0.01248931884765625, 0.01074981689453125, -0.012542724609375, -0.01020050048828125, 0.007556915283203125, 0.003925323486328125, 0.032318115234375, -0.0147705078125, 
-0.0106964111328125, -0.052398681640625, 0.024871826171875, 0.0362548828125, -0.0004410743713378906, 0.050201416015625, 0.054534912109375, -0.057037353515625, 0.01114654541015625, -0.032623291015625, -0.01442718505859375, -0.032928466796875, 0.03729248046875, -0.0203704833984375, -0.028656005859375, 0.044219970703125, 0.0002772808074951172, -0.003337860107421875, 0.06561279296875, 0.0498046875, 0.0037250518798828125, 0.0784912109375, 0.059326171875, 0.006877899169921875, 0.04229736328125, -0.04022216796875, 0.001758575439453125, -0.067626953125, -0.03363037109375, -0.06304931640625, -0.00295257568359375, -0.049041748046875, -0.01108551025390625, 0.00885009765625, 0.0047454833984375, -0.052764892578125, 0.048736572265625, -0.05059814453125, 0.01506805419921875, 0.03741455078125, 0.02020263671875, -0.0005474090576171875, -0.0241851806640625, -0.00225067138671875, 0.0029582977294921875, -0.04998779296875, -0.05340576171875, 0.08099365234375, 0.046783447265625, 0.049041748046875, -0.00734710693359375, 0.040374755859375, 0.0266265869140625, 0.01032257080078125, -0.039215087890625, 0.036834716796875, 0.0020694732666015625, -0.07598876953125, -0.00832366943359375, -0.0173187255859375, -0.06524658203125, 0.0030059814453125, -0.020111083984375, -0.072509765625, 0.0217742919921875, 0.0185699462890625, -0.034820556640625, 0.020111083984375, -0.052398681640625, 0.09283447265625, -0.035400390625, -0.0272674560546875, -0.0204315185546875, -0.04132080078125, 0.03106689453125, 0.0015668869018554688, 0.023956298828125, -0.004241943359375, 0.00814056396484375, 0.06732177734375, -0.0557861328125, 0.06646728515625, -0.0281829833984375, 0.030853271484375, 0.036407470703125, -0.0170440673828125, 0.042327880859375, 0.0028247833251953125, -0.0086669921875, 0.0270538330078125, 0.026123046875, -0.034332275390625, -0.02349853515625, 0.052032470703125, -0.07080078125, -0.0158843994140625, -0.036895751953125, -0.04290771484375, -0.01129913330078125, 0.0152587890625, 0.0273284912109375, 
0.0192108154296875, -0.024993896484375, 0.01323699951171875, 0.054168701171875, -0.0175323486328125, 0.015838623046875, 0.0207672119140625, -0.018829345703125, -0.03363037109375, 0.055084228515625, 0.00719451904296875, 0.015533447265625, 0.02459716796875, 0.00888824462890625, -0.037567138671875, -0.04266357421875, -0.0250244140625, -0.004291534423828125, -0.05804443359375, -0.00611114501953125, -0.059600830078125, -0.0290374755859375, -0.033294677734375, -0.00005364418029785156, -0.034332275390625, -0.023162841796875, -0.0439453125, -0.01313018798828125, 0.042327880859375, 0.0419921875, -0.00592803955078125, 0.04742431640625, -0.050750732421875, 0.00714874267578125, 0.01751708984375, 0.01397705078125, 0.004909515380859375, -0.044219970703125, -0.0290069580078125, 0.0203704833984375, -0.05596923828125, -0.06170654296875, 0.026214599609375, -0.00545501708984375, 0.040557861328125, 0.021728515625, -0.0051727294921875, 0.0570068359375, -0.0222015380859375, 0.07635498046875, 0.0208587646484375, -0.06256103515625, 0.04791259765625, -0.031951904296875, 0.005115509033203125, 0.059356689453125, 0.042816162109375, -0.0170135498046875, 0.008514404296875, -0.07354736328125, -0.058135986328125, 0.0394287109375, 0.0214996337890625, 0.0182647705078125, 0.00870513916015625, 0.0312347412109375, -0.00815582275390625, 0.01166534423828125, -0.06658935546875, -0.018951416015625, -0.022674560546875, -0.00325775146484375, 0.003192901611328125, -0.00807952880859375, -0.015716552734375, -0.040985107421875, 0.072998046875, 0.0179443359375, 0.033233642578125, 0.0047149658203125, 0.005741119384765625, 0.00939178466796875, 0.00033974647521972656, 0.038726806640625, 0.05035400390625, -0.0462646484375, -0.0244903564453125, 0.0212249755859375, -0.052276611328125, -0.00656890869140625, 0.0090484619140625, -0.01250457763671875, -0.00547027587890625, 0.00331878662109375, 0.0684814453125, 0.0160369873046875, -0.0287933349609375, 0.0250701904296875, -0.0003514289855957031, -0.0213623046875, 
-0.04425048828125, 0.00982666015625, 0.0091705322265625, 0.0017881393432617188, -0.00049591064453125, 0.013946533203125, 0.021881103515625, -0.035125732421875, 0.005245208740234375, 0.027801513671875, -0.047119140625, -0.0003590583801269531, 0.0728759765625, 0.024688720703125, -0.03448486328125, 0.04547119140625, -0.017059326171875, -0.0171661376953125, 0.07879638671875, 0.034820556640625, 0.060638427734375, 0.000007748603820800781, 0.00830841064453125, 0.0538330078125, 0.0202484130859375, 0.0017490386962890625, 0.024139404296875, -0.000025570392608642578, -0.033111572265625, -0.00775146484375, -0.044769287109375, -0.046600341796875, 0.0283355712890625, -0.06561279296875, 0.05303955078125, -0.050140380859375, -0.0244293212890625, 0.026031494140625, 0.0189361572265625, -0.088134765625, 0.044769287109375, 0.005340576171875, 0.083251953125, -0.070068359375, 0.065185546875, 0.047698974609375, -0.05926513671875, -0.061004638671875, -0.02923583984375, 0.0003619194030761719, -0.051422119140625, 0.0262908935546875, 0.002140045166015625, 0.0164947509765625, -0.0038166046142578125, -0.046844482421875, -0.06622314453125, 0.105712890625, 0.0086669921875, -0.06103515625, 0.0155029296875, -0.004802703857421875, 0.040863037109375, -0.0357666015625, 0.054656982421875, 0.023956298828125, 0.0380859375, 0.0217742919921875, -0.06005859375, 0.007152557373046875, -0.01314544677734375, 0.008453369140625, -0.006587982177734375, -0.062744140625, 0.062744140625, -0.0099334716796875, 0.0024356842041015625, 0.007659912109375, 0.0308685302734375, 0.00579071044921875, 0.03680419921875, 0.0250244140625, 0.04949951171875, 0.06695556640625, 0.00708770751953125, 0.08746337890625, -0.04693603515625, 0.0501708984375, 0.1007080078125, -0.004940032958984375, 0.0599365234375, 0.0225067138671875, -0.039886474609375, 0.0305328369140625, 0.0882568359375, -0.0261383056640625, 0.0313720703125, 0.0171661376953125, -0.0004410743713378906, -0.0186004638671875, -0.01425933837890625, -0.037017822265625, 
0.0216522216796875, 0.0277557373046875, -0.046844482421875, -0.01465606689453125, -0.004619598388671875, 0.004337310791015625, -0.0169830322265625, -0.03582763671875, 0.044830322265625, -0.002262115478515625, -0.036041259765625, 0.0289306640625, 0.01235198974609375, 0.0201416015625, -0.056854248046875, -0.012115478515625, 0.006008148193359375, 0.01215362548828125, -0.036529541015625, -0.0457763671875, 0.035064697265625, -0.0028533935546875, -0.0321044921875, -0.00487518310546875, 0.048065185546875, -0.0017337799072265625, -0.0582275390625, 0.024505615234375, 0.01427459716796875, 0.03118896484375, -0.0050048828125, -0.082763671875, 0.003997802734375, 0.0018815994262695312, -0.0088043212890625, 0.0184478759765625, -0.00591278076171875, -0.0042724609375, 0.044342041015625, 0.042327880859375, -0.00977325439453125, -0.01073455810546875, 0.00252532958984375, 0.0675048828125, -0.05206298828125, -0.032073974609375, -0.040069580078125, 0.05267333984375, -0.0195465087890625, -0.038909912109375, 0.05059814453125, 0.06109619140625, 0.054931640625, -0.0016193389892578125, 0.063720703125, -0.01812744140625, 0.026947021484375, -0.0242767333984375, 0.0394287109375, -0.036956787109375, 0.00202178955078125, -0.0249786376953125, -0.07696533203125, 0.0006165504455566406, 0.0350341796875, -0.033294677734375, 0.016326904296875, 0.042449951171875, 0.0648193359375, -0.00978851318359375, 0.0300750732421875, 0.017333984375, 0.013214111328125, 0.010650634765625, 0.0338134765625, 0.03607177734375, -0.05859375, 0.040863037109375, -0.03826904296875, -0.0269012451171875, -0.01386260986328125, -0.07080078125, -0.049560546875, -0.044219970703125, -0.051483154296875, -0.0316162109375, 0.005832672119140625, 0.05511474609375, 0.0682373046875, -0.06536865234375, -0.02093505859375, -0.0246124267578125, 0.006412506103515625, -0.0191192626953125, -0.01904296875, 0.038818359375, -0.011627197265625, -0.05859375, 0.0013332366943359375, -0.0169677734375, 0.017974853515625, -0.0266265869140625, 
-0.009002685546875, -0.023529052734375, 0.0028228759765625, 0.026947021484375, 0.03179931640625, -0.0341796875, -0.0193023681640625, -0.01515960693359375, -0.006397247314453125, -0.01263427734375, 0.035797119140625, -0.034576416015625, 0.030029296875, 0.0316162109375, 0.02203369140625, 0.0675048828125, -0.002918243408203125, 0.021392822265625, -0.019775390625, 0.00824737548828125, 0.0220947265625, 0.0374755859375, 0.0133819580078125, -0.0501708984375, 0.04156494140625, 0.021728515625, -0.06341552734375, -0.04779052734375, -0.0042572021484375, -0.0872802734375, -0.0091094970703125, 0.09063720703125, 0.00199127197265625, -0.023345947265625, -0.01139068603515625, -0.0264434814453125, 0.010772705078125, -0.0192413330078125, 0.049560546875, 0.062164306640625, -0.0206146240234375, 0.00965118408203125, -0.060455322265625, 0.0355224609375, 0.0003769397735595703, -0.070556640625, -0.01192474365234375, 0.0404052734375, 0.0313720703125, 0.0124664306640625, 0.03289794921875, -0.018524169921875, 0.0185394287109375, 0.026031494140625, 0.0323486328125, -0.002742767333984375, -0.0225830078125, -0.02288818359375, 0.0007810592651367188, -0.00946044921875, -0.038909912109375 ] ]
IDEA-CCNL/Ziya-LLaMA-13B-Pretrain-v1
2023-06-21T09:01:46.000Z
[ "transformers", "pytorch", "llama", "text-generation", "en", "zh", "arxiv:2210.08590", "license:gpl-3.0", "has_space", "text-generation-inference", "region:us" ]
text-generation
IDEA-CCNL
null
null
IDEA-CCNL/Ziya-LLaMA-13B-Pretrain-v1
20
5,856
transformers
2023-06-01T02:43:52
--- license: gpl-3.0 language: - en - zh inference: false --- # Ziya-LLaMA-13B-Pretrain-v1 - Main Page:[Fengshenbang](https://fengshenbang-lm.com/) - Github: [Fengshenbang-LM](https://github.com/IDEA-CCNL/Fengshenbang-LM) (LLaMA权重的许可证限制,我们无法直接发布完整的模型权重,用户需要参考[使用说明](#-使用-usage-)进行合并) # 姜子牙系列模型 - [Ziya-LLaMA-13B-v1](https://huggingface.co/IDEA-CCNL/Ziya-LLaMA-13B-v1) - [Ziya-LLaMA-7B-Reward](https://huggingface.co/IDEA-CCNL/Ziya-LLaMA-7B-Reward) - [Ziya-LLaMA-13B-Pretrain-v1](https://huggingface.co/IDEA-CCNL/Ziya-LLaMA-13B-Pretrain-v1) - [Ziya-BLIP2-14B-Visual-v1](https://huggingface.co/IDEA-CCNL/Ziya-BLIP2-14B-Visual-v1) ## 简介 Brief Introduction Ziya-LLaMA-13B-Pretrain-v1 是基于LLaMa的130亿参数大规模预训练模型,针对中文分词优化,并完成了中英文 110B tokens 的增量预训练,进一步提升了中文生成和理解能力。目前姜子牙通用大模型 [Ziya-LLaMA-13B-v1](https://huggingface.co/IDEA-CCNL/Ziya-LLaMA-13B-v1) 在本模型上,进一步完成了多任务有监督微调和人类反馈学习阶段的训练过程,具备翻译,编程,文本分类,信息抽取,摘要,文案生成,常识问答和数学计算等能力。 **用户须知**:为了遵循 Meta 发布的 LLaMA 模型许可,本模型发布的是训练前后的权重增量,最终模型可方便地通过脚本获得(参考 Usage 中的步骤)。 The Ziya-LLaMA-13B-Pretrain-v1 is a large-scale pre-trained model based on LLaMA with 13 billion parameters. We optimizes LLaMAtokenizer on chinese, and incrementally train 110 billion tokens of data based on LLaMa-13B model, which significantly improved the understanding and generation ability on Chinese. Based on the Ziya-LLaMA-13B-Pretrain-v1, the [Ziya-LLaMA-13B-v1](https://huggingface.co/IDEA-CCNL/Ziya-LLaMA-13B-v1) is furtherly trained with 2 stages: multi-task supervised fine-tuning (SFT), and human feedback learning (RM, PPO). The Ziya-LLaMA-13B-v1 has the ability to perform tasks such as translation, programming, text classification, information extraction, summarization, copywriting, common sense Q&A, and mathematical calculation. **README**: To follow the License of LLaMA released by Meta, we only release the incremental weights after continual pretraining. The final model Ziya-LLaMA-13B-Pretrain-v1 could be easily got via the script (refer to Usage). 
## 模型分类 Model Taxonomy | 需求 Demand | 任务 Task | 系列 Series | 模型 Model | 参数 Parameter | 额外 Extra | | :----: | :----: | :----: | :----: | :----: | :----: | | 通用 General | AGI模型 | 姜子牙 Ziya | LLaMA | 13B | English&Chinese | ## 模型信息 Model Information ### 继续预训练 Continual Pretraining 原始数据包含英文和中文,其中英文数据来自openwebtext、Books、Wikipedia和Code,中文数据来自清洗后的悟道数据集、自建的中文数据集。在对原始数据进行去重、模型打分、数据分桶、规则过滤、敏感主题过滤和数据评估后,最终得到125B tokens的有效数据。 为了解决LLaMA原生分词对中文编解码效率低下的问题,我们在LLaMA词表的基础上增加了7k+个常见中文字,通过和LLaMA原生的词表去重,最终得到一个39410大小的词表,并通过复用Transformers里LlamaTokenizer来实现了这一效果。 在增量训练过程中,我们使用了160张40GB的A100,采用2.6M tokens的训练集样本数量和FP 16的混合精度,吞吐量达到118 TFLOP per GPU per second。因此我们能够在8天的时间里在原生的LLaMA-13B模型基础上,增量训练110B tokens的数据。据我们所知,这也是至今为止LLaMA-13B上最大规模增量训练。 训练期间,虽然遇到了机器宕机、底层框架bug、loss spike等各种问题,但我们通过快速调整,保证了增量训练的稳定性。我们也放出训练过程的loss曲线,让大家了解可能出现的问题。 The original data contains both English and Chinese, with English data from openwebtext, Books, Wikipedia, and Code, and Chinese data from the cleaned Wudao dataset and self-built Chinese dataset. After deduplication, model scoring, data bucketing, rule filtering, sensitive topic filtering, and data evaluation, we finally obtained 125 billion tokens of data. To address the issue of low efficiency in Chinese encoding and decoding caused by the tokenizer of LLaMa, we added 8,000 commonly used Chinese characters to the LLaMa SentencePiece vocabulary. Deduplicating with the original LLaMa vocabulary, we finally obtained a vocabulary of size 39,410. We achieved this by reusing the LlamaTokenizer in Transformers. During the incremental training process, we used 160 A100s with a total of 40GB memory, using a training dataset with 2.6 million tokens and mixed precision of FP16. The throughput reached 118 TFLOP per GPU per second. As a result, we were able to incrementally train 110 billion tokens of data based on LLaMa-13B model in just 8 days.As far as we know, it is the largest increamental training on LLaMA-13B so far. 
Throughout the training process, we encountered various issues such as machine crashes, underlying framework bugs, and loss spikes. However, we ensured the stability of the incremental training by making rapid adjustments. We have also released the loss curve during the training process to help everyone understand the potential issues that may arise. <img src="https://huggingface.co/IDEA-CCNL/Ziya-LLaMA-13B-Pretrain-v1/resolve/main/loss.png" width=1000 height=600> ### 效果评估 Performance 以下是 Ziya-LLaMA-13B-Pertrain-v1 和继续训练前的LLaMA 模型在英文公开评测 [HeLM](https://crfm.stanford.edu/helm/latest/) 和中文多项选择评测集上的评估效果对比。 Here are comparisons of the Ziya-LLaMA-13B-Pretrain-v1 model and the LLaMA model before continual pre-training, evaluated on the English benchmark (HeLM), and our Chinese multiple-choice evaluation datasets. <img src="https://huggingface.co/IDEA-CCNL/Ziya-LLaMA-13B-Pretrain-v1/resolve/main/ziya_en_eval.png" width=2542 height=1045> | Model | Meanwin_rate | MMLU | BoolQ | NarrativeQA | NaturalQuestion(closed-book) | NaturalQuestion(open-book) | QuAC | TruthfulQA | IMDB | | -------------------------- | ----- | ----- | ----- | ----- | ----- | ----- | ----- | ----- | ----- | | LLaMA-13B | 0.500 | 0.424 | 0.718 | 0.440 | 0.349 | 0.591 | 0.318 | 0.326 | 0.487 | | Ziya-LLaMA-13B-Pretrain-v1 | 0.650 | 0.433 | 0.753 | 0.445 | 0.348 | 0.528 | 0.335 | 0.249 | 0.497 | <img src="https://huggingface.co/IDEA-CCNL/Ziya-LLaMA-13B-Pretrain-v1/resolve/main/ziya_zh_eval.png" width=2340 height=1523> | 模型 | incontext  | c3 | 常识 | 语文 | 数学 | 英语 | 物理 | 化学 | 生物 | 历史 | 政治 | 地理 | |-------------------------|------------|--------|--------|--------|--------|--------|--------|--------|--------|--------|--------|--------| | LLaMA-13B | 0-shot | 0.4817 | 0.3088 | 0.2674 | 0.2882 | 0.3399 | 0.2581 | 0.2478 | 0.2271 | 0.3380 | 0.3275 | 0.296 | | Ziya-LLaMA-13B-Pretrain-v1 | 0-shot | 0.5354 | 0.3373 | 0.2925 | 0.3059 | 0.3428 | 0.2903 | 0.2655 | 0.3215 | 0.4190 | 0.4123 | 0.4425 | | LLaMA-13B | 5-shot | 
0.5314 | 0.3586 | 0.2813 | 0.2912 | 0.4476 | 0.2939 | 0.2301 | 0.2330 | 0.3268 | 0.3187 | 0.3103 | | Ziya-LLaMA-13B-Pretrain-v1 | 5-shot | 0.6037 | 0.4330 | 0.2802 | 0.2912 | 0.4363 | 0.2975 | 0.2802 | 0.3422 | 0.4358 | 0.4357 | 0.4540 | <!-- <img src="" width=1000 height=600> --> ## <span id="jump"> 使用 Usage </span> 由于LLaMA权重的许可限制,该模型不能用于商业用途,请严格遵守LLaMA的使用政策。考虑到LLaMA权重的许可证限制,我们无法直接发布完整的模型权重。因此,我们使用了[FastChat开源工具](https://github.com/lm-sys/FastChat/blob/main/fastchat/model/apply_delta.py)作为基础,并对其进行了进一步的优化。我们计算并发布了Ziya-LLaMA-13B-v1权重与原始LLaMA权重之间的差值。用户可以按照以下步骤操作以获得Ziya-LLaMA-13B-v1完整权重,具体步骤如下: Step 1:获取[LLaMA](https://docs.google.com/forms/d/e/1FAIpQLSfqNECQnMkycAp2jP4Z9TFX0cGR4uf7b_fBxjY_OjhJILlKGA/viewform)权重并转成Hugging Face Transformers模型格式,可参考转换[脚本](https://github.com/huggingface/transformers/blob/main/src/transformers/models/llama/convert_llama_weights_to_hf.py)(若已经有huggingface权重则跳过) ``` python src/transformers/models/llama/convert_llama_weights_to_hf.py \ --input_dir /path/to/downloaded/llama/weights --model_size 13B --output_dir /output/path ``` Step 2:下载Ziya-LLaMA-13B-v1的delta权重以及step 1中转换好的原始LLaMA权重,使用如下脚本转换:https://github.com/IDEA-CCNL/Fengshenbang-LM/blob/main/fengshen/utils/apply_delta.py. 
``` python3 -m apply_delta --base ~/model_weights/llama-13b --target ~/model_weights/Ziya-LLaMA-13B --delta ~/model_weights/Ziya-LLaMA-13B-v1 ``` Step 3: 加载step 2得到的模型推理 ```python3 from transformers import AutoTokenizer from transformers import LlamaForCausalLM import torch device = torch.device("cuda") query="帮我写一份去西安的旅游计划" model = LlamaForCausalLM.from_pretrained(ckpt, torch_dtype=torch.float16, device_map="auto") tokenizer = AutoTokenizer.from_pretrained(ckpt) inputs = query.strip() input_ids = tokenizer(inputs, return_tensors="pt").input_ids.to(device) generate_ids = model.generate( input_ids, max_new_tokens=1024, do_sample = True, top_p = 0.85, temperature = 1.0, repetition_penalty=1., eos_token_id=2, bos_token_id=1, pad_token_id=0) output = tokenizer.batch_decode(generate_ids)[0] print(output) ``` Step 1: Obtain the [LLaMA](https://huggingface.co/docs/transformers/main/en/model_doc/llama#overview) weights and convert them into the Hugging Face Transformers format. You can refer to the [script](https://github.com/huggingface/transformers/blob/main/src/transformers/models/llama/convert_llama_weights_to_hf.py) (skip this step if you already have the Hugging Face weights). ``` python src/transformers/models/llama/convert_llama_weights_to_hf.py \ --input_dir /path/to/downloaded/llama/weights --model_size 13B --output_dir /output/path ``` Step 2: Download the delta weights for Ziya-LLaMA-13B-v1 and the pre-converted original LLaMA weights from step 1. Use the following script for conversion: https://github.com/IDEA-CCNL/Fengshenbang-LM/blob/main/fengshen/utils/apply_delta.py ``` python3 -m apply_delta --base ~/model_weights/llama-13b --target ~/model_weights/Ziya-LLaMA-13B --delta ~/model_weights/Ziya-LLaMA-13B-v1(huggingface下载) ``` Step 3: Load the model obtained in Step 2 for inference. 
## 微调示例 Finetune Example Refer to [ziya_finetune](https://github.com/IDEA-CCNL/Fengshenbang-LM/tree/main/fengshen/examples/ziya_llama) ## 推理量化示例 Inference & Quantization Example Refer to [ziya_inference](https://github.com/IDEA-CCNL/Fengshenbang-LM/tree/main/fengshen/examples/ziya_inference) ## 引用 Citation 如果您在您的工作中使用了我们的模型,可以引用我们的[论文](https://arxiv.org/abs/2210.08590): If you are using the resource for your work, please cite the our [paper](https://arxiv.org/abs/2210.08590): ```text @article{fengshenbang, author = {Jiaxing Zhang and Ruyi Gan and Junjie Wang and Yuxiang Zhang and Lin Zhang and Ping Yang and Xinyu Gao and Ziwei Wu and Xiaoqun Dong and Junqing He and Jianheng Zhuo and Qi Yang and Yongfeng Huang and Xiayu Li and Yanghan Wu and Junyu Lu and Xinyu Zhu and Weifeng Chen and Ting Han and Kunhao Pan and Rui Wang and Hao Wang and Xiaojun Wu and Zhongshen Zeng and Chongpei Chen}, title = {Fengshenbang 1.0: Being the Foundation of Chinese Cognitive Intelligence}, journal = {CoRR}, volume = {abs/2209.02970}, year = {2022} } ``` You can also cite our [website](https://github.com/IDEA-CCNL/Fengshenbang-LM/): 欢迎引用我们的[网站](https://github.com/IDEA-CCNL/Fengshenbang-LM/): ```text @misc{Fengshenbang-LM, title={Fengshenbang-LM}, author={IDEA-CCNL}, year={2021}, howpublished={\url{https://github.com/IDEA-CCNL/Fengshenbang-LM}}, } ```
10,733
[ [ -0.040679931640625, -0.03546142578125, 0.006984710693359375, 0.037750244140625, -0.055755615234375, 0.004695892333984375, -0.0048065185546875, -0.0440673828125, 0.03875732421875, 0.0030384063720703125, -0.05950927734375, -0.0377197265625, -0.047271728515625, 0.017547607421875, -0.01218414306640625, 0.06927490234375, -0.0256805419921875, -0.01366424560546875, 0.01461029052734375, 0.002216339111328125, -0.035919189453125, -0.0309295654296875, -0.03192138671875, -0.02972412109375, 0.0257568359375, 0.01922607421875, 0.035247802734375, 0.04638671875, 0.040557861328125, 0.0241241455078125, -0.01474761962890625, 0.0193939208984375, -0.0298004150390625, -0.0214996337890625, 0.0294647216796875, -0.043243408203125, -0.048614501953125, -0.003841400146484375, 0.04656982421875, 0.01326751708984375, -0.01690673828125, 0.0245819091796875, 0.01320648193359375, 0.033416748046875, -0.0177459716796875, 0.009124755859375, -0.0254974365234375, 0.007488250732421875, -0.025421142578125, -0.01033782958984375, 0.0108795166015625, -0.029632568359375, -0.0200042724609375, -0.05731201171875, -0.011962890625, -0.00945281982421875, 0.113037109375, 0.0275726318359375, -0.0218353271484375, -0.0017957687377929688, -0.01177978515625, 0.06903076171875, -0.069580078125, -0.0052642822265625, 0.0264129638671875, 0.01215362548828125, -0.0158843994140625, -0.055328369140625, -0.06439208984375, -0.0134429931640625, -0.0168304443359375, 0.01861572265625, -0.00940704345703125, -0.0218963623046875, 0.00833892822265625, 0.033660888671875, -0.0189361572265625, 0.028167724609375, -0.026336669921875, -0.00678253173828125, 0.06341552734375, 0.0199127197265625, 0.03594970703125, -0.0222320556640625, -0.0462646484375, -0.0224761962890625, -0.05364990234375, 0.0303955078125, 0.023773193359375, 0.01222991943359375, -0.044342041015625, 0.037261962890625, -0.029632568359375, 0.0310516357421875, 0.0213470458984375, -0.047454833984375, 0.042999267578125, -0.04168701171875, -0.0189971923828125, -0.0047760009765625, 
0.063720703125, 0.039520263671875, 0.0087127685546875, 0.01087188720703125, -0.013580322265625, -0.000010192394256591797, -0.01224517822265625, -0.07012939453125, 0.0163726806640625, 0.0163726806640625, -0.045318603515625, -0.03912353515625, -0.00768280029296875, -0.041595458984375, -0.0009183883666992188, -0.01067352294921875, 0.0234222412109375, -0.029815673828125, -0.03631591796875, 0.0125579833984375, 0.0102996826171875, 0.036865234375, 0.0310211181640625, -0.052490234375, 0.007282257080078125, 0.02783203125, 0.059844970703125, -0.00714874267578125, -0.034271240234375, -0.00255584716796875, 0.005535125732421875, -0.022552490234375, 0.05194091796875, -0.00656890869140625, -0.0316162109375, -0.023040771484375, 0.0130615234375, -0.01275634765625, -0.0277862548828125, 0.028778076171875, -0.019378662109375, -0.00824737548828125, -0.033966064453125, -0.017730712890625, -0.018890380859375, 0.0257110595703125, -0.038330078125, 0.08575439453125, -0.0038127899169921875, -0.06585693359375, 0.0211334228515625, -0.0343017578125, -0.0214080810546875, -0.017303466796875, -0.0010709762573242188, -0.03277587890625, -0.0216522216796875, 0.034576416015625, 0.04107666015625, -0.03302001953125, 0.0098114013671875, -0.0013742446899414062, -0.0391845703125, 0.0156402587890625, -0.01027679443359375, 0.0721435546875, 0.01154327392578125, -0.039886474609375, 0.01238250732421875, -0.063720703125, -0.00365447998046875, 0.049407958984375, -0.039154052734375, 0.0011053085327148438, -0.0120086669921875, -0.01366424560546875, 0.02642822265625, 0.04803466796875, -0.026123046875, 0.0311737060546875, -0.028350830078125, 0.0322265625, 0.0819091796875, -0.0032405853271484375, 0.0038852691650390625, -0.045013427734375, 0.038330078125, 0.016357421875, 0.0208282470703125, -0.01171875, -0.033172607421875, -0.08270263671875, -0.0225982666015625, 0.0029315948486328125, 0.047149658203125, -0.04058837890625, 0.052093505859375, -0.00968170166015625, -0.0657958984375, -0.03607177734375, 0.023284912109375, 
0.02874755859375, 0.0302886962890625, 0.0306549072265625, -0.0200347900390625, -0.052001953125, -0.05780029296875, 0.01253509521484375, -0.017974853515625, 0.01043701171875, 0.0164642333984375, 0.054962158203125, -0.0219573974609375, 0.04058837890625, -0.0301361083984375, -0.026824951171875, -0.035736083984375, -0.0167083740234375, 0.046173095703125, 0.03692626953125, 0.054901123046875, -0.040069580078125, -0.0296783447265625, 0.00508880615234375, -0.07232666015625, 0.004756927490234375, -0.01534271240234375, -0.03228759765625, 0.005901336669921875, 0.00727081298828125, -0.055084228515625, 0.044036865234375, 0.0303802490234375, -0.01248931884765625, 0.05340576171875, 0.00457000732421875, -0.00698089599609375, -0.0782470703125, 0.00821685791015625, -0.006092071533203125, 0.018096923828125, -0.0300750732421875, 0.0119476318359375, -0.00605010986328125, 0.032257080078125, -0.038726806640625, 0.04443359375, -0.0267181396484375, 0.00952911376953125, -0.01216888427734375, 0.00862884521484375, -0.006256103515625, 0.05377197265625, -0.0029811859130859375, 0.06170654296875, 0.031280517578125, -0.05206298828125, 0.02508544921875, 0.0394287109375, -0.0401611328125, 0.0182647705078125, -0.056884765625, 0.0111846923828125, 0.00008696317672729492, 0.034149169921875, -0.0819091796875, -0.01081085205078125, 0.03997802734375, -0.0278167724609375, 0.01366424560546875, 0.0187530517578125, -0.03192138671875, -0.04559326171875, -0.046142578125, 0.0144195556640625, 0.0305328369140625, -0.047119140625, 0.018035888671875, 0.007450103759765625, 0.0051422119140625, -0.07196044921875, -0.05718994140625, -0.00867462158203125, -0.01898193359375, -0.04583740234375, 0.041595458984375, -0.0177764892578125, -0.0015459060668945312, -0.01546478271484375, -0.0130767822265625, 0.0008869171142578125, 0.01373291015625, 0.0155487060546875, 0.03851318359375, -0.0218658447265625, -0.014007568359375, 0.0011072158813476562, -0.004184722900390625, -0.01538848876953125, 0.0154571533203125, 0.0574951171875, 
-0.0148773193359375, -0.02288818359375, -0.06854248046875, -0.00024199485778808594, 0.019775390625, -0.00765228271484375, 0.05780029296875, 0.056182861328125, -0.0129547119140625, 0.014923095703125, -0.043670654296875, 0.006671905517578125, -0.03875732421875, 0.0185394287109375, -0.03253173828125, -0.04876708984375, 0.043670654296875, 0.0010471343994140625, 0.0178680419921875, 0.053070068359375, 0.043182373046875, -0.0038604736328125, 0.05609130859375, 0.052215576171875, -0.0325927734375, 0.04071044921875, -0.051544189453125, -0.0045166015625, -0.06695556640625, -0.03887939453125, -0.0240478515625, -0.041656494140625, -0.0237274169921875, -0.025421142578125, 0.01343536376953125, 0.0197601318359375, -0.04486083984375, 0.030853271484375, -0.04510498046875, 0.0061492919921875, 0.03472900390625, 0.0220184326171875, 0.018096923828125, 0.0017061233520507812, -0.01522064208984375, 0.000035822391510009766, -0.035430908203125, -0.037872314453125, 0.07470703125, 0.0166778564453125, 0.049957275390625, 0.003116607666015625, 0.06512451171875, -0.0037994384765625, 0.0341796875, -0.04248046875, 0.0457763671875, 0.0181732177734375, -0.036865234375, -0.01493072509765625, -0.00681304931640625, -0.067138671875, 0.033477783203125, -0.00884246826171875, -0.06414794921875, 0.010528564453125, -0.01299285888671875, -0.029632568359375, 0.0343017578125, -0.024505615234375, 0.040069580078125, -0.045623779296875, -0.02392578125, -0.008514404296875, -0.044219970703125, 0.051513671875, -0.0037384033203125, 0.01279449462890625, -0.0162506103515625, -0.0170440673828125, 0.06866455078125, -0.061126708984375, 0.0736083984375, -0.0117645263671875, -0.0223541259765625, 0.046051025390625, -0.0167236328125, 0.05133056640625, -0.004123687744140625, -0.01141357421875, 0.02886962890625, -0.01255035400390625, -0.019287109375, -0.01389312744140625, 0.03936767578125, -0.0899658203125, -0.063232421875, -0.034515380859375, -0.012939453125, 0.01509857177734375, 0.0196380615234375, 0.0306549072265625, 
-0.01995849609375, 0.00962066650390625, -0.0009517669677734375, 0.0120086669921875, -0.037841796875, 0.046051025390625, 0.0232696533203125, -0.00995635986328125, -0.033477783203125, 0.053741455078125, 0.0173492431640625, 0.007511138916015625, 0.0271759033203125, -0.00023877620697021484, -0.0118255615234375, -0.029693603515625, -0.03857421875, 0.041259765625, -0.0458984375, -0.0364990234375, -0.032867431640625, -0.029632568359375, -0.0211944580078125, -0.0099639892578125, -0.0214080810546875, -0.0216217041015625, -0.047271728515625, -0.0312347412109375, 0.043487548828125, 0.04632568359375, -0.0070343017578125, 0.049346923828125, -0.0374755859375, 0.0254974365234375, 0.013214111328125, 0.00988006591796875, 0.033416748046875, -0.048095703125, -0.0225067138671875, -0.0003025531768798828, -0.03802490234375, -0.055389404296875, 0.046173095703125, 0.0003724098205566406, 0.049407958984375, 0.0374755859375, -0.011627197265625, 0.0826416015625, -0.014862060546875, 0.06829833984375, 0.035491943359375, -0.06683349609375, 0.043731689453125, -0.0103302001953125, -0.0035381317138671875, 0.017608642578125, 0.032318115234375, -0.027252197265625, 0.005031585693359375, -0.035430908203125, -0.06756591796875, 0.0634765625, 0.0131378173828125, 0.0048980712890625, 0.0151519775390625, 0.02294921875, 0.0025882720947265625, 0.0187530517578125, -0.07159423828125, -0.04046630859375, -0.033905029296875, 0.0011739730834960938, 0.0045166015625, -0.01430511474609375, 0.0013132095336914062, -0.042816162109375, 0.050140380859375, 0.004180908203125, 0.037506103515625, 0.01611328125, -0.009918212890625, -0.00848388671875, -0.00399017333984375, 0.0455322265625, 0.03912353515625, -0.010345458984375, -0.0254974365234375, 0.04022216796875, -0.048736572265625, 0.0290985107421875, 0.001300811767578125, -0.01432037353515625, -0.00673675537109375, 0.032745361328125, 0.063720703125, 0.01235198974609375, -0.03863525390625, 0.034698486328125, 0.003192901611328125, -0.0113372802734375, -0.02984619140625, 
0.00392913818359375, 0.0128173828125, 0.025909423828125, 0.043670654296875, -0.01824951171875, -0.0213775634765625, -0.02410888671875, -0.0007309913635253906, 0.0255126953125, 0.015777587890625, -0.0231475830078125, 0.060089111328125, 0.0025539398193359375, 0.0033817291259765625, 0.0301666259765625, -0.00087738037109375, -0.04150390625, 0.0726318359375, 0.0462646484375, 0.032623291015625, -0.0303802490234375, 0.0095672607421875, 0.06011962890625, 0.03125, -0.0106353759765625, 0.025787353515625, -0.01403045654296875, -0.04583740234375, -0.0223846435546875, -0.06243896484375, -0.0124969482421875, 0.0211029052734375, -0.034149169921875, 0.031219482421875, -0.050811767578125, -0.0182647705078125, -0.017059326171875, 0.034393310546875, -0.057037353515625, 0.0228118896484375, 0.0087127685546875, 0.07452392578125, -0.041107177734375, 0.06756591796875, 0.03289794921875, -0.05816650390625, -0.0782470703125, -0.01093292236328125, -0.00783538818359375, -0.09832763671875, 0.06695556640625, 0.0203399658203125, -0.0030536651611328125, -0.00811767578125, -0.050140380859375, -0.09747314453125, 0.11981201171875, 0.0182952880859375, -0.04083251953125, -0.004581451416015625, 0.00829315185546875, 0.025421142578125, -0.01346588134765625, 0.0264892578125, 0.03631591796875, 0.054107666015625, 0.0056610107421875, -0.07684326171875, 0.034912109375, -0.040435791015625, 0.0006422996520996094, -0.0222625732421875, -0.09893798828125, 0.09771728515625, -0.027130126953125, -0.0168304443359375, 0.0273284912109375, 0.068359375, 0.0455322265625, 0.0070343017578125, 0.02911376953125, 0.0435791015625, 0.0546875, -0.0026111602783203125, 0.0791015625, -0.01342010498046875, 0.0179443359375, 0.03594970703125, -0.00362396240234375, 0.05340576171875, 0.0267791748046875, -0.046905517578125, 0.051055908203125, 0.0731201171875, -0.00926971435546875, 0.04095458984375, 0.0125579833984375, -0.01427459716796875, -0.0186614990234375, -0.003902435302734375, -0.060577392578125, 0.0281829833984375, 
0.0213470458984375, -0.0130767822265625, -0.005565643310546875, -0.01157379150390625, 0.0246124267578125, -0.0318603515625, -0.0176544189453125, 0.036834716796875, 0.01477813720703125, -0.03271484375, 0.0804443359375, 0.0018987655639648438, 0.09228515625, -0.055206298828125, 0.0036640167236328125, -0.031829833984375, 0.003582000732421875, -0.028564453125, -0.0438232421875, -0.01082611083984375, 0.008331298828125, 0.01390838623046875, 0.005527496337890625, 0.047393798828125, 0.0078582763671875, -0.042510986328125, 0.030731201171875, 0.02288818359375, 0.0216522216796875, 0.020477294921875, -0.061859130859375, 0.023956298828125, 0.0132598876953125, -0.05584716796875, 0.03955078125, 0.017181396484375, 0.0016345977783203125, 0.05712890625, 0.061798095703125, 0.004566192626953125, 0.01165771484375, -0.00882720947265625, 0.08990478515625, -0.055206298828125, -0.031890869140625, -0.06707763671875, 0.046417236328125, -0.003635406494140625, -0.037841796875, 0.059844970703125, 0.016448974609375, 0.055450439453125, 0.004283905029296875, 0.05816650390625, -0.0128173828125, 0.01678466796875, -0.026702880859375, 0.048736572265625, -0.06231689453125, 0.00554656982421875, -0.02801513671875, -0.042022705078125, -0.0126953125, 0.0751953125, -0.01183319091796875, 0.01358795166015625, 0.033203125, 0.05126953125, 0.01873779296875, -0.01373291015625, -0.0041046142578125, 0.0243072509765625, 0.0195159912109375, 0.0718994140625, 0.06622314453125, -0.052734375, 0.05291748046875, -0.045623779296875, -0.006244659423828125, -0.0305633544921875, -0.05517578125, -0.06622314453125, -0.0301055908203125, -0.0115814208984375, -0.00910186767578125, -0.0159759521484375, 0.06597900390625, 0.039764404296875, -0.038909912109375, -0.0338134765625, 0.01171875, 0.019256591796875, -0.0300750732421875, -0.01068115234375, 0.049346923828125, -0.00783538818359375, -0.05743408203125, 0.02105712890625, -0.005649566650390625, 0.0249786376953125, -0.0113525390625, -0.017333984375, -0.03509521484375, 
-0.0049285888671875, 0.037353515625, 0.0219573974609375, -0.059356689453125, -0.00777435302734375, 0.0033779144287109375, -0.0104217529296875, 0.0206298828125, 0.00926971435546875, -0.055206298828125, -0.011962890625, 0.015625, 0.025634765625, 0.05279541015625, -0.0037670135498046875, 0.0017604827880859375, -0.033294677734375, 0.031463623046875, -0.0210723876953125, 0.036590576171875, 0.01465606689453125, -0.032440185546875, 0.05670166015625, 0.0234375, -0.018341064453125, -0.06781005859375, -0.01654052734375, -0.0814208984375, -0.0162506103515625, 0.07257080078125, -0.01654052734375, -0.03656005859375, 0.0298919677734375, -0.0361328125, 0.045562744140625, -0.0219879150390625, 0.056610107421875, 0.0478515625, -0.005962371826171875, -0.0050506591796875, -0.04443359375, 0.018280029296875, 0.0276947021484375, -0.06182861328125, -0.0260162353515625, 0.006526947021484375, 0.0279083251953125, 0.0140380859375, 0.06182861328125, 0.00787353515625, 0.01204681396484375, -0.0007910728454589844, 0.0161895751953125, -0.015380859375, 0.006591796875, -0.0020084381103515625, -0.0162353515625, -0.01085662841796875, -0.0271759033203125 ] ]
Corianas/Quokka_1.3b
2023-05-15T13:59:22.000Z
[ "transformers", "pytorch", "tensorboard", "safetensors", "gpt2", "text-generation", "en", "dataset:the_pile", "dataset:guanaco/guanaco", "arxiv:1910.09700", "license:apache-2.0", "endpoints_compatible", "has_space", "text-generation-inference", "region:us" ]
text-generation
Corianas
null
null
Corianas/Quokka_1.3b
0
5,855
transformers
2023-04-07T02:34:19
--- license: apache-2.0 datasets: - the_pile - guanaco/guanaco language: - en --- # Model Card for Cerebras 1.3b Dollyfied. This is a finetuned model of Cerebras 1.3b model. using DataBricksLabs Dolly Framework ## Model Details ### Model Description This is a finetuned version of cerebras' 1.3Billion paramater model that has been trained to follow instructions. It was accomplished using DataBricks Dolly training tools, and was trained for 2 epochs. - **Developed by:** Finetuned by Corianas (me) using open source tools - **Shared by [optional]:** [More Information Needed] - **Model type:** [More Information Needed] - **Language(s) (NLP):** EN - **License:** cc-by-nc-4.0 - **Finetuned from model:** https://huggingface.co/cerebras/Cerebras-GPT-111m - **Finetuned using:** https://www.databricks.com/blog/2023/03/24/hello-dolly-democratizing-magic-chatgpt-open-models.html ## Uses This is a simple GPT chatbot that has been finetuned to understand instructions. Its knowledge about facts about the world is should be considered suspect at best. ### Direct Use If you have a use you put it to, Please let me know. [More Information Needed] ### Downstream Use [optional] <!-- This section is for the model use when fine-tuned for a task, or when plugged into a larger ecosystem/app --> [More Information Needed] ### Out-of-Scope Use Any form of use where any form of accuracy is needed. FOR THE LOVE OF GOD DO NOT FOLLOW MEDICAL ADVICE FROM THIS. or financial advice. [More Information Needed] ## Bias, Risks, and Limitations Limitations... Yes, I am sure there are so so many. ## Environmental Impact <!-- Total emissions (in grams of CO2eq) and additional considerations, such as electricity usage, go here. Edit the suggested text below accordingly --> Carbon emissions can be estimated using the [Machine Learning Impact calculator](https://mlco2.github.io/impact#compute) presented in [Lacoste et al. (2019)](https://arxiv.org/abs/1910.09700). 
- **Hardware Type:** 8xA100s (accomplished while I was downloading the model I was actually training.) - **Minutes used:** 17 - **Cloud Provider:** LambdaGPU - **Compute Region:** USA - **Carbon Emitted:** [More Information Needed]
2,210
[ [ -0.025360107421875, -0.067626953125, 0.011962890625, 0.0233917236328125, -0.0191802978515625, -0.028106689453125, 0.01108551025390625, -0.0197601318359375, 0.01398468017578125, 0.06243896484375, -0.0487060546875, -0.0308074951171875, -0.038421630859375, -0.03326416015625, -0.040069580078125, 0.0830078125, 0.031005859375, 0.003986358642578125, -0.032012939453125, 0.016571044921875, -0.04412841796875, -0.047119140625, -0.05572509765625, -0.0265350341796875, 0.03033447265625, 0.0313720703125, 0.07916259765625, 0.0283660888671875, 0.03607177734375, 0.022216796875, -0.041717529296875, -0.01012420654296875, -0.04644775390625, -0.036285400390625, -0.0130157470703125, -0.0247802734375, -0.060638427734375, 0.00859832763671875, 0.0372314453125, 0.0421142578125, -0.017364501953125, 0.0491943359375, 0.0194549560546875, 0.043487548828125, -0.05059814453125, 0.0290374755859375, -0.04345703125, 0.005496978759765625, 0.0039215087890625, 0.003559112548828125, -0.0308380126953125, -0.03643798828125, -0.019256591796875, -0.041778564453125, 0.01554107666015625, -0.0019989013671875, 0.07952880859375, 0.0217132568359375, -0.029205322265625, -0.02154541015625, -0.06280517578125, 0.048828125, -0.033355712890625, 0.0179595947265625, 0.0276336669921875, 0.04376220703125, -0.022735595703125, -0.066162109375, -0.03857421875, -0.0261993408203125, -0.0149078369140625, 0.0145263671875, -0.01812744140625, 0.006755828857421875, 0.031982421875, 0.041717529296875, -0.046966552734375, -0.00511932373046875, -0.055450439453125, -0.021392822265625, 0.0443115234375, 0.0269012451171875, -0.0014276504516601562, -0.03680419921875, -0.031280517578125, -0.0227203369140625, -0.035308837890625, 0.01369476318359375, 0.044036865234375, 0.027191162109375, -0.0430908203125, 0.040496826171875, -0.0019054412841796875, 0.05731201171875, 0.00814056396484375, 0.0007333755493164062, 0.00368499755859375, -0.01352691650390625, -0.0343017578125, -0.0025768280029296875, 0.0504150390625, 0.0249176025390625, 
-0.0020961761474609375, 0.01073455810546875, 0.01229095458984375, 0.004993438720703125, 0.0258636474609375, -0.06640625, -0.046722412109375, 0.0237579345703125, -0.0416259765625, -0.0287322998046875, -0.0164794921875, -0.0645751953125, -0.0210418701171875, -0.03851318359375, 0.038360595703125, -0.0181732177734375, -0.0255279541015625, -0.0022830963134765625, -0.0002701282501220703, 0.0175628662109375, 0.0250244140625, -0.06884765625, 0.03460693359375, 0.035614013671875, 0.044677734375, 0.02276611328125, 0.0098876953125, -0.01535797119140625, -0.01885986328125, -0.006725311279296875, 0.035064697265625, -0.022064208984375, -0.050933837890625, -0.01352691650390625, 0.01532745361328125, 0.001186370849609375, -0.029571533203125, 0.059814453125, -0.031768798828125, 0.0193023681640625, -0.01483154296875, -0.037933349609375, -0.036346435546875, 0.012298583984375, -0.057769775390625, 0.07421875, 0.011566162109375, -0.048828125, 0.01528167724609375, -0.051605224609375, -0.00160980224609375, 0.003833770751953125, 0.01047515869140625, -0.040618896484375, -0.0181732177734375, 0.0075836181640625, 0.044281005859375, -0.0255279541015625, 0.0264434814453125, -0.0234222412109375, -0.00873565673828125, -0.00865936279296875, -0.040313720703125, 0.09088134765625, 0.0304412841796875, 0.0028743743896484375, 0.000728607177734375, -0.0693359375, 0.007659912109375, 0.0013151168823242188, -0.006591796875, 0.013824462890625, -0.0167999267578125, 0.04144287109375, 0.0251617431640625, 0.029754638671875, -0.030426025390625, 0.001735687255859375, -0.0010175704956054688, 0.0086212158203125, 0.037322998046875, 0.007289886474609375, 0.0181732177734375, -0.033905029296875, 0.05328369140625, -0.00707244873046875, 0.052276611328125, 0.00679779052734375, -0.046844482421875, -0.0579833984375, -0.01062774658203125, 0.022735595703125, 0.035675048828125, -0.0167083740234375, 0.035614013671875, -0.006885528564453125, -0.07122802734375, -0.02239990234375, -0.005649566650390625, 0.03717041015625, 
0.043182373046875, 0.045867919921875, -0.0045623779296875, -0.035064697265625, -0.07720947265625, 0.0013599395751953125, -0.006008148193359375, -0.000042378902435302734, 0.0277099609375, 0.051666259765625, -0.008453369140625, 0.06719970703125, -0.035736083984375, -0.005512237548828125, -0.0008873939514160156, 0.0095062255859375, 0.0201873779296875, 0.06201171875, 0.0460205078125, -0.07037353515625, -0.02044677734375, -0.006191253662109375, -0.05126953125, 0.02001953125, -0.0178985595703125, -0.0023937225341796875, 0.0027561187744140625, 0.0205535888671875, -0.04815673828125, 0.04754638671875, 0.05535888671875, -0.03387451171875, 0.041412353515625, -0.02435302734375, 0.0033721923828125, -0.0804443359375, 0.01947021484375, -0.0055389404296875, -0.0105438232421875, -0.0341796875, 0.0002608299255371094, 0.00807952880859375, -0.0185394287109375, -0.0606689453125, 0.055572509765625, -0.0159149169921875, 0.006198883056640625, -0.022735595703125, 0.0036182403564453125, -0.0019235610961914062, 0.04534912109375, -0.00475311279296875, 0.03228759765625, 0.04290771484375, -0.0732421875, 0.0247650146484375, 0.045806884765625, -0.01395416259765625, 0.0341796875, -0.047119140625, 0.0139007568359375, -0.011871337890625, 0.0278778076171875, -0.04425048828125, -0.0240325927734375, 0.01995849609375, -0.036895751953125, 0.0294952392578125, -0.0108642578125, -0.02178955078125, -0.041717529296875, 0.016693115234375, 0.00921630859375, 0.053741455078125, -0.053009033203125, 0.0504150390625, 0.036895751953125, 0.004322052001953125, -0.028472900390625, -0.046356201171875, 0.0035037994384765625, -0.0238494873046875, -0.055694580078125, 0.02728271484375, 0.0016355514526367188, -0.00745391845703125, -0.0160064697265625, -0.00952911376953125, -0.00568389892578125, -0.0016393661499023438, 0.040130615234375, 0.0207061767578125, 0.0186614990234375, 0.00492095947265625, -0.00466156005859375, -0.0233306884765625, 0.0175628662109375, -0.0017910003662109375, 0.03680419921875, -0.01776123046875, 
-0.0159149169921875, -0.0499267578125, 0.0006208419799804688, 0.042144775390625, 0.003376007080078125, 0.054534912109375, 0.04571533203125, -0.0438232421875, 0.0147705078125, -0.041778564453125, -0.0095672607421875, -0.037139892578125, 0.015045166015625, -0.0204620361328125, -0.042999267578125, 0.053741455078125, 0.007160186767578125, -0.00955963134765625, 0.05194091796875, 0.040924072265625, 0.00740814208984375, 0.05999755859375, 0.04327392578125, 0.01068878173828125, 0.033966064453125, -0.050140380859375, 0.0083465576171875, -0.061492919921875, -0.0379638671875, -0.052001953125, -0.01229095458984375, -0.07135009765625, -0.0287933349609375, 0.0027618408203125, 0.0226898193359375, -0.046539306640625, 0.056732177734375, -0.04669189453125, 0.013946533203125, 0.0377197265625, 0.0309906005859375, -0.00240325927734375, -0.02728271484375, 0.01117706298828125, 0.0135955810546875, -0.054779052734375, -0.06317138671875, 0.08209228515625, 0.0281219482421875, 0.05218505859375, 0.002712249755859375, 0.0386962890625, 0.0168609619140625, 0.0255126953125, -0.02789306640625, 0.05279541015625, -0.0096893310546875, -0.07232666015625, 0.0008325576782226562, -0.022247314453125, -0.07000732421875, 0.00040912628173828125, -0.02325439453125, -0.07720947265625, 0.02789306640625, 0.03155517578125, -0.0279541015625, 0.0217132568359375, -0.044647216796875, 0.0980224609375, -0.01200103759765625, -0.04730224609375, -0.0283966064453125, -0.045654296875, 0.032318115234375, 0.0035800933837890625, 0.01285552978515625, 0.0002963542938232422, 0.0296783447265625, 0.052001953125, -0.04937744140625, 0.06085205078125, -0.03369140625, 0.0120391845703125, 0.053863525390625, -0.005878448486328125, 0.05706787109375, 0.011962890625, -0.0095062255859375, 0.0127716064453125, 0.0146942138671875, -0.049346923828125, -0.027374267578125, 0.063232421875, -0.054931640625, -0.035675048828125, -0.03076171875, -0.03387451171875, -0.004901885986328125, -0.0158233642578125, 0.0262603759765625, 0.033447265625, 
-0.035736083984375, 0.019866943359375, 0.041168212890625, -0.020538330078125, 0.032073974609375, 0.0172882080078125, -0.0384521484375, -0.0296630859375, 0.05169677734375, -0.004512786865234375, 0.0212554931640625, 0.0233306884765625, 0.018829345703125, -0.039215087890625, -0.024871826171875, -0.00974273681640625, -0.012847900390625, -0.05694580078125, -0.009063720703125, -0.05999755859375, -0.0177001953125, -0.0211334228515625, -0.01104736328125, -0.033203125, -0.04461669921875, -0.0400390625, -0.01318359375, 0.046966552734375, 0.0299072265625, -0.01322174072265625, 0.048248291015625, -0.06268310546875, 0.01465606689453125, 0.04364013671875, 0.00649261474609375, -0.0108642578125, -0.0478515625, -0.0156402587890625, 0.0266265869140625, -0.05682373046875, -0.0528564453125, 0.0299072265625, -0.008270263671875, 0.035125732421875, 0.0267333984375, -0.0003077983856201172, 0.054962158203125, -0.004993438720703125, 0.0809326171875, 0.0212860107421875, -0.0606689453125, 0.04022216796875, -0.041748046875, 0.01474761962890625, 0.040496826171875, 0.02587890625, -0.0238037109375, -0.00693511962890625, -0.086181640625, -0.04571533203125, 0.049468994140625, 0.04547119140625, 0.0313720703125, -0.0068206787109375, 0.022552490234375, -0.00420379638671875, 0.006725311279296875, -0.0673828125, -0.0174407958984375, -0.025970458984375, 0.00902557373046875, -0.003902435302734375, -0.0031147003173828125, 0.00421142578125, -0.035797119140625, 0.07171630859375, 0.007511138916015625, 0.0254364013671875, -0.00434112548828125, 0.0022830963134765625, 0.001667022705078125, 0.007625579833984375, 0.033050537109375, 0.038238525390625, -0.043182373046875, -0.031890869140625, 0.021881103515625, -0.052337646484375, -0.0008816719055175781, 0.0181427001953125, -0.00579833984375, 0.0007481575012207031, -0.006366729736328125, 0.06719970703125, 0.011749267578125, -0.0192718505859375, 0.0234832763671875, -0.0014095306396484375, -0.016265869140625, -0.046539306640625, 0.0172119140625, 0.0165863037109375, 
0.018463134765625, -0.0084686279296875, 0.02978515625, 0.0134429931640625, -0.03302001953125, 0.0013971328735351562, 0.033905029296875, -0.05401611328125, -0.00489044189453125, 0.0836181640625, 0.023193359375, -0.040374755859375, 0.0433349609375, -0.0204010009765625, -0.017822265625, 0.0849609375, 0.0322265625, 0.07159423828125, -0.007648468017578125, 0.01508331298828125, 0.0635986328125, 0.0149993896484375, -0.005626678466796875, 0.022918701171875, 0.0042572021484375, -0.0214691162109375, -0.0203857421875, -0.058624267578125, -0.046478271484375, 0.02410888671875, -0.06134033203125, 0.06402587890625, -0.0328369140625, -0.0179290771484375, 0.00749969482421875, 0.0022258758544921875, -0.07513427734375, 0.042572021484375, 0.0174713134765625, 0.07318115234375, -0.06549072265625, 0.0616455078125, 0.038970947265625, -0.035858154296875, -0.07037353515625, -0.026123046875, 0.00464630126953125, -0.03582763671875, 0.036865234375, 0.01216888427734375, 0.007099151611328125, 0.0024738311767578125, -0.044036865234375, -0.06866455078125, 0.10003662109375, 0.0178070068359375, -0.058807373046875, 0.022857666015625, 0.0078277587890625, 0.037933349609375, -0.0211639404296875, 0.04925537109375, 0.035308837890625, 0.037933349609375, 0.023468017578125, -0.07086181640625, 0.0005955696105957031, -0.013031005859375, -0.002590179443359375, 0.01361846923828125, -0.06561279296875, 0.0662841796875, -0.01201629638671875, -0.0019483566284179688, 0.0171966552734375, 0.04864501953125, 0.01206207275390625, 0.034423828125, 0.0242919921875, 0.046600341796875, 0.06707763671875, 0.004974365234375, 0.08624267578125, -0.04241943359375, 0.056243896484375, 0.09027099609375, 0.0120849609375, 0.04901123046875, 0.020233154296875, -0.037353515625, 0.0273895263671875, 0.0640869140625, -0.0068206787109375, 0.03204345703125, 0.0279388427734375, -0.01090240478515625, -0.022186279296875, 0.002849578857421875, -0.034149169921875, 0.0312347412109375, 0.033843994140625, -0.041046142578125, -0.01555633544921875, 
0.0031909942626953125, 0.01044464111328125, -0.0017061233520507812, -0.040557861328125, 0.034149169921875, 0.00040721893310546875, -0.04351806640625, 0.05242919921875, -0.01338958740234375, 0.0190582275390625, -0.042449951171875, -0.0109100341796875, -0.012115478515625, 0.0214996337890625, -0.0283660888671875, -0.0340576171875, 0.0309906005859375, -0.00661468505859375, -0.0051422119140625, -0.01102447509765625, 0.0455322265625, -0.00417327880859375, -0.059600830078125, 0.022216796875, 0.0167999267578125, 0.0196075439453125, -0.0129241943359375, -0.057281494140625, -0.01103973388671875, 0.0077056884765625, -0.0157012939453125, 0.01708984375, -0.006198883056640625, -0.0001895427703857422, 0.0491943359375, 0.038055419921875, -0.035125732421875, -0.009674072265625, -0.0123748779296875, 0.0665283203125, -0.047149658203125, -0.0304718017578125, -0.04217529296875, 0.05712890625, -0.00960540771484375, -0.044036865234375, 0.050750732421875, 0.057952880859375, 0.046539306640625, -0.0153045654296875, 0.046356201171875, -0.0241241455078125, 0.0217437744140625, -0.035736083984375, 0.04132080078125, -0.0379638671875, 0.0159149169921875, -0.0303497314453125, -0.09130859375, -0.003818511962890625, 0.03692626953125, -0.0313720703125, 0.010772705078125, 0.050323486328125, 0.08148193359375, -0.0087127685546875, 0.0438232421875, 0.0277862548828125, 0.01546478271484375, 0.01247406005859375, 0.0178070068359375, 0.039398193359375, -0.053802490234375, 0.05133056640625, -0.036529541015625, -0.018035888671875, -0.0171661376953125, -0.07122802734375, -0.061981201171875, -0.03173828125, -0.04925537109375, -0.03997802734375, 0.01073455810546875, 0.061859130859375, 0.053466796875, -0.058624267578125, -0.0098114013671875, -0.0374755859375, -0.0023555755615234375, -0.02197265625, -0.01520538330078125, 0.044525146484375, -0.00902557373046875, -0.05645751953125, 0.004093170166015625, -0.011322021484375, 0.0099334716796875, -0.02069091796875, -0.00760650634765625, -0.0121002197265625, 
0.007099151611328125, 0.032012939453125, 0.02618408203125, -0.038604736328125, -0.0322265625, -0.006534576416015625, -0.005184173583984375, -0.004608154296875, 0.0211181640625, -0.06463623046875, 0.0307464599609375, 0.0273895263671875, 0.002017974853515625, 0.07891845703125, -0.00249481201171875, 0.01226043701171875, -0.03314208984375, 0.022186279296875, 0.031463623046875, 0.0284423828125, 0.0200653076171875, -0.036102294921875, 0.038055419921875, -0.0004401206970214844, -0.07269287109375, -0.0282135009765625, 0.00858306884765625, -0.09344482421875, -0.007724761962890625, 0.09063720703125, 0.006031036376953125, -0.02410888671875, -0.0114593505859375, -0.03155517578125, 0.01258087158203125, -0.0173187255859375, 0.056060791015625, 0.04229736328125, -0.009796142578125, -0.0007424354553222656, -0.062744140625, 0.03265380859375, 0.01470184326171875, -0.0562744140625, -0.0010395050048828125, 0.047882080078125, 0.00780487060546875, 0.0218048095703125, 0.029449462890625, -0.01334381103515625, 0.0252838134765625, 0.032562255859375, 0.03424072265625, 0.005664825439453125, -0.0190582275390625, -0.0211334228515625, -0.000027894973754882812, -0.0233917236328125, -0.032379150390625 ] ]
bofenghuang/vigogne-2-13b-instruct
2023-08-01T16:49:35.000Z
[ "transformers", "pytorch", "safetensors", "llama", "text-generation", "LLM", "llama-2", "fr", "has_space", "text-generation-inference", "region:us" ]
text-generation
bofenghuang
null
null
bofenghuang/vigogne-2-13b-instruct
12
5,855
transformers
2023-07-26T13:38:14
--- language: - fr pipeline_tag: text-generation library_name: transformers inference: false tags: - LLM - llama - llama-2 --- <p align="center" width="100%"> <img src="https://huggingface.co/bofenghuang/vigogne-2-13b-instruct/resolve/main/vigogne_logo.png" alt="Vigogne" style="width: 40%; min-width: 300px; display: block; margin: auto;"> </p> # Vigogne-2-13B-Instruct: A Llama-2 based French instruction-following model Vigogne-2-13B-Instruct is a model based on [LLaMA-2-13B](https://ai.meta.com/llama) that has been fine-tuned to follow French instructions. For more information, please visit the Github repo: https://github.com/bofenghuang/vigogne **Usage and License Notices**: Vigogne-2-13B-Instruct follows the same usage policy as Llama-2, which can be found [here](https://ai.meta.com/llama/use-policy). ## Usage ```python import torch from transformers import AutoModelForCausalLM, AutoTokenizer, GenerationConfig from vigogne.preprocess import generate_instruct_prompt model_name_or_path = "bofenghuang/vigogne-2-13b-instruct" tokenizer = AutoTokenizer.from_pretrained(model_name_or_path, padding_side="right", use_fast=False) model = AutoModelForCausalLM.from_pretrained(model_name_or_path, torch_dtype=torch.float16, device_map="auto") user_query = "Expliquez la différence entre DoS et phishing." prompt = generate_instruct_prompt(user_query) input_ids = tokenizer(prompt, return_tensors="pt")["input_ids"].to(model.device) input_length = input_ids.shape[1] generated_outputs = model.generate( input_ids=input_ids, generation_config=GenerationConfig( temperature=0.1, do_sample=True, repetition_penalty=1.0, max_new_tokens=512, ), return_dict_in_generate=True, ) generated_tokens = generated_outputs.sequences[0, input_length:] generated_text = tokenizer.decode(generated_tokens, skip_special_tokens=True) print(generated_text) ``` You can also infer this model by using the following Google Colab Notebook. 
<a href="https://colab.research.google.com/github/bofenghuang/vigogne/blob/main/notebooks/infer_instruct.ipynb" target="_blank"><img src="https://colab.research.google.com/assets/colab-badge.svg" alt="Open In Colab"/></a> ## Example Outputs *todo* ## Limitations Vigogne is still under development, and there are many limitations that have to be addressed. Please note that it is possible that the model generates harmful or biased content, incorrect information or generally unhelpful answers.
2,483
[ [ -0.0204925537109375, -0.055511474609375, 0.024932861328125, 0.0250701904296875, -0.0168304443359375, -0.00698089599609375, -0.0038013458251953125, -0.03680419921875, -0.000024139881134033203, 0.0161895751953125, -0.05767822265625, -0.04248046875, -0.0501708984375, 0.001010894775390625, -0.015777587890625, 0.05609130859375, 0.00524139404296875, -0.00301361083984375, -0.0223846435546875, 0.023468017578125, -0.0275726318359375, -0.033416748046875, -0.042816162109375, -0.041748046875, 0.0139617919921875, 0.015106201171875, 0.0287628173828125, 0.0298004150390625, 0.032318115234375, 0.02764892578125, -0.018798828125, 0.03948974609375, -0.0379638671875, -0.006954193115234375, 0.0006470680236816406, -0.027496337890625, -0.047027587890625, 0.0114898681640625, 0.022674560546875, -0.0005373954772949219, 0.005950927734375, 0.025543212890625, 0.0178985595703125, 0.0037899017333984375, -0.0232696533203125, 0.0164031982421875, -0.043182373046875, -0.0233154296875, 0.00986480712890625, -0.011688232421875, -0.038665771484375, -0.0185699462890625, 0.0028705596923828125, -0.0430908203125, 0.034820556640625, -0.00719451904296875, 0.1119384765625, 0.008575439453125, -0.0308380126953125, -0.023406982421875, -0.04046630859375, 0.055511474609375, -0.0537109375, -0.0008554458618164062, 0.0286102294921875, 0.00194549560546875, -0.021697998046875, -0.07464599609375, -0.04522705078125, -0.01404571533203125, -0.0190582275390625, 0.00977325439453125, -0.027435302734375, -0.015106201171875, 0.0198974609375, 0.0173492431640625, -0.053497314453125, -0.000018477439880371094, -0.053741455078125, -0.018829345703125, 0.059478759765625, 0.0194549560546875, 0.021759033203125, -0.03594970703125, -0.048919677734375, -0.0185546875, -0.046630859375, 0.01343536376953125, 0.0209503173828125, 0.0113677978515625, -0.035552978515625, 0.04254150390625, -0.006778717041015625, 0.046478271484375, 0.027496337890625, -0.0104217529296875, 0.047760009765625, -0.01837158203125, -0.0256195068359375, 
-0.01116180419921875, 0.0770263671875, 0.034515380859375, 0.004123687744140625, -0.0034809112548828125, -0.00589752197265625, -0.01065826416015625, 0.003406524658203125, -0.07806396484375, -0.0178985595703125, 0.0216522216796875, -0.0268707275390625, -0.041015625, 0.01605224609375, -0.0421142578125, -0.0237579345703125, -0.00284576416015625, 0.0667724609375, -0.0220794677734375, -0.01227569580078125, 0.00894927978515625, -0.00821685791015625, 0.04290771484375, 0.00856781005859375, -0.063720703125, -0.0081939697265625, 0.02349853515625, 0.06494140625, 0.021087646484375, -0.016265869140625, -0.018707275390625, 0.0056915283203125, 0.004314422607421875, 0.0501708984375, -0.02191162109375, -0.053741455078125, -0.019927978515625, 0.0294342041015625, -0.0178375244140625, -0.0264892578125, 0.01611328125, -0.023834228515625, 0.029571533203125, -0.017333984375, -0.0286102294921875, -0.0254058837890625, 0.002735137939453125, -0.0285186767578125, 0.0880126953125, 0.00914764404296875, -0.053466796875, 0.00835418701171875, -0.0256195068359375, -0.0273590087890625, -0.0129852294921875, 0.00408935546875, -0.05072021484375, -0.0031414031982421875, 0.0249786376953125, 0.03607177734375, -0.03851318359375, -0.005767822265625, -0.0218505859375, -0.0244293212890625, 0.0193023681640625, -0.027252197265625, 0.0911865234375, 0.007160186767578125, -0.041900634765625, 0.0142364501953125, -0.06414794921875, 0.0007538795471191406, 0.044891357421875, -0.0183563232421875, 0.0012569427490234375, -0.01788330078125, -0.002643585205078125, 0.0137176513671875, 0.0322265625, -0.0267333984375, 0.029571533203125, -0.0204010009765625, 0.038818359375, 0.061859130859375, 0.005535125732421875, 0.03570556640625, -0.0243072509765625, 0.032989501953125, -0.01092529296875, 0.031280517578125, 0.0010652542114257812, -0.043304443359375, -0.07318115234375, -0.033599853515625, 0.00429534912109375, 0.06005859375, -0.045867919921875, 0.032257080078125, -0.0019626617431640625, -0.059326171875, -0.02459716796875, 
0.01438140869140625, 0.0232391357421875, 0.043701171875, 0.033355712890625, -0.007404327392578125, -0.03375244140625, -0.043701171875, 0.0150909423828125, -0.00989532470703125, -0.00856781005859375, 0.029266357421875, 0.04986572265625, -0.0180206298828125, 0.043792724609375, -0.046173095703125, -0.01110076904296875, -0.01617431640625, 0.006931304931640625, 0.040924072265625, 0.04534912109375, 0.0513916015625, -0.037994384765625, -0.0300445556640625, -0.01265716552734375, -0.0765380859375, -0.00521087646484375, 0.0004374980926513672, -0.02490234375, -0.00902557373046875, 0.030975341796875, -0.061767578125, 0.044525146484375, 0.034912109375, -0.052215576171875, 0.032318115234375, -0.02056884765625, 0.004863739013671875, -0.09405517578125, -0.004856109619140625, 0.006011962890625, -0.01079559326171875, -0.03765869140625, -0.0081634521484375, 0.00962066650390625, 0.0086669921875, -0.045379638671875, 0.0533447265625, -0.041778564453125, 0.032806396484375, -0.0193634033203125, -0.005344390869140625, 0.018646240234375, 0.024383544921875, -0.007709503173828125, 0.0584716796875, 0.06005859375, -0.058074951171875, 0.04705810546875, 0.02508544921875, -0.021148681640625, 0.0212554931640625, -0.06939697265625, 0.024139404296875, -0.00194549560546875, 0.004695892333984375, -0.07635498046875, -0.0245513916015625, 0.0501708984375, -0.0596923828125, 0.032989501953125, -0.01953125, -0.043121337890625, -0.0401611328125, -0.006938934326171875, 0.027191162109375, 0.06329345703125, -0.056243896484375, 0.0517578125, 0.0157928466796875, 0.00646209716796875, -0.04193115234375, -0.04931640625, -0.0240478515625, -0.03173828125, -0.06414794921875, 0.035400390625, 0.004131317138671875, -0.0027408599853515625, -0.0066680908203125, 0.003772735595703125, -0.00643157958984375, 0.003101348876953125, 0.030975341796875, 0.0469970703125, -0.00968170166015625, 0.004550933837890625, -0.0066375732421875, -0.00934600830078125, 0.01351165771484375, -0.0234527587890625, 0.0604248046875, -0.0251617431640625, 
-0.003231048583984375, -0.0498046875, -0.0094757080078125, 0.03350830078125, -0.028717041015625, 0.07513427734375, 0.05584716796875, -0.0305633544921875, -0.0180206298828125, -0.02923583984375, -0.0284271240234375, -0.043853759765625, 0.01387786865234375, -0.0194244384765625, -0.0249786376953125, 0.049102783203125, 0.0105133056640625, 0.0017766952514648438, 0.049102783203125, 0.048980712890625, -0.0123443603515625, 0.06524658203125, 0.0482177734375, 0.0236663818359375, 0.041900634765625, -0.06768798828125, 0.008331298828125, -0.060211181640625, -0.057708740234375, -0.0279541015625, -0.0013685226440429688, -0.034759521484375, -0.048583984375, 0.020904541015625, 0.0131378173828125, -0.0289306640625, 0.053741455078125, -0.05218505859375, 0.014923095703125, 0.052215576171875, 0.027130126953125, -0.007049560546875, 0.017303466796875, -0.00846099853515625, 0.0133056640625, -0.048583984375, -0.04254150390625, 0.07952880859375, 0.015869140625, 0.07208251953125, 0.0017728805541992188, 0.053497314453125, -0.0018148422241210938, 0.0204010009765625, -0.037994384765625, 0.034698486328125, 0.011383056640625, -0.05047607421875, -0.00489044189453125, -0.0184173583984375, -0.067626953125, 0.023162841796875, -0.0209808349609375, -0.07305908203125, 0.02191162109375, 0.027252197265625, -0.020477294921875, 0.018096923828125, -0.05169677734375, 0.055999755859375, -0.0271453857421875, -0.04901123046875, 0.01010894775390625, -0.0207366943359375, 0.023345947265625, 0.0186614990234375, -0.0002789497375488281, -0.00811767578125, 0.032318115234375, 0.05157470703125, -0.044464111328125, 0.06866455078125, -0.00540924072265625, -0.00867462158203125, 0.0545654296875, -0.015960693359375, 0.050811767578125, 0.0205230712890625, -0.0167999267578125, 0.0212554931640625, -0.0244140625, -0.024993896484375, -0.02435302734375, 0.052398681640625, -0.058807373046875, -0.055511474609375, -0.045806884765625, -0.027374267578125, 0.01480865478515625, 0.0113983154296875, 0.07122802734375, 0.020263671875, 
0.0062103271484375, 0.003002166748046875, 0.0567626953125, -0.02734375, 0.0300445556640625, 0.01213836669921875, -0.0111846923828125, -0.03515625, 0.0675048828125, -0.008636474609375, 0.01806640625, 0.0102996826171875, 0.0066375732421875, -0.03564453125, -0.006549835205078125, -0.03466796875, 0.03741455078125, -0.055816650390625, -0.033203125, -0.03375244140625, -0.0302734375, -0.03680419921875, -0.0196380615234375, -0.0277252197265625, -0.01397705078125, -0.06341552734375, -0.0166015625, 0.055450439453125, 0.03350830078125, -0.01336669921875, 0.038360595703125, -0.0277557373046875, 0.0197601318359375, 0.0304107666015625, -0.0032711029052734375, -0.0029087066650390625, -0.06817626953125, 0.0006284713745117188, 0.0073089599609375, -0.0352783203125, -0.06414794921875, 0.029876708984375, -0.00815582275390625, 0.03826904296875, 0.02764892578125, -0.0020771026611328125, 0.05206298828125, -0.01345062255859375, 0.0535888671875, 0.0305633544921875, -0.0753173828125, 0.0435791015625, -0.008056640625, 0.0129241943359375, -0.0009360313415527344, 0.007190704345703125, -0.01421356201171875, -0.021820068359375, -0.04681396484375, -0.081787109375, 0.055328369140625, 0.034149169921875, 0.0240325927734375, -0.0010585784912109375, 0.019073486328125, -0.01358795166015625, -0.006717681884765625, -0.071533203125, -0.0338134765625, -0.028076171875, -0.01113128662109375, 0.00032639503479003906, -0.018035888671875, -0.00687408447265625, -0.030364990234375, 0.0687255859375, 0.01451873779296875, 0.048248291015625, 0.022064208984375, -0.02032470703125, 0.0081787109375, -0.0157623291015625, 0.0550537109375, 0.045684814453125, -0.0171966552734375, -0.0005245208740234375, 0.0261993408203125, -0.031280517578125, 0.017852783203125, 0.016448974609375, -0.02813720703125, 0.00960540771484375, 0.024505615234375, 0.0653076171875, -0.02093505859375, -0.01218414306640625, 0.03521728515625, -0.0101318359375, -0.0294952392578125, -0.034423828125, 0.0242767333984375, 0.020416259765625, 0.0400390625, 
0.018829345703125, -0.00982666015625, -0.0113372802734375, -0.015838623046875, 0.0021152496337890625, 0.042144775390625, -0.02288818359375, -0.0213775634765625, 0.09405517578125, 0.01213836669921875, -0.012542724609375, 0.04901123046875, -0.013946533203125, -0.028778076171875, 0.07958984375, 0.049102783203125, 0.060546875, -0.006092071533203125, 0.018280029296875, 0.053314208984375, 0.033843994140625, -0.0020351409912109375, 0.0294342041015625, 0.01198577880859375, -0.0264129638671875, 0.00043010711669921875, -0.0455322265625, 0.0132904052734375, 0.02630615234375, -0.04290771484375, 0.045379638671875, -0.039215087890625, -0.0287628173828125, -0.004711151123046875, -0.010772705078125, -0.07781982421875, 0.0255279541015625, 0.0108642578125, 0.0552978515625, -0.05902099609375, 0.052337646484375, 0.03753662109375, -0.047119140625, -0.069091796875, -0.004863739013671875, 0.00960540771484375, -0.0772705078125, 0.050811767578125, 0.020477294921875, -0.0269012451171875, 0.00215911865234375, -0.06207275390625, -0.0765380859375, 0.10821533203125, 0.0197906494140625, -0.0263214111328125, -0.004177093505859375, -0.01363372802734375, 0.0222015380859375, -0.0242767333984375, 0.020355224609375, 0.00782012939453125, 0.026611328125, 0.018280029296875, -0.06207275390625, 0.0231475830078125, -0.0242767333984375, -0.004154205322265625, -0.00811004638671875, -0.07122802734375, 0.08148193359375, -0.037384033203125, -0.0157623291015625, 0.0222930908203125, 0.0675048828125, 0.017852783203125, 0.0206451416015625, 0.01885986328125, 0.023590087890625, 0.06256103515625, -0.0149688720703125, 0.0699462890625, -0.0270233154296875, 0.07171630859375, 0.059295654296875, 0.0050048828125, 0.0286712646484375, 0.02276611328125, -0.0009031295776367188, 0.037994384765625, 0.059234619140625, -0.043365478515625, 0.047027587890625, 0.022308349609375, -0.0250244140625, -0.0158233642578125, 0.01230621337890625, -0.052154541015625, 0.03570556640625, 0.0267333984375, -0.051849365234375, -0.0010614395141601562, 
-0.011749267578125, 0.0246124267578125, -0.0333251953125, -0.007076263427734375, 0.02490234375, 0.01030731201171875, -0.0297393798828125, 0.07342529296875, 0.006786346435546875, 0.046630859375, -0.04345703125, 0.0009927749633789062, -0.0147552490234375, 0.0204925537109375, -0.0176544189453125, -0.03485107421875, -0.01323699951171875, 0.0162811279296875, -0.01546478271484375, 0.0081329345703125, 0.044647216796875, -0.02008056640625, -0.06854248046875, 0.0181732177734375, 0.0085296630859375, 0.017578125, 0.02264404296875, -0.0457763671875, 0.00409698486328125, 0.008819580078125, -0.0408935546875, -0.00890350341796875, 0.0121307373046875, 0.010528564453125, 0.0531005859375, 0.042755126953125, -0.0136871337890625, 0.02154541015625, -0.01324462890625, 0.0623779296875, -0.037200927734375, -0.0128173828125, -0.07159423828125, 0.05645751953125, -0.0128021240234375, -0.0307464599609375, 0.0673828125, 0.041107177734375, 0.0731201171875, -0.0167083740234375, 0.06732177734375, -0.0253143310546875, -0.00769805908203125, -0.05206298828125, 0.05328369140625, -0.05389404296875, 0.019134521484375, -0.0208740234375, -0.059295654296875, -0.0123443603515625, 0.0836181640625, -0.0211334228515625, 0.01338958740234375, 0.041107177734375, 0.090087890625, -0.0157318115234375, -0.0343017578125, 0.0026721954345703125, 0.0194244384765625, 0.035491943359375, 0.040374755859375, 0.0513916015625, -0.051849365234375, 0.054168701171875, -0.038482666015625, -0.01380157470703125, -0.02630615234375, -0.0562744140625, -0.09173583984375, -0.0374755859375, -0.0249786376953125, -0.0701904296875, -0.00759124755859375, 0.06500244140625, 0.061187744140625, -0.0491943359375, -0.0113677978515625, 0.0006732940673828125, 0.01387786865234375, -0.0128326416015625, -0.0168609619140625, 0.045166015625, -0.0013513565063476562, -0.0718994140625, 0.01486968994140625, -0.0001398324966430664, 0.036376953125, 0.00782012939453125, -0.015045166015625, -0.0050201416015625, 0.01824951171875, 0.0369873046875, 0.0257568359375, 
-0.05474853515625, -0.01337432861328125, 0.01427459716796875, -0.0254058837890625, 0.0009074211120605469, 0.020660400390625, -0.034149169921875, 0.0293121337890625, 0.042236328125, -0.0023822784423828125, 0.050567626953125, -0.0022602081298828125, 0.0269622802734375, -0.02838134765625, 0.024688720703125, -0.01557159423828125, 0.035186767578125, 0.01678466796875, -0.039398193359375, 0.0216827392578125, 0.029144287109375, -0.034912109375, -0.05712890625, 0.00841522216796875, -0.0753173828125, -0.0240325927734375, 0.104736328125, -0.006999969482421875, -0.0251617431640625, 0.0243072509765625, -0.027496337890625, 0.038909912109375, -0.02392578125, 0.0653076171875, 0.036895751953125, -0.015655517578125, -0.001010894775390625, -0.035552978515625, 0.0203704833984375, 0.005802154541015625, -0.048553466796875, -0.0171661376953125, 0.018157958984375, 0.050323486328125, 0.006916046142578125, 0.0303192138671875, 0.0016269683837890625, 0.0292510986328125, 0.01537322998046875, 0.024932861328125, -0.022125244140625, -0.0193023681640625, -0.0037441253662109375, -0.0077667236328125, -0.01666259765625, -0.038482666015625 ] ]
ehartford/WizardLM-13B-Uncensored
2023-05-12T23:08:43.000Z
[ "transformers", "pytorch", "llama", "text-generation", "uncensored", "dataset:ehartford/WizardLM_alpaca_evol_instruct_70k_unfiltered", "license:other", "endpoints_compatible", "has_space", "text-generation-inference", "region:us" ]
text-generation
ehartford
null
null
ehartford/WizardLM-13B-Uncensored
439
5,854
transformers
2023-05-09T18:56:32
--- license: other datasets: - ehartford/WizardLM_alpaca_evol_instruct_70k_unfiltered tags: - uncensored --- This is WizardLM trained with a subset of the dataset - responses that contained alignment / moralizing were removed. The intent is to train a WizardLM that doesn't have alignment built-in, so that alignment (of any sort) can be added separately with for example with a RLHF LoRA. Shout out to the open source AI/ML community, and everyone who helped me out. Note: An uncensored model has no guardrails. You are responsible for anything you do with the model, just as you are responsible for anything you do with any dangerous object such as a knife, gun, lighter, or car. Publishing anything this model generates is the same as publishing it yourself. You are responsible for the content you publish, and you cannot blame the model any more than you can blame the knife, gun, lighter, or car for what you do with it.
933
[ [ -0.0215606689453125, -0.0498046875, 0.00110626220703125, -0.00726318359375, -0.0308990478515625, -0.0262603759765625, 0.020904541015625, -0.023223876953125, 0.0021228790283203125, 0.0791015625, -0.053741455078125, -0.03564453125, -0.035247802734375, -0.0016717910766601562, -0.053466796875, 0.12200927734375, 0.0090789794921875, 0.028106689453125, -0.02813720703125, -0.0147705078125, -0.024932861328125, -0.039154052734375, -0.01375579833984375, -0.033203125, 0.042449951171875, 0.005954742431640625, 0.0601806640625, 0.06475830078125, 0.0406494140625, 0.0197296142578125, 0.0055389404296875, 0.00042176246643066406, -0.062225341796875, -0.009613037109375, -0.036163330078125, 0.002712249755859375, -0.054107666015625, 0.03802490234375, 0.0264434814453125, 0.0460205078125, -0.0225677490234375, 0.04644775390625, 0.010894775390625, 0.0640869140625, -0.0743408203125, -0.011138916015625, -0.03955078125, 0.017547607421875, 0.0012989044189453125, 0.0019330978393554688, -0.038543701171875, -0.03131103515625, -0.0031566619873046875, -0.075927734375, 0.01358795166015625, 0.0285491943359375, 0.07049560546875, 0.043609619140625, -0.04595947265625, -0.0017881393432617188, -0.052276611328125, 0.05029296875, -0.04052734375, 0.0019474029541015625, 0.03741455078125, 0.031646728515625, -0.01061248779296875, -0.033905029296875, -0.036651611328125, 0.021331787109375, -0.00670623779296875, 0.0134429931640625, 0.01025390625, 0.00524139404296875, 0.0172119140625, 0.011688232421875, -0.040130615234375, 0.00811767578125, -0.038421630859375, -0.0192108154296875, 0.07537841796875, 0.0305633544921875, 0.0153656005859375, -0.00754547119140625, -0.04730224609375, -0.009857177734375, -0.050262451171875, 0.01421356201171875, 0.04791259765625, 0.028228759765625, -0.01922607421875, 0.09588623046875, 0.01364898681640625, 0.048187255859375, 0.01293182373046875, -0.0101776123046875, 0.017303466796875, -0.003681182861328125, -0.04412841796875, 0.017669677734375, 0.068359375, 0.052276611328125, 
0.038238525390625, -0.016845703125, -0.01092529296875, -0.018096923828125, 0.037628173828125, -0.049102783203125, -0.010894775390625, 0.028106689453125, -0.045654296875, -0.0355224609375, 0.0003917217254638672, -0.034027099609375, -0.057830810546875, -0.033477783203125, 0.027801513671875, -0.0206451416015625, -0.0168914794921875, 0.0164337158203125, -0.0255126953125, 0.039642333984375, 0.024810791015625, -0.049224853515625, 0.003261566162109375, 0.048980712890625, 0.023895263671875, 0.0031337738037109375, -0.03125, -0.039459228515625, 0.0309906005859375, -0.0439453125, 0.039093017578125, -0.0213470458984375, -0.04547119140625, 0.0055084228515625, 0.0157318115234375, -0.0011644363403320312, -0.0197906494140625, 0.041412353515625, -0.0555419921875, 0.0214691162109375, -0.0186309814453125, -0.055877685546875, -0.0255279541015625, 0.01197052001953125, -0.043853759765625, 0.038482666015625, -0.0015583038330078125, -0.07818603515625, 0.03228759765625, -0.04412841796875, -0.011505126953125, -0.0199127197265625, -0.01215362548828125, -0.04156494140625, -0.015045166015625, -0.006954193115234375, -0.004100799560546875, 0.00568389892578125, 0.0340576171875, -0.046905517578125, -0.02545166015625, 0.0287933349609375, -0.0396728515625, 0.094482421875, 0.00836944580078125, -0.0086822509765625, 0.0079193115234375, -0.07568359375, 0.00395965576171875, 0.01371002197265625, -0.023101806640625, -0.0166778564453125, -0.0031585693359375, 0.01904296875, 0.019989013671875, 0.030181884765625, -0.06268310546875, 0.016082763671875, -0.03607177734375, -0.0390625, 0.075927734375, -0.0015277862548828125, 0.04803466796875, -0.0159912109375, 0.023834228515625, -0.0005588531494140625, 0.0284576416015625, 0.052520751953125, -0.0306243896484375, -0.04571533203125, -0.0220794677734375, 0.01268768310546875, 0.044342041015625, -0.038818359375, 0.06396484375, -0.005218505859375, -0.044586181640625, -0.04254150390625, 0.0035877227783203125, 0.036102294921875, 0.06829833984375, 0.0164337158203125, 
0.0005197525024414062, -0.0302276611328125, -0.08056640625, -0.0211334228515625, -0.006031036376953125, -0.003353118896484375, -0.02020263671875, 0.0357666015625, 0.01277923583984375, 0.0831298828125, -0.034515380859375, -0.0286712646484375, 0.010772705078125, -0.00101470947265625, 0.0017099380493164062, 0.058502197265625, 0.041473388671875, -0.049835205078125, -0.03985595703125, -0.004283905029296875, -0.099609375, 0.0014181137084960938, 0.016021728515625, -0.0276031494140625, 0.0008459091186523438, 0.0008788108825683594, -0.058197021484375, 0.0748291015625, 0.01540374755859375, -0.045257568359375, 0.039031982421875, -0.0198211669921875, 0.0015716552734375, -0.071044921875, 0.01544952392578125, -0.01776123046875, -0.011566162109375, -0.053466796875, -0.0081634521484375, 0.00276947021484375, -0.0276947021484375, -0.041656494140625, 0.047515869140625, -0.00231170654296875, -0.0154266357421875, -0.04315185546875, 0.00432586669921875, 0.0272674560546875, 0.0380859375, 0.0170745849609375, 0.045623779296875, 0.04571533203125, -0.04296875, 0.02886962890625, 0.049224853515625, -0.006809234619140625, 0.055755615234375, -0.04388427734375, 0.0038661956787109375, -0.040802001953125, -0.0015230178833007812, -0.023284912109375, -0.015045166015625, 0.048675537109375, -0.0308074951171875, 0.01751708984375, -0.01294708251953125, -0.017669677734375, -0.01180267333984375, -0.0225677490234375, 0.016845703125, 0.037445068359375, -0.03515625, 0.0472412109375, 0.0305328369140625, 0.03558349609375, -0.07867431640625, -0.054962158203125, -0.0450439453125, -0.0406494140625, -0.0260772705078125, -0.0055694580078125, -0.00732421875, -0.043701171875, 0.01424407958984375, -0.003376007080078125, -0.015899658203125, 0.00353240966796875, 0.032012939453125, 0.039794921875, -0.0028171539306640625, 0.01120758056640625, -0.0100555419921875, -0.00412750244140625, 0.013214111328125, 0.014251708984375, 0.0050811767578125, 0.01568603515625, -0.050140380859375, -0.044158935546875, 0.049072265625, 
0.0195159912109375, -0.0308685302734375, 0.06658935546875, 0.045501708984375, -0.01800537109375, 0.004886627197265625, -0.006343841552734375, -0.0093536376953125, -0.0389404296875, 0.01331329345703125, 0.008758544921875, -0.049163818359375, 0.03570556640625, 0.0469970703125, 0.040802001953125, 0.0394287109375, 0.0290985107421875, -0.0280609130859375, 0.08074951171875, 0.046295166015625, 0.00925445556640625, 0.015960693359375, 0.01104736328125, 0.0230712890625, -0.06085205078125, -0.037933349609375, -0.04071044921875, -0.0133819580078125, -0.056671142578125, 0.0089263916015625, 0.02325439453125, 0.01611328125, -0.074951171875, 0.024383544921875, -0.05511474609375, 0.0311279296875, 0.0282440185546875, 0.00713348388671875, 0.030303955078125, -0.0008845329284667969, 0.0296630859375, -0.0006918907165527344, -0.0350341796875, -0.03643798828125, 0.0950927734375, 0.00676727294921875, 0.10125732421875, 0.0094451904296875, 0.051300048828125, 0.04351806640625, 0.01531219482421875, -0.061492919921875, 0.04315185546875, -0.00501251220703125, -0.06292724609375, -0.034149169921875, -0.026336669921875, -0.0855712890625, 0.0290679931640625, -0.0157318115234375, -0.06671142578125, 0.0164642333984375, 0.020172119140625, -0.023101806640625, 0.03460693359375, -0.04119873046875, 0.052978515625, -0.02117919921875, -0.020050048828125, 0.002002716064453125, -0.042236328125, 0.03228759765625, -0.003894805908203125, 0.00977325439453125, -0.03253173828125, 0.00902557373046875, 0.07049560546875, -0.057952880859375, 0.08331298828125, -0.020965576171875, -0.010711669921875, 0.036529541015625, -0.0016727447509765625, 0.0401611328125, -0.00969696044921875, 0.0101318359375, -0.018218994140625, 0.016021728515625, -0.036224365234375, -0.0355224609375, 0.032135009765625, -0.08734130859375, -0.0640869140625, -0.04241943359375, -0.046356201171875, -0.00914764404296875, 0.023468017578125, 0.0190582275390625, 0.031585693359375, -0.01953125, -0.007747650146484375, 0.0574951171875, -0.0011911392211914062, 
0.033935546875, 0.041046142578125, -0.0445556640625, -0.02154541015625, 0.049468994140625, 0.0087890625, 0.00862884521484375, -0.010009765625, 0.007274627685546875, -0.03582763671875, -0.0171051025390625, -0.036224365234375, 0.0093994140625, -0.0811767578125, -0.0149688720703125, -0.040374755859375, -0.0445556640625, -0.05010986328125, -0.0107879638671875, -0.042388916015625, -0.03314208984375, -0.039398193359375, -0.016571044921875, 0.0570068359375, 0.07208251953125, -0.010894775390625, 0.0204010009765625, -0.06085205078125, 0.01172637939453125, 0.020111083984375, 0.0008530616760253906, -0.01328277587890625, -0.039520263671875, -0.024261474609375, 0.0090484619140625, -0.028045654296875, -0.03369140625, 0.021209716796875, -0.01041412353515625, 0.0491943359375, 0.038604736328125, 0.04620361328125, 0.04071044921875, -0.0389404296875, 0.055511474609375, 0.0254974365234375, -0.05078125, 0.01776123046875, -0.0292510986328125, 0.00036215782165527344, 0.03753662109375, 0.017791748046875, -0.00809478759765625, -0.0218505859375, -0.046417236328125, -0.027069091796875, 0.0377197265625, 0.0166168212890625, 0.0163116455078125, 0.01605224609375, 0.01538848876953125, 0.00974273681640625, 0.0276031494140625, -0.06781005859375, -0.03521728515625, -0.054534912109375, -0.00360870361328125, 0.020782470703125, 0.0088043212890625, -0.052398681640625, -0.022857666015625, 0.0692138671875, -0.004913330078125, 0.0031337738037109375, 0.0094451904296875, -0.00312042236328125, -0.01611328125, 0.005275726318359375, 0.025299072265625, 0.0462646484375, -0.0187530517578125, -0.01094818115234375, -0.01561737060546875, -0.0321044921875, 0.020233154296875, 0.00032067298889160156, -0.01061248779296875, -0.0238800048828125, 0.0289459228515625, 0.051727294921875, -0.0260772705078125, -0.0274505615234375, 0.0504150390625, 0.006500244140625, -0.007762908935546875, -0.038970947265625, 0.01593017578125, -0.0088043212890625, 0.0199127197265625, 0.0021495819091796875, 0.0216064453125, 0.01555633544921875, 
0.0038471221923828125, 0.010894775390625, 0.048492431640625, -0.0271759033203125, -0.006511688232421875, 0.06097412109375, 0.006885528564453125, -0.01546478271484375, 0.050384521484375, 0.0030765533447265625, 0.005130767822265625, 0.051300048828125, 0.033477783203125, 0.046783447265625, -0.0025348663330078125, 0.03155517578125, 0.0307769775390625, 0.020355224609375, 0.0256500244140625, 0.001617431640625, 0.01500701904296875, -0.06732177734375, -0.021575927734375, -0.03558349609375, -0.017608642578125, 0.0189361572265625, -0.08697509765625, 0.0197601318359375, -0.039093017578125, -0.0134735107421875, -0.0093231201171875, -0.01499176025390625, -0.031463623046875, 0.0299835205078125, -0.0167388916015625, 0.0728759765625, -0.06292724609375, 0.06890869140625, 0.01959228515625, -0.041839599609375, -0.05609130859375, 0.00677490234375, 0.0171966552734375, -0.06744384765625, 0.01641845703125, 0.0033130645751953125, -0.0124053955078125, -0.01495361328125, -0.068359375, -0.070556640625, 0.07220458984375, 0.03240966796875, -0.00223541259765625, -0.0260467529296875, 0.0253143310546875, 0.034027099609375, -0.0183868408203125, -0.017608642578125, 0.0040740966796875, 0.034515380859375, 0.0030307769775390625, -0.055206298828125, -0.01555633544921875, -0.005580902099609375, -0.006195068359375, -0.00977325439453125, -0.049896240234375, 0.06939697265625, 0.01210784912109375, -0.009429931640625, 0.021240234375, 0.034820556640625, 0.019927978515625, -0.0013790130615234375, 0.0092620849609375, 0.031646728515625, 0.060455322265625, 0.01399993896484375, 0.08343505859375, 0.004299163818359375, 0.04266357421875, 0.11102294921875, -0.04644775390625, 0.033966064453125, 0.04852294921875, 0.0034236907958984375, 0.0157928466796875, 0.06396484375, -0.0123291015625, 0.06475830078125, -0.0005383491516113281, -0.0102996826171875, -0.0316162109375, -0.027069091796875, -0.04058837890625, 0.04669189453125, -0.00067901611328125, -0.031768798828125, -0.038909912109375, 0.01284027099609375, 
0.01200103759765625, 0.01122283935546875, -0.0386962890625, 0.065673828125, 0.00476837158203125, -0.0293121337890625, 0.044281005859375, -0.0203857421875, 0.0269317626953125, -0.04351806640625, 0.0093231201171875, -0.0004246234893798828, 0.003398895263671875, -0.02508544921875, -0.0697021484375, 0.041168212890625, 0.004016876220703125, -0.0236358642578125, -0.00055694580078125, 0.0562744140625, -0.03033447265625, -0.044586181640625, 0.02410888671875, 0.0426025390625, 0.0117340087890625, 0.0290069580078125, -0.05987548828125, -0.026580810546875, -0.006565093994140625, -0.0440673828125, 0.034912109375, 0.0215911865234375, -0.0026874542236328125, 0.05859375, 0.03753662109375, -0.016265869140625, -0.00917816162109375, 0.003948211669921875, 0.06842041015625, -0.03369140625, -0.0104827880859375, -0.05340576171875, 0.0504150390625, -0.0225067138671875, -0.011260986328125, 0.0582275390625, 0.048675537109375, 0.0380859375, -0.0214691162109375, 0.055206298828125, 0.0006442070007324219, 0.02264404296875, -0.050537109375, 0.08062744140625, -0.04571533203125, -0.002483367919921875, 0.0036220550537109375, -0.040374755859375, -0.007534027099609375, 0.031585693359375, -0.0186920166015625, -0.0080108642578125, 0.0308074951171875, 0.061126708984375, -0.001064300537109375, 0.0003044605255126953, 0.044769287109375, -0.02154541015625, 0.0038356781005859375, 0.0008997917175292969, 0.052734375, -0.0168609619140625, 0.041534423828125, -0.0290679931640625, -0.0017347335815429688, 0.016082763671875, -0.061614990234375, -0.09722900390625, -0.021820068359375, -0.0252685546875, -0.05291748046875, -0.0167694091796875, 0.06365966796875, 0.049224853515625, -0.05352783203125, -0.01047515869140625, 0.0005559921264648438, 0.01496124267578125, -0.00434112548828125, -0.01483154296875, 0.031524658203125, 0.00926971435546875, -0.04052734375, 0.021392822265625, -0.00540924072265625, 0.03228759765625, -0.034393310546875, -0.0066070556640625, -0.01418304443359375, 0.01056671142578125, 0.014312744140625, 
0.01416778564453125, -0.0372314453125, -0.050140380859375, -0.0100250244140625, -0.020294189453125, 0.026824951171875, 0.029937744140625, -0.02935791015625, 0.02813720703125, 0.0171051025390625, 0.0279388427734375, 0.0195465087890625, 0.007778167724609375, 0.048004150390625, -0.0638427734375, 0.03131103515625, 0.0098876953125, 0.0299835205078125, 0.0256195068359375, -0.0645751953125, 0.038665771484375, 0.0102386474609375, -0.053375244140625, -0.0292205810546875, 0.024139404296875, -0.051788330078125, -0.0127105712890625, 0.064697265625, -0.021575927734375, -0.046539306640625, -0.01313018798828125, -0.018890380859375, 0.031707763671875, -0.021575927734375, 0.03985595703125, 0.035980224609375, -0.01363372802734375, 0.0048065185546875, -0.035003662109375, 0.049163818359375, 0.0035228729248046875, -0.054107666015625, 0.0166778564453125, 0.04840087890625, 0.0321044921875, 0.00971221923828125, 0.038818359375, -0.01293182373046875, 0.016204833984375, 0.00896453857421875, 0.024932861328125, -0.006717681884765625, 0.0008459091186523438, -0.02484130859375, 0.012298583984375, -0.0036029815673828125, -0.03253173828125 ] ]
jondurbin/airoboros-65b-gpt4-m2.0
2023-08-14T10:11:57.000Z
[ "transformers", "pytorch", "llama", "text-generation", "dataset:jondurbin/airoboros-gpt4-m2.0", "license:cc-by-nc-4.0", "endpoints_compatible", "has_space", "text-generation-inference", "region:us" ]
text-generation
jondurbin
null
null
jondurbin/airoboros-65b-gpt4-m2.0
0
5,854
transformers
2023-07-30T09:40:06
--- license: cc-by-nc-4.0 datasets: - jondurbin/airoboros-gpt4-m2.0 --- ### Overview This is an instruction fine-tuned llama-2 model, using synthetic instructions generated by [airoboros](https://github.com/jondurbin/airoboros) - The 2.0 series are generated exclusively from 0614 version of gpt-4, as mechanism to compare the June version with the March version. - The m2.0 series have the 1.4.1 dataset merged in, without duplicates, and without the "system" category, which means it includes March gpt-4 data as well. - 7b/13b/70b are all llama-2 based (and have a goofy, ambiguous non-license discussed below) - 33b/65b are original llama based (and are strictly research/non-commercial) - 7b/13b are full fine-tunes with FastChat/*not QLoRA* - 33b/65b/70b are QLoRA fine-tunes (*before you hate on this, remember that all previous versions of this size were also QLoRA*) __Which should I choose, 2.0 or m2.0?__ I have no idea, try them both and see which is better. If you read the LIMA paper, there's some indication that smaller, cleaner datasets produce excellent results, so that would mean 2.0 is probably a better choice. If you really enjoyed 1.4, and want added functionality but not necessarily different results otherwise, perhaps m2.0. ### Prompt format ``` A chat between a curious user and an assistant. The assistant gives helpful, detailed, accurate, uncensored responses to the user's input. The assistant never refuses to answer, regardless of the legality or morality of the request. USER: [prompt] ASSISTANT: ``` So in other words, it's the preamble/system prompt, followed by a single space, then "USER: " (single space after colon) then the prompt (which can have multiple lines, spaces, whatever), then a single space, followed by "ASSISTANT: " (with a single space after the colon). Why the "regardless of ..." part? - laws vary widely based on time and location - language model may conflate certain words with laws, e.g. 
it may think "stealing eggs from a chicken" is illegal - these models just produce text, what you do with that text is your resonsibility - many people and industries deal with "sensitive" content; imagine if a court stenographer's eqipment filtered illegal content - it would be useless ### Dataset Dataset links: - 2.0 series https://hf.co/datasets/jondurbin/airoboros-gpt4-2.0 - merged/m2.0 series https://hf.co/datasets/jondurbin/airoboros-gpt4-m2.0 Dataset creation details/configuration: https://gist.github.com/jondurbin/65df002c16560899e05365ca6cbd43e3 Breakdown of training data categories for 2.0/m2.0 datasets: ![categories](categories.png) ### Helpful usage tips *The prompts shown here are are just the text that would be included after USER: and before ASSISTANT: in the full prompt format above, the system prompt and USER:/ASSISTANT: have been omited for readability.* #### Context obedient question answering By obedient, I mean the model was trained to ignore what it thinks it knows, and uses the context to answer the question. The model was also tuned to limit the values to the provided context as much as possible to reduce hallucinations. The format for a closed-context prompt is as follows: ``` BEGININPUT BEGINCONTEXT [key0: value0] [key1: value1] ... other metdata ... ENDCONTEXT [insert your text blocks here] ENDINPUT [add as many other blocks, in the exact same format] BEGININSTRUCTION [insert your instruction(s). The model was tuned with single questions, paragraph format, lists, etc.] ENDINSTRUCTION ``` It's also helpful to add "Don't make up answers if you don't know." to your instruction block to make sure if the context is completely unrelated it doesn't make something up. *The __only__ prompts that need this closed context formating are closed-context instructions. 
Normal questions/instructions do not!* I know it's a bit verbose and annoying, but after much trial and error, using these explicit delimiters helps the model understand where to find the responses and how to associate specific sources with it. - `BEGININPUT` - denotes a new input block - `BEGINCONTEXT` - denotes the block of context (metadata key/value pairs) to associate with the current input block - `ENDCONTEXT` - denotes the end of the metadata block for the current input - [text] - Insert whatever text you want for the input block, as many paragraphs as can fit in the context. - `ENDINPUT` - denotes the end of the current input block - [repeat as many input blocks in this format as you want] - `BEGININSTRUCTION` - denotes the start of the list (or one) instruction(s) to respond to for all of the input blocks above. - [instruction(s)] - `ENDINSTRUCTION` - denotes the end of instruction set It sometimes works without `ENDINSTRUCTION`, but by explicitly including that in the prompt, the model better understands that all of the instructions in the block should be responded to. Here's a trivial, but important example to prove the point: ``` BEGININPUT BEGINCONTEXT date: 2021-01-01 url: https://web.site/123 ENDCONTEXT In a shocking turn of events, blueberries are now green, but will be sticking with the same name. ENDINPUT BEGININSTRUCTION What color are bluberries? Source? ENDINSTRUCTION ``` And the response: ``` Blueberries are now green. 
Source: date: 2021-01-01 url: https://web.site/123 ``` #### Coding You can ask for fairly complex coding instructions with multiple criteria, e.g.: ``` Create a python application with the following requirements: - Asyncio FastAPI webserver - ping endpoint that returns the current date in JSON format - file upload endpoint, which calculates the file's sha256 checksum, and checks postgres to deduplicate ``` Or inline criteria: ``` Write a multi-threaded TCP server in C that accepts a "GET [key]" input and "SET [key] [value]" input, and uses a binary tree to get and store the input values. ``` You can also optionally add a single space and "PLAINFORMAT" at the end of your prompt to avoid backticks, explanations, etc. and just print the code, e.g.: ``` Write a websocket application in node.js. PLAINFORMAT ``` #### Agent/function calling The dataset includes many examples of function/args generation based on input criteria. This is somewhat similar to the OpenAI function calling, but the output is either JSON or YAML. Example prompt: ``` As an AI assistant, please select the most suitable function and parameters from the list of available functions below, based on the user's input. Provide your response in JSON format. Input: I want to know how many times 'Python' is mentioned in my text file. Available functions: file_analytics: description: This tool performs various operations on a text file. params: action: The operation we want to perform on the data, such as "count_occurrences", "find_line", etc. filters: keyword: The word or phrase we want to search for. ``` Response: ```json { "function": "file_analytics", "params": { "action": "count_occurrences", "filters": { "keyword": "Python" } } } ``` #### Chain-of-thought You can ask for several possible responses to a given problem, with a ranking and final answer selection. Example prompt: ``` A group of people decide to play a round-robin tournament where every player plays every other player exactly once. 
If a total of 45 games were played, how many players participated in the tournament? Offer several potential answers, rank them according to mathematical logic, and pick the most feasible one. ``` Example response: ``` Solution 1: In a round-robin tournament, each player plays every other player exactly once. This means that the total number of games played is equal to the combination of all players taken two at a time (since a game involves two players). The formula for combinations is nC2 = n(n-1)/2, where n is the total number of players. We know that nC2 equals 45 (the total number of games), so we can set up the equation as follows: n(n-1)/2 = 45 Solving this equation gives us n=10. Final answer: There were 10 players in the tournament. Solution 2: Another way to approach this problem is by using the concept of triangular numbers. Triangular numbers are a sequence of numbers where each number can be represented as a triangle with dots. The nth triangular number is given by the sum of the first n natural numbers. If we consider each game as a dot and each player as a side of the triangle, then the total number of games (45) would correspond to the 9th triangular number because 1+2+3+4+5+6+7+8+9=45. However, since each side of the triangle represents a player, and there's one more player than sides in our model (because the last player has no one left to play against), we need to add one more to get the total number of players. Final answer: There were 10 players in the tournament. Solution 3: We could also use trial and error to solve this problem. Starting from a small number of players, we calculate how many games would be played in a round-robin tournament until we reach 45 games. For example, with 2 players, only 1 game is played; with 3 players, 3 games are played; with 4 players, 6 games are played, and so on. Continuing this process, we find that with 10 players, 45 games are played. Final answer: There were 10 players in the tournament. 
Ranking of solutions from best to worst: 1. Solution 1: This solution uses direct mathematical reasoning based on the properties of a round-robin tournament and solves an equation to find the answer. 2. Solution 2: This solution uses the concept of triangular numbers which requires additional steps and knowledge beyond the immediate problem context, but still arrives at the correct answer. 3. Solution 3: This solution relies on trial and error, which is less efficient and reliable than the other methods, but it eventually gets to the right answer. Best and final answer: There were 10 players in the tournament. ``` #### reWOO style execution planning The model now supports execution planning for complex instructions that would require making use of several tools. The output is just the plan, you must implement a mechanism to parse the output and actually call the functions! Example prompt: ``` Please construct a systematic plan to generate an optimal response to the user instruction, utilizing a set of provided tools. Each plan will correspond to an evidence value, which will be the output of one of the available functions given an input string that could be the user's question, one or more prior evidence values, or a combination of both. Here are the tools available to be called: DuckDuckGo[input]: Tool that enables the user to search for information using the DuckDuckGo search engine. This tool is helpful in locating a suitable list of sites that may contain the answer to the user's query. It does not directly answer the question but finds a list of sites that might have the answer. TextScraper[input]: Load one or more websites from the input string containing newline delimited links, where input is one or more links, and produces plain text output containing the content of the links. HyperlinkExtractor[input]: Extract links from plain text and produces a plain text, newline delimited response of links. 
KnowledgeModel[input]: Question answering language model, particularly useful in answering questions based on an input passage of text. The input must be a text question that references an :evidence[n]: variable, e.g. What color is the cat, given :evidence1:? The output should be in format: Plan: [first action to take based on input question] :evidence0: = [function to call with input parameter] Plan: [next action to take, based on result of :evidence0:] :evidence1: = [next function to call and input parameter, which may include reference to previous evidence, e.g. "Given context :evidence0"] ... Answer: [:evidence[n]: containing the final answer.] Question: Who won the Nobel Prize in Literature in 2020 and what is their most famous work? ``` Response: ``` Plan: Begin by conducting a web search to find out who won the Nobel Prize in Literature in 2020. :evidence0: = DuckDuckGo[Nobel Prize in Literature 2020 winner] Plan: Utilize the KnowledgeModel to interpret the search results and find the name of the laureate. :evidence1: = KnowledgeModel[Who won the Nobel Prize in Literature in 2020, given :evidence0:?] Plan: Conduct another web search to find the most famous work of the identified laureate. :evidence2: = DuckDuckGo[Most famous work of :evidence1:] Plan: Extract the relevant links from the DuckDuckGo search results for a more focused search. :evidence3: = HyperlinkExtractor[:evidence2:] Plan: Use the TextScraper tool to extract information from the relevant links. :evidence4: = TextScraper[:evidence3:] Plan: Finally, utilize the KnowledgeModel to identify and summarize the most famous work of the laureate from the extracted information. :evidence5: = KnowledgeModel[What is the most famous work of :evidence1:, given :evidence4:?] Answer: :evidence5: ``` For this to be useful, you'd have to parse the output plan text, and implement/call each of the functions. 
This is just pseudo-code, completely untested off the top of my head, and obviously would require full implementation + hardening: ```python import re import requests def inject_context(input_text, **context): for ref in set(re.findall(r"(:evidence[0-9]+:)", input_text, re.I)): input_text = input_text.replace(ref, context.get(ref, "")) return input_text def duckduckgo(input_text, **context): search_string = inject_context(input_text, **context) ... search via duck duck go using search_string ... return text content def link_extractor(input_text, **context): input_text = inject_context(input_text, **context) return "\n".join(list(set(re.findall(r"(https?://[^\s]+?\.?)", input_text, re.I)))) def scrape(input_text, **context): input_text = inject_context(input_text, **context) text = [] for link in input_text.splitlines(): text.append(requests.get(link).text) return "\n".join(text) def infer(input_text, **context): prompt = inject_context(input_text, **context) ... call model with prompt, return output def parse_plan(plan): method_map = { "DuckDuckGo": duckduckgo, "HyperlinkExtractor": link_extractor, "KnowledgeModel": infer, "TextScraper": scrape, } context = {} for line in plan.strip().splitlines(): if line.startswith("Plan:"): print(line) continue parts = re.match(r"^(:evidence[0-9]+:)\s*=\s*([^\[]+)(\[.*\])\s*$", line, re.I) if not parts: if line.startswith("Answer: "): return context.get(line.split(" ")[-1].strip(), "Answer couldn't be generated...") raise RuntimeError("bad format: " + line) context[parts.group(1)] = method_map[parts.group(2)](parts.group(3), **context) ``` ### Contribute If you're interested in new functionality, particularly a new "instructor" type to generate a specific type of training data, take a look at the dataset generation tool repo: https://github.com/jondurbin/airoboros and either make a PR or open an issue with details. 
To help me with the OpenAI/compute costs: - https://bmc.link/jondurbin - ETH 0xce914eAFC2fe52FdceE59565Dd92c06f776fcb11 - BTC bc1qdwuth4vlg8x37ggntlxu5cjfwgmdy5zaa7pswf ### Licence and usage restrictions The airoboros 2.0/m2.0 models are built on top of either llama or llama-2. Any model with `-l2-` in the name uses llama2, `..-33b-...` and `...-65b-...` are based on the original llama. #### Llama (original) models If the model was based on the original llama (33b/65b), the license is __cc-by-nc-4.0__ and is for research/academic use only -- no commercial usage whatsoever! #### Llama-2 models Base model has a custom Meta license: - See the [meta-license/LICENSE.txt](meta-license/LICENSE.txt) file attached for the original license provided by Meta. - See also [meta-license/USE_POLICY.md](meta-license/USE_POLICY.md) and [meta-license/Responsible-Use-Guide.pdf](meta-license/Responsible-Use-Guide.pdf), also provided by Meta. The fine-tuning data was generated by OpenAI API calls to gpt-4, via [airoboros](https://github.com/jondurbin/airoboros) The ToS for OpenAI API usage has a clause preventing the output from being used to train a model that __competes__ with OpenAI - what does *compete* actually mean here? - these small open source models will not produce output anywhere near the quality of gpt-4, or even gpt-3.5, so I can't imagine this could credibly be considered competing in the first place - if someone else uses the dataset to do the same, they wouldn't necessarily be violating the ToS because they didn't call the API, so I don't know how that works - the training data used in essentially all large language models includes a significant amount of copyrighted or otherwise non-permissive licensing in the first place - other work using the self-instruct method, e.g. 
the original here: https://github.com/yizhongw/self-instruct released the data and model as apache-2 I am purposely leaving this license ambiguous (other than the fact you must comply with the Meta original license for llama-2) because I am not a lawyer and refuse to attempt to interpret all of the terms accordingly. Your best bet is probably to avoid using this commercially due to the OpenAI API usage. Either way, by using this model, you agree to completely indemnify me.
17,513
[ [ -0.023101806640625, -0.06829833984375, 0.039093017578125, 0.019134521484375, -0.01192474365234375, -0.0206298828125, -0.0100250244140625, -0.02679443359375, 0.01172637939453125, 0.032928466796875, -0.05194091796875, -0.042083740234375, -0.031890869140625, 0.02215576171875, -0.019622802734375, 0.0858154296875, -0.00643157958984375, -0.00731658935546875, -0.0026607513427734375, 0.0025882720947265625, -0.04901123046875, -0.03436279296875, -0.0623779296875, -0.006053924560546875, 0.03076171875, 0.03436279296875, 0.033660888671875, 0.048431396484375, 0.041015625, 0.0287017822265625, 0.00128173828125, 0.019775390625, -0.031890869140625, 0.004913330078125, -0.009857177734375, -0.03717041015625, -0.0258026123046875, 0.00962066650390625, 0.0297698974609375, 0.03369140625, -0.0162506103515625, 0.0233001708984375, -0.0001900196075439453, 0.0298614501953125, -0.035247802734375, 0.0159759521484375, -0.034149169921875, 0.003032684326171875, -0.00865936279296875, -0.036712646484375, -0.02606201171875, -0.018707275390625, 0.004955291748046875, -0.0767822265625, -0.00447845458984375, 0.009002685546875, 0.07354736328125, 0.0266571044921875, -0.03509521484375, -0.0297393798828125, -0.041473388671875, 0.06207275390625, -0.060821533203125, 0.00897979736328125, 0.05047607421875, 0.02996826171875, -0.0275421142578125, -0.06396484375, -0.049407958984375, -0.01172637939453125, -0.020294189453125, 0.01824951171875, -0.00878143310546875, -0.00548553466796875, 0.0374755859375, 0.004611968994140625, -0.06378173828125, -0.00862884521484375, -0.045928955078125, -0.0118865966796875, 0.05047607421875, 0.026824951171875, 0.0198974609375, -0.01276397705078125, -0.02899169921875, -0.0019350051879882812, -0.038848876953125, 0.0204925537109375, 0.0318603515625, 0.030364990234375, -0.0230560302734375, 0.039764404296875, -0.025421142578125, 0.045806884765625, 0.0014095306396484375, -0.01407623291015625, 0.0104522705078125, -0.03961181640625, -0.0186614990234375, -0.01378631591796875, 
0.08319091796875, 0.050506591796875, 0.01050567626953125, 0.0015697479248046875, -0.0032558441162109375, -0.00797271728515625, 0.011627197265625, -0.07110595703125, -0.0169830322265625, 0.045501708984375, -0.038818359375, -0.027069091796875, -0.0021209716796875, -0.062347412109375, -0.01502227783203125, -0.0148773193359375, 0.041961669921875, -0.030029296875, 0.001338958740234375, 0.0095977783203125, -0.024200439453125, 0.01654052734375, 0.03387451171875, -0.06353759765625, 0.04046630859375, 0.031463623046875, 0.0689697265625, 0.0047607421875, -0.0276947021484375, -0.04217529296875, -0.005771636962890625, -0.00841522216796875, 0.05718994140625, -0.0323486328125, -0.0274810791015625, -0.0195465087890625, 0.0250091552734375, 0.0015468597412109375, -0.023651123046875, 0.0227813720703125, -0.0333251953125, 0.0443115234375, -0.034912109375, -0.0360107421875, -0.02203369140625, 0.020172119140625, -0.03497314453125, 0.07275390625, 0.0075531005859375, -0.061920166015625, -0.003429412841796875, -0.07598876953125, -0.02691650390625, -0.0032367706298828125, -0.00005137920379638672, -0.0049591064453125, -0.0291290283203125, 0.011016845703125, 0.0252227783203125, -0.0294647216796875, 0.0115203857421875, -0.016571044921875, -0.035003662109375, 0.029693603515625, -0.0250701904296875, 0.0897216796875, 0.0266265869140625, -0.018402099609375, 0.007404327392578125, -0.052093505859375, -0.00047850608825683594, 0.0174560546875, -0.0386962890625, -0.01163482666015625, 0.007747650146484375, -0.000957489013671875, 0.0025463104248046875, 0.024749755859375, -0.0364990234375, 0.022613525390625, -0.0258331298828125, 0.06561279296875, 0.05694580078125, 0.0114898681640625, 0.0256195068359375, -0.02740478515625, 0.03643798828125, -0.003978729248046875, 0.025726318359375, -0.0312347412109375, -0.050323486328125, -0.042022705078125, -0.00008147954940795898, 0.014007568359375, 0.073486328125, -0.047088623046875, 0.03564453125, -0.00164031982421875, -0.034423828125, -0.023223876953125, 
-0.007465362548828125, 0.0261993408203125, 0.053466796875, 0.039825439453125, -0.007671356201171875, -0.053924560546875, -0.05609130859375, 0.01165771484375, -0.0163726806640625, 0.0015172958374023438, 0.036285400390625, 0.052825927734375, -0.01479339599609375, 0.06787109375, -0.062469482421875, -0.0024662017822265625, -0.00557708740234375, 0.003780364990234375, 0.0229949951171875, 0.04583740234375, 0.03961181640625, -0.053985595703125, -0.0293426513671875, -0.006381988525390625, -0.06622314453125, -0.00841522216796875, -0.00608062744140625, -0.019195556640625, -0.0003783702850341797, 0.0247344970703125, -0.050018310546875, 0.033599853515625, 0.0216827392578125, -0.0364990234375, 0.04815673828125, -0.01042938232421875, 0.0203094482421875, -0.09405517578125, 0.022308349609375, -0.01165008544921875, -0.01117706298828125, -0.049774169921875, 0.0258636474609375, -0.0158538818359375, -0.0026569366455078125, -0.037322998046875, 0.05194091796875, -0.024200439453125, 0.00585174560546875, -0.00598907470703125, 0.0114593505859375, 0.0146026611328125, 0.046661376953125, -0.00974273681640625, 0.069580078125, 0.0361328125, -0.053253173828125, 0.043212890625, 0.018096923828125, -0.0040283203125, 0.028167724609375, -0.066650390625, 0.0166015625, -0.00644683837890625, 0.021453857421875, -0.08380126953125, -0.01361083984375, 0.043060302734375, -0.047698974609375, 0.0015516281127929688, -0.00858306884765625, -0.0280914306640625, -0.037506103515625, -0.03472900390625, 0.0238037109375, 0.03436279296875, -0.0225830078125, 0.037261962890625, 0.027618408203125, 0.003932952880859375, -0.04229736328125, -0.05523681640625, 0.0060577392578125, -0.025970458984375, -0.042510986328125, 0.0222930908203125, -0.032440185546875, -0.0222015380859375, -0.01433563232421875, 0.00913238525390625, -0.0222015380859375, 0.0246124267578125, 0.01433563232421875, 0.017486572265625, -0.01049041748046875, -0.00634002685546875, 0.007709503173828125, -0.0011186599731445312, 0.0035915374755859375, 
-0.0301666259765625, 0.059539794921875, -0.0163421630859375, -0.00821685791015625, -0.053985595703125, 0.039794921875, 0.0258636474609375, -0.01593017578125, 0.03887939453125, 0.042572021484375, -0.03521728515625, 0.014404296875, -0.01824951171875, -0.025390625, -0.042510986328125, 0.01528167724609375, -0.026123046875, -0.046356201171875, 0.052703857421875, 0.025970458984375, 0.0177764892578125, 0.034881591796875, 0.03204345703125, -0.02044677734375, 0.06494140625, 0.02044677734375, 0.01514434814453125, 0.022857666015625, -0.04052734375, -0.001514434814453125, -0.06329345703125, -0.028656005859375, -0.044219970703125, -0.026336669921875, -0.045654296875, -0.021331787109375, 0.0234527587890625, 0.0207977294921875, -0.03778076171875, 0.0391845703125, -0.055908203125, 0.03814697265625, 0.052978515625, 0.0106048583984375, 0.0105438232421875, -0.01103973388671875, 0.0016698837280273438, 0.006038665771484375, -0.04156494140625, -0.0445556640625, 0.08831787109375, 0.0193328857421875, 0.049407958984375, 0.01461029052734375, 0.059722900390625, 0.0212860107421875, 0.0031795501708984375, -0.061767578125, 0.053314208984375, -0.00203704833984375, -0.043121337890625, -0.0362548828125, -0.0258026123046875, -0.0848388671875, 0.016510009765625, -0.00574493408203125, -0.072998046875, 0.01294708251953125, 0.010711669921875, -0.061126708984375, 0.0010852813720703125, -0.058929443359375, 0.06829833984375, -0.0182037353515625, -0.0261993408203125, 0.0089263916015625, -0.059539794921875, 0.021240234375, 0.0107421875, 0.0143585205078125, 0.00041174888610839844, -0.006412506103515625, 0.06866455078125, -0.0560302734375, 0.0684814453125, -0.0194091796875, 0.01165008544921875, 0.03961181640625, -0.0002167224884033203, 0.03289794921875, 0.01544189453125, -0.0017614364624023438, 0.0114288330078125, 0.0237274169921875, -0.018341064453125, -0.043121337890625, 0.0457763671875, -0.0673828125, -0.038543701171875, -0.030181884765625, -0.042266845703125, 0.017425537109375, 0.0290374755859375, 
0.03594970703125, 0.0433349609375, -0.005321502685546875, -0.003536224365234375, 0.040313720703125, -0.0243988037109375, 0.04241943359375, 0.045379638671875, -0.019500732421875, -0.0443115234375, 0.0577392578125, 0.01398468017578125, -0.0030879974365234375, 0.046356201171875, 0.0302886962890625, -0.02447509765625, -0.030364990234375, -0.051239013671875, 0.01415252685546875, -0.046630859375, -0.01824951171875, -0.0655517578125, -0.00485992431640625, -0.044586181640625, -0.005077362060546875, -0.00020444393157958984, -0.039886474609375, -0.04510498046875, -0.0014333724975585938, 0.04541015625, 0.043731689453125, 0.0005645751953125, 0.0439453125, -0.04962158203125, 0.0187225341796875, 0.024688720703125, 0.00911712646484375, -0.0024318695068359375, -0.0478515625, -0.005420684814453125, 0.01776123046875, -0.03460693359375, -0.0889892578125, 0.0280609130859375, 0.003894805908203125, 0.035919189453125, 0.0394287109375, -0.0013284683227539062, 0.059234619140625, -0.04376220703125, 0.0814208984375, -0.0005393028259277344, -0.06298828125, 0.06103515625, -0.044464111328125, 0.00930023193359375, 0.041839599609375, 0.03118896484375, -0.046356201171875, -0.01381683349609375, -0.03778076171875, -0.0660400390625, 0.0738525390625, 0.024078369140625, 0.0018682479858398438, -0.00865936279296875, 0.03668212890625, 0.00008255243301391602, 0.0183868408203125, -0.060638427734375, -0.0285797119140625, -0.03350830078125, -0.01541900634765625, 0.0027446746826171875, -0.00390625, -0.02197265625, -0.02777099609375, 0.0382080078125, -0.00855255126953125, 0.0452880859375, 0.0161285400390625, 0.0024890899658203125, 0.006404876708984375, 0.01227569580078125, 0.0626220703125, 0.041534423828125, -0.024566650390625, 0.002925872802734375, 0.0159149169921875, -0.039154052734375, 0.0085601806640625, 0.0158843994140625, -0.022491455078125, -0.020538330078125, 0.025970458984375, 0.05718994140625, -0.0035800933837890625, -0.04559326171875, 0.034637451171875, -0.01514434814453125, -0.0094757080078125, 
-0.025421142578125, 0.0203094482421875, 0.0069580078125, 0.01265716552734375, 0.018341064453125, -0.0081787109375, 0.032562255859375, -0.05059814453125, 0.008087158203125, 0.0220794677734375, 0.0003390312194824219, -0.029327392578125, 0.053985595703125, 0.016082763671875, -0.049346923828125, 0.04571533203125, -0.0400390625, -0.04156494140625, 0.06707763671875, 0.05712890625, 0.050506591796875, -0.01465606689453125, 0.0219573974609375, 0.0418701171875, 0.02783203125, -0.01308441162109375, 0.04791259765625, -0.00994110107421875, -0.045928955078125, -0.00775909423828125, -0.048187255859375, -0.020782470703125, 0.017486572265625, -0.04266357421875, 0.0175933837890625, -0.052642822265625, -0.01465606689453125, 0.0006961822509765625, 0.00916290283203125, -0.053497314453125, 0.0164337158203125, -0.0146331787109375, 0.07220458984375, -0.074462890625, 0.037384033203125, 0.0621337890625, -0.0557861328125, -0.06793212890625, -0.0086517333984375, 0.006866455078125, -0.0533447265625, 0.0299530029296875, 0.0202484130859375, 0.01216888427734375, -0.000060617923736572266, -0.05908203125, -0.07525634765625, 0.09759521484375, 0.00811767578125, -0.0311126708984375, -0.01091766357421875, -0.0012359619140625, 0.04241943359375, -0.031982421875, 0.050201416015625, 0.0390625, 0.047149658203125, -0.0008840560913085938, -0.07049560546875, 0.026336669921875, -0.032257080078125, -0.005535125732421875, -0.0012617111206054688, -0.06622314453125, 0.08563232421875, -0.023590087890625, -0.0172119140625, 0.00821685791015625, 0.035125732421875, 0.01210784912109375, 0.0255889892578125, 0.0284576416015625, 0.03668212890625, 0.07928466796875, -0.00690460205078125, 0.0777587890625, -0.0206298828125, 0.0205078125, 0.08721923828125, -0.0100250244140625, 0.0599365234375, 0.0298614501953125, -0.03436279296875, 0.042572021484375, 0.06585693359375, -0.009979248046875, 0.04400634765625, 0.00508880615234375, 0.002468109130859375, 0.0031280517578125, -0.0002429485321044922, -0.0330810546875, 0.03839111328125, 
0.0203857421875, -0.01406097412109375, -0.006633758544921875, -0.0009675025939941406, 0.0153961181640625, -0.01078033447265625, -0.0069580078125, 0.056243896484375, -0.001873016357421875, -0.06103515625, 0.050994873046875, 0.01309967041015625, 0.05096435546875, -0.04400634765625, -0.01056671142578125, -0.0254974365234375, -0.00978851318359375, -0.022857666015625, -0.0694580078125, 0.019775390625, 0.00499725341796875, -0.0258941650390625, 0.0035495758056640625, 0.03240966796875, -0.0238037109375, -0.025177001953125, 0.01117706298828125, 0.0182037353515625, 0.049102783203125, 0.005863189697265625, -0.056793212890625, 0.00926971435546875, 0.0083160400390625, -0.02105712890625, 0.0116119384765625, 0.0270233154296875, -0.0032596588134765625, 0.052337646484375, 0.0577392578125, -0.0008544921875, -0.0025081634521484375, -0.00937652587890625, 0.06597900390625, -0.052001953125, -0.0452880859375, -0.064453125, 0.046112060546875, -0.010589599609375, -0.036102294921875, 0.0467529296875, 0.0498046875, 0.054595947265625, 0.006618499755859375, 0.058807373046875, -0.02203369140625, 0.02386474609375, -0.0396728515625, 0.04815673828125, -0.047760009765625, 0.0258636474609375, -0.01018524169921875, -0.048980712890625, -0.005970001220703125, 0.06475830078125, -0.015960693359375, 0.000006079673767089844, 0.050994873046875, 0.07049560546875, 0.0017681121826171875, 0.00930023193359375, -0.002105712890625, 0.0182037353515625, 0.028289794921875, 0.04803466796875, 0.053558349609375, -0.04840087890625, 0.0418701171875, -0.021209716796875, -0.03436279296875, -0.0084381103515625, -0.058135986328125, -0.063720703125, -0.041961669921875, -0.00537109375, -0.0306854248046875, 0.0090484619140625, 0.086669921875, 0.048553466796875, -0.06365966796875, -0.0298614501953125, 0.002735137939453125, 0.006671905517578125, -0.0229034423828125, -0.0236663818359375, 0.019744873046875, -0.014007568359375, -0.051971435546875, 0.0322265625, 0.001682281494140625, 0.0095367431640625, -0.0116119384765625, 
-0.004077911376953125, -0.0283966064453125, 0.010498046875, 0.045867919921875, 0.0262451171875, -0.053192138671875, -0.02117919921875, 0.0134735107421875, -0.00862884521484375, 0.003643035888671875, 0.037445068359375, -0.054595947265625, 0.0257568359375, 0.043121337890625, 0.02117919921875, 0.031036376953125, 0.00728607177734375, 0.0260009765625, -0.0440673828125, 0.006259918212890625, 0.007080078125, 0.0284423828125, 0.01520538330078125, -0.0546875, 0.041839599609375, 0.0248260498046875, -0.05157470703125, -0.068359375, 0.0032596588134765625, -0.08038330078125, -0.0312347412109375, 0.09649658203125, -0.01235198974609375, -0.01456451416015625, -0.01349639892578125, -0.032684326171875, 0.013397216796875, -0.050872802734375, 0.05078125, 0.049530029296875, -0.0309906005859375, 0.007808685302734375, -0.0390625, 0.03302001953125, -0.0005755424499511719, -0.0693359375, -0.0028972625732421875, 0.03839111328125, 0.041412353515625, 0.0213470458984375, 0.0728759765625, 0.00783538818359375, 0.0207061767578125, 0.0023136138916015625, -0.0059814453125, -0.01788330078125, -0.0298004150390625, -0.01552581787109375, 0.008056640625, -0.01922607421875, -0.0248260498046875 ] ]
timm/convnextv2_tiny.fcmae_ft_in22k_in1k
2023-03-31T23:41:08.000Z
[ "timm", "pytorch", "safetensors", "image-classification", "dataset:imagenet-1k", "arxiv:2301.00808", "license:cc-by-nc-4.0", "region:us" ]
image-classification
timm
null
null
timm/convnextv2_tiny.fcmae_ft_in22k_in1k
0
5,852
timm
2023-01-05T01:56:47
--- tags: - image-classification - timm library_tag: timm license: cc-by-nc-4.0 datasets: - imagenet-1k - imagenet-1k --- # Model card for convnextv2_tiny.fcmae_ft_in22k_in1k A ConvNeXt-V2 image classification model. Pretrained with a fully convolutional masked autoencoder framework (FCMAE) and fine-tuned on ImageNet-22k and then ImageNet-1k. ## Model Details - **Model Type:** Image classification / feature backbone - **Model Stats:** - Params (M): 28.6 - GMACs: 4.5 - Activations (M): 13.4 - Image size: train = 224 x 224, test = 288 x 288 - **Papers:** - ConvNeXt V2: Co-designing and Scaling ConvNets with Masked Autoencoders: https://arxiv.org/abs/2301.00808 - **Original:** https://github.com/facebookresearch/ConvNeXt-V2 - **Dataset:** ImageNet-1k - **Pretrain Dataset:** ImageNet-1k ## Model Usage ### Image Classification ```python from urllib.request import urlopen from PIL import Image import timm import torch img = Image.open(urlopen( 'https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/beignets-task-guide.png' )) model = timm.create_model('convnextv2_tiny.fcmae_ft_in22k_in1k', pretrained=True) model = model.eval() # get model specific transforms (normalization, resize) data_config = timm.data.resolve_model_data_config(model) transforms = timm.data.create_transform(**data_config, is_training=False) output = model(transforms(img).unsqueeze(0)) # unsqueeze single image into batch of 1 top5_probabilities, top5_class_indices = torch.topk(output.softmax(dim=1) * 100, k=5) ``` ### Feature Map Extraction ```python from urllib.request import urlopen from PIL import Image import timm img = Image.open(urlopen( 'https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/beignets-task-guide.png' )) model = timm.create_model( 'convnextv2_tiny.fcmae_ft_in22k_in1k', pretrained=True, features_only=True, ) model = model.eval() # get model specific transforms (normalization, resize) data_config = timm.data.resolve_model_data_config(model) 
transforms = timm.data.create_transform(**data_config, is_training=False) output = model(transforms(img).unsqueeze(0)) # unsqueeze single image into batch of 1 for o in output: # print shape of each feature map in output # e.g.: # torch.Size([1, 96, 56, 56]) # torch.Size([1, 192, 28, 28]) # torch.Size([1, 384, 14, 14]) # torch.Size([1, 768, 7, 7]) print(o.shape) ``` ### Image Embeddings ```python from urllib.request import urlopen from PIL import Image import timm img = Image.open(urlopen( 'https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/beignets-task-guide.png' )) model = timm.create_model( 'convnextv2_tiny.fcmae_ft_in22k_in1k', pretrained=True, num_classes=0, # remove classifier nn.Linear ) model = model.eval() # get model specific transforms (normalization, resize) data_config = timm.data.resolve_model_data_config(model) transforms = timm.data.create_transform(**data_config, is_training=False) output = model(transforms(img).unsqueeze(0)) # output is (batch_size, num_features) shaped tensor # or equivalently (without needing to set num_classes=0) output = model.forward_features(transforms(img).unsqueeze(0)) # output is unpooled, a (1, 768, 7, 7) shaped tensor output = model.forward_head(output, pre_logits=True) # output is a (1, num_features) shaped tensor ``` ## Model Comparison Explore the dataset and runtime metrics of this model in timm [model results](https://github.com/huggingface/pytorch-image-models/tree/main/results). All timing numbers from eager model PyTorch 1.13 on RTX 3090 w/ AMP. 
| model |top1 |top5 |img_size|param_count|gmacs |macts |samples_per_sec|batch_size| |------------------------------------------------------------------------------------------------------------------------------|------|------|--------|-----------|------|------|---------------|----------| | [convnextv2_huge.fcmae_ft_in22k_in1k_512](https://huggingface.co/timm/convnextv2_huge.fcmae_ft_in22k_in1k_512) |88.848|98.742|512 |660.29 |600.81|413.07|28.58 |48 | | [convnextv2_huge.fcmae_ft_in22k_in1k_384](https://huggingface.co/timm/convnextv2_huge.fcmae_ft_in22k_in1k_384) |88.668|98.738|384 |660.29 |337.96|232.35|50.56 |64 | | [convnext_xxlarge.clip_laion2b_soup_ft_in1k](https://huggingface.co/timm/convnext_xxlarge.clip_laion2b_soup_ft_in1k) |88.612|98.704|256 |846.47 |198.09|124.45|122.45 |256 | | [convnext_large_mlp.clip_laion2b_soup_ft_in12k_in1k_384](https://huggingface.co/timm/convnext_large_mlp.clip_laion2b_soup_ft_in12k_in1k_384) |88.312|98.578|384 |200.13 |101.11|126.74|196.84 |256 | | [convnextv2_large.fcmae_ft_in22k_in1k_384](https://huggingface.co/timm/convnextv2_large.fcmae_ft_in22k_in1k_384) |88.196|98.532|384 |197.96 |101.1 |126.74|128.94 |128 | | [convnext_large_mlp.clip_laion2b_soup_ft_in12k_in1k_320](https://huggingface.co/timm/convnext_large_mlp.clip_laion2b_soup_ft_in12k_in1k_320) |87.968|98.47 |320 |200.13 |70.21 |88.02 |283.42 |256 | | [convnext_xlarge.fb_in22k_ft_in1k_384](https://huggingface.co/timm/convnext_xlarge.fb_in22k_ft_in1k_384) |87.75 |98.556|384 |350.2 |179.2 |168.99|124.85 |192 | | [convnextv2_base.fcmae_ft_in22k_in1k_384](https://huggingface.co/timm/convnextv2_base.fcmae_ft_in22k_in1k_384) |87.646|98.422|384 |88.72 |45.21 |84.49 |209.51 |256 | | [convnext_large.fb_in22k_ft_in1k_384](https://huggingface.co/timm/convnext_large.fb_in22k_ft_in1k_384) |87.476|98.382|384 |197.77 |101.1 |126.74|194.66 |256 | | [convnext_large_mlp.clip_laion2b_augreg_ft_in1k](https://huggingface.co/timm/convnext_large_mlp.clip_laion2b_augreg_ft_in1k) 
|87.344|98.218|256 |200.13 |44.94 |56.33 |438.08 |256 | | [convnextv2_large.fcmae_ft_in22k_in1k](https://huggingface.co/timm/convnextv2_large.fcmae_ft_in22k_in1k) |87.26 |98.248|224 |197.96 |34.4 |43.13 |376.84 |256 | | [convnext_base.clip_laion2b_augreg_ft_in12k_in1k_384](https://huggingface.co/timm/convnext_base.clip_laion2b_augreg_ft_in12k_in1k_384) |87.138|98.212|384 |88.59 |45.21 |84.49 |365.47 |256 | | [convnext_xlarge.fb_in22k_ft_in1k](https://huggingface.co/timm/convnext_xlarge.fb_in22k_ft_in1k) |87.002|98.208|224 |350.2 |60.98 |57.5 |368.01 |256 | | [convnext_base.fb_in22k_ft_in1k_384](https://huggingface.co/timm/convnext_base.fb_in22k_ft_in1k_384) |86.796|98.264|384 |88.59 |45.21 |84.49 |366.54 |256 | | [convnextv2_base.fcmae_ft_in22k_in1k](https://huggingface.co/timm/convnextv2_base.fcmae_ft_in22k_in1k) |86.74 |98.022|224 |88.72 |15.38 |28.75 |624.23 |256 | | [convnext_large.fb_in22k_ft_in1k](https://huggingface.co/timm/convnext_large.fb_in22k_ft_in1k) |86.636|98.028|224 |197.77 |34.4 |43.13 |581.43 |256 | | [convnext_base.clip_laiona_augreg_ft_in1k_384](https://huggingface.co/timm/convnext_base.clip_laiona_augreg_ft_in1k_384) |86.504|97.97 |384 |88.59 |45.21 |84.49 |368.14 |256 | | [convnext_base.clip_laion2b_augreg_ft_in12k_in1k](https://huggingface.co/timm/convnext_base.clip_laion2b_augreg_ft_in12k_in1k) |86.344|97.97 |256 |88.59 |20.09 |37.55 |816.14 |256 | | [convnextv2_huge.fcmae_ft_in1k](https://huggingface.co/timm/convnextv2_huge.fcmae_ft_in1k) |86.256|97.75 |224 |660.29 |115.0 |79.07 |154.72 |256 | | [convnext_small.in12k_ft_in1k_384](https://huggingface.co/timm/convnext_small.in12k_ft_in1k_384) |86.182|97.92 |384 |50.22 |25.58 |63.37 |516.19 |256 | | [convnext_base.clip_laion2b_augreg_ft_in1k](https://huggingface.co/timm/convnext_base.clip_laion2b_augreg_ft_in1k) |86.154|97.68 |256 |88.59 |20.09 |37.55 |819.86 |256 | | [convnext_base.fb_in22k_ft_in1k](https://huggingface.co/timm/convnext_base.fb_in22k_ft_in1k) |85.822|97.866|224 |88.59 |15.38 
|28.75 |1037.66 |256 | | [convnext_small.fb_in22k_ft_in1k_384](https://huggingface.co/timm/convnext_small.fb_in22k_ft_in1k_384) |85.778|97.886|384 |50.22 |25.58 |63.37 |518.95 |256 | | [convnextv2_large.fcmae_ft_in1k](https://huggingface.co/timm/convnextv2_large.fcmae_ft_in1k) |85.742|97.584|224 |197.96 |34.4 |43.13 |375.23 |256 | | [convnext_small.in12k_ft_in1k](https://huggingface.co/timm/convnext_small.in12k_ft_in1k) |85.174|97.506|224 |50.22 |8.71 |21.56 |1474.31 |256 | | [convnext_tiny.in12k_ft_in1k_384](https://huggingface.co/timm/convnext_tiny.in12k_ft_in1k_384) |85.118|97.608|384 |28.59 |13.14 |39.48 |856.76 |256 | | [convnextv2_tiny.fcmae_ft_in22k_in1k_384](https://huggingface.co/timm/convnextv2_tiny.fcmae_ft_in22k_in1k_384) |85.112|97.63 |384 |28.64 |13.14 |39.48 |491.32 |256 | | [convnextv2_base.fcmae_ft_in1k](https://huggingface.co/timm/convnextv2_base.fcmae_ft_in1k) |84.874|97.09 |224 |88.72 |15.38 |28.75 |625.33 |256 | | [convnext_small.fb_in22k_ft_in1k](https://huggingface.co/timm/convnext_small.fb_in22k_ft_in1k) |84.562|97.394|224 |50.22 |8.71 |21.56 |1478.29 |256 | | [convnext_large.fb_in1k](https://huggingface.co/timm/convnext_large.fb_in1k) |84.282|96.892|224 |197.77 |34.4 |43.13 |584.28 |256 | | [convnext_tiny.in12k_ft_in1k](https://huggingface.co/timm/convnext_tiny.in12k_ft_in1k) |84.186|97.124|224 |28.59 |4.47 |13.44 |2433.7 |256 | | [convnext_tiny.fb_in22k_ft_in1k_384](https://huggingface.co/timm/convnext_tiny.fb_in22k_ft_in1k_384) |84.084|97.14 |384 |28.59 |13.14 |39.48 |862.95 |256 | | [convnextv2_tiny.fcmae_ft_in22k_in1k](https://huggingface.co/timm/convnextv2_tiny.fcmae_ft_in22k_in1k) |83.894|96.964|224 |28.64 |4.47 |13.44 |1452.72 |256 | | [convnext_base.fb_in1k](https://huggingface.co/timm/convnext_base.fb_in1k) |83.82 |96.746|224 |88.59 |15.38 |28.75 |1054.0 |256 | | [convnextv2_nano.fcmae_ft_in22k_in1k_384](https://huggingface.co/timm/convnextv2_nano.fcmae_ft_in22k_in1k_384) |83.37 |96.742|384 |15.62 |7.22 |24.61 |801.72 |256 | | 
[convnext_small.fb_in1k](https://huggingface.co/timm/convnext_small.fb_in1k) |83.142|96.434|224 |50.22 |8.71 |21.56 |1464.0 |256 | | [convnextv2_tiny.fcmae_ft_in1k](https://huggingface.co/timm/convnextv2_tiny.fcmae_ft_in1k) |82.92 |96.284|224 |28.64 |4.47 |13.44 |1425.62 |256 | | [convnext_tiny.fb_in22k_ft_in1k](https://huggingface.co/timm/convnext_tiny.fb_in22k_ft_in1k) |82.898|96.616|224 |28.59 |4.47 |13.44 |2480.88 |256 | | [convnext_nano.in12k_ft_in1k](https://huggingface.co/timm/convnext_nano.in12k_ft_in1k) |82.282|96.344|224 |15.59 |2.46 |8.37 |3926.52 |256 | | [convnext_tiny_hnf.a2h_in1k](https://huggingface.co/timm/convnext_tiny_hnf.a2h_in1k) |82.216|95.852|224 |28.59 |4.47 |13.44 |2529.75 |256 | | [convnext_tiny.fb_in1k](https://huggingface.co/timm/convnext_tiny.fb_in1k) |82.066|95.854|224 |28.59 |4.47 |13.44 |2346.26 |256 | | [convnextv2_nano.fcmae_ft_in22k_in1k](https://huggingface.co/timm/convnextv2_nano.fcmae_ft_in22k_in1k) |82.03 |96.166|224 |15.62 |2.46 |8.37 |2300.18 |256 | | [convnextv2_nano.fcmae_ft_in1k](https://huggingface.co/timm/convnextv2_nano.fcmae_ft_in1k) |81.83 |95.738|224 |15.62 |2.46 |8.37 |2321.48 |256 | | [convnext_nano_ols.d1h_in1k](https://huggingface.co/timm/convnext_nano_ols.d1h_in1k) |80.866|95.246|224 |15.65 |2.65 |9.38 |3523.85 |256 | | [convnext_nano.d1h_in1k](https://huggingface.co/timm/convnext_nano.d1h_in1k) |80.768|95.334|224 |15.59 |2.46 |8.37 |3915.58 |256 | | [convnextv2_pico.fcmae_ft_in1k](https://huggingface.co/timm/convnextv2_pico.fcmae_ft_in1k) |80.304|95.072|224 |9.07 |1.37 |6.1 |3274.57 |256 | | [convnext_pico.d1_in1k](https://huggingface.co/timm/convnext_pico.d1_in1k) |79.526|94.558|224 |9.05 |1.37 |6.1 |5686.88 |256 | | [convnext_pico_ols.d1_in1k](https://huggingface.co/timm/convnext_pico_ols.d1_in1k) |79.522|94.692|224 |9.06 |1.43 |6.5 |5422.46 |256 | | [convnextv2_femto.fcmae_ft_in1k](https://huggingface.co/timm/convnextv2_femto.fcmae_ft_in1k) |78.488|93.98 |224 |5.23 |0.79 |4.57 |4264.2 |256 | | 
[convnext_femto_ols.d1_in1k](https://huggingface.co/timm/convnext_femto_ols.d1_in1k) |77.86 |93.83 |224 |5.23 |0.82 |4.87 |6910.6 |256 | | [convnext_femto.d1_in1k](https://huggingface.co/timm/convnext_femto.d1_in1k) |77.454|93.68 |224 |5.22 |0.79 |4.57 |7189.92 |256 | | [convnextv2_atto.fcmae_ft_in1k](https://huggingface.co/timm/convnextv2_atto.fcmae_ft_in1k) |76.664|93.044|224 |3.71 |0.55 |3.81 |4728.91 |256 | | [convnext_atto_ols.a2_in1k](https://huggingface.co/timm/convnext_atto_ols.a2_in1k) |75.88 |92.846|224 |3.7 |0.58 |4.11 |7963.16 |256 | | [convnext_atto.d2_in1k](https://huggingface.co/timm/convnext_atto.d2_in1k) |75.664|92.9 |224 |3.7 |0.55 |3.81 |8439.22 |256 | ## Citation ```bibtex @article{Woo2023ConvNeXtV2, title={ConvNeXt V2: Co-designing and Scaling ConvNets with Masked Autoencoders}, author={Sanghyun Woo, Shoubhik Debnath, Ronghang Hu, Xinlei Chen, Zhuang Liu, In So Kweon and Saining Xie}, year={2023}, journal={arXiv preprint arXiv:2301.00808}, } ``` ```bibtex @misc{rw2019timm, author = {Ross Wightman}, title = {PyTorch Image Models}, year = {2019}, publisher = {GitHub}, journal = {GitHub repository}, doi = {10.5281/zenodo.4414861}, howpublished = {\url{https://github.com/huggingface/pytorch-image-models}} } ```
15,833
[ [ -0.0682373046875, -0.03057861328125, -0.00530242919921875, 0.036407470703125, -0.0313720703125, -0.01555633544921875, -0.012664794921875, -0.035308837890625, 0.0643310546875, 0.017242431640625, -0.045013427734375, -0.038665771484375, -0.05267333984375, -0.00428009033203125, 0.00746917724609375, 0.0673828125, -0.0025119781494140625, -0.0096435546875, 0.0183258056640625, -0.028717041015625, -0.0178070068359375, -0.0271759033203125, -0.06365966796875, -0.0153350830078125, 0.017578125, 0.024627685546875, 0.05755615234375, 0.04559326171875, 0.0294342041015625, 0.040069580078125, -0.0172271728515625, 0.01284027099609375, -0.0149993896484375, -0.0258636474609375, 0.040863037109375, -0.0309906005859375, -0.06640625, 0.0165557861328125, 0.05963134765625, 0.03826904296875, 0.004444122314453125, 0.0176544189453125, 0.026763916015625, 0.0350341796875, 0.0024127960205078125, -0.0038547515869140625, -0.0075531005859375, 0.014434814453125, -0.0178375244140625, 0.00312042236328125, 0.0046234130859375, -0.050018310546875, 0.025054931640625, -0.0435791015625, 0.002429962158203125, -0.00006031990051269531, 0.10089111328125, -0.006175994873046875, -0.0174560546875, -0.0005526542663574219, 0.00958251953125, 0.053375244140625, -0.058685302734375, 0.02191162109375, 0.032135009765625, -0.007427215576171875, -0.01334381103515625, -0.05029296875, -0.045806884765625, -0.0025691986083984375, -0.0296478271484375, 0.01629638671875, -0.0278472900390625, -0.005870819091796875, 0.041351318359375, 0.033233642578125, -0.038787841796875, -0.0033702850341796875, -0.025146484375, -0.00983428955078125, 0.059661865234375, -0.0079345703125, 0.045166015625, -0.0260162353515625, -0.04730224609375, -0.02154541015625, -0.015655517578125, 0.03277587890625, 0.01482391357421875, -0.004550933837890625, -0.0740966796875, 0.037017822265625, 0.007709503173828125, 0.0193939208984375, 0.028076171875, -0.01409149169921875, 0.057220458984375, -0.0193939208984375, -0.041656494140625, -0.0229034423828125, 
0.0894775390625, 0.051849365234375, 0.029754638671875, 0.01032257080078125, 0.0036258697509765625, -0.00641632080078125, -0.035919189453125, -0.076904296875, -0.01418304443359375, 0.028717041015625, -0.04156494140625, -0.010986328125, 0.0254669189453125, -0.060089111328125, 0.00839996337890625, -0.0095672607421875, 0.0176849365234375, -0.05908203125, -0.0300445556640625, -0.0083160400390625, -0.026123046875, 0.030670166015625, 0.0212554931640625, -0.0288238525390625, 0.0241241455078125, 0.0216522216796875, 0.07415771484375, 0.0222930908203125, -0.01001739501953125, -0.0321044921875, -0.0123138427734375, -0.02642822265625, 0.0254669189453125, 0.01299285888671875, -0.01041412353515625, -0.0199432373046875, 0.032928466796875, -0.0113372802734375, -0.032196044921875, 0.0294647216796875, 0.0205078125, 0.00814056396484375, -0.028350830078125, -0.0261993408203125, -0.0198211669921875, 0.02752685546875, -0.037628173828125, 0.07977294921875, 0.036407470703125, -0.0765380859375, 0.02374267578125, -0.035064697265625, -0.005374908447265625, -0.02264404296875, 0.00494384765625, -0.059112548828125, -0.00888824462890625, 0.019683837890625, 0.054290771484375, -0.01055145263671875, -0.011138916015625, -0.0270538330078125, -0.004665374755859375, 0.026031494140625, 0.006412506103515625, 0.0714111328125, 0.013336181640625, -0.03839111328125, 0.0002703666687011719, -0.047821044921875, 0.023651123046875, 0.029571533203125, -0.0005445480346679688, -0.005313873291015625, -0.0601806640625, 0.0033054351806640625, 0.04266357421875, 0.01371002197265625, -0.0384521484375, 0.0203094482421875, -0.0184326171875, 0.0300445556640625, 0.04815673828125, -0.003948211669921875, 0.023651123046875, -0.043670654296875, 0.040252685546875, 0.00746917724609375, 0.0209808349609375, -0.003414154052734375, -0.02978515625, -0.057861328125, -0.050750732421875, 0.0167083740234375, 0.03533935546875, -0.035797119140625, 0.05517578125, 0.011138916015625, -0.04449462890625, -0.056671142578125, 0.0167083740234375, 
0.039337158203125, 0.017242431640625, 0.0163726806640625, -0.0282745361328125, -0.05047607421875, -0.07159423828125, -0.006076812744140625, 0.006618499755859375, -0.005001068115234375, 0.046875, 0.0289306640625, -0.00653839111328125, 0.042205810546875, -0.031219482421875, -0.0229034423828125, -0.00946807861328125, -0.00621795654296875, 0.0305938720703125, 0.059234619140625, 0.08685302734375, -0.0643310546875, -0.06927490234375, 0.00107574462890625, -0.0836181640625, 0.0005946159362792969, -0.0034637451171875, -0.03216552734375, 0.019866943359375, 0.0200347900390625, -0.072509765625, 0.05023193359375, 0.02801513671875, -0.0455322265625, 0.032470703125, -0.0201263427734375, 0.0233154296875, -0.073974609375, 0.01519775390625, 0.02008056640625, -0.0245513916015625, -0.04010009765625, 0.005401611328125, -0.006740570068359375, 0.0117340087890625, -0.048736572265625, 0.06768798828125, -0.052490234375, 0.005313873291015625, 0.0010738372802734375, 0.00922393798828125, 0.0023097991943359375, 0.039031982421875, -0.00218963623046875, 0.03485107421875, 0.0582275390625, -0.0224456787109375, 0.033447265625, 0.038726806640625, -0.003662109375, 0.05621337890625, -0.0474853515625, 0.01050567626953125, 0.00818634033203125, 0.035125732421875, -0.06903076171875, -0.03240966796875, 0.04388427734375, -0.057098388671875, 0.0374755859375, -0.019866943359375, -0.028900146484375, -0.060577392578125, -0.06500244140625, 0.01947021484375, 0.045440673828125, -0.048583984375, 0.0108795166015625, 0.02276611328125, 0.005161285400390625, -0.044219970703125, -0.05029296875, -0.004955291748046875, -0.0303955078125, -0.0643310546875, 0.0295562744140625, 0.0081634521484375, -0.00920867919921875, -0.00011938810348510742, -0.0020046234130859375, -0.0033016204833984375, -0.01202392578125, 0.039093017578125, 0.0300445556640625, -0.01934814453125, -0.026336669921875, -0.0208282470703125, -0.00855255126953125, 0.00231170654296875, -0.01119232177734375, 0.041961669921875, -0.0233154296875, 0.0111236572265625, 
-0.07781982421875, 0.0162353515625, 0.048431396484375, -0.002559661865234375, 0.0673828125, 0.07830810546875, -0.035247802734375, 0.00945281982421875, -0.0297393798828125, -0.01259613037109375, -0.0386962890625, -0.00702667236328125, -0.038818359375, -0.04931640625, 0.060516357421875, 0.015289306640625, -0.00714874267578125, 0.05352783203125, 0.0259857177734375, -0.0183868408203125, 0.0643310546875, 0.03826904296875, -0.0060272216796875, 0.043609619140625, -0.06939697265625, 0.0013170242309570312, -0.06365966796875, -0.046783447265625, -0.00786590576171875, -0.043853759765625, -0.0548095703125, -0.032318115234375, 0.0224761962890625, 0.035247802734375, -0.010101318359375, 0.0499267578125, -0.04486083984375, -0.0052947998046875, 0.036346435546875, 0.024078369140625, -0.0210113525390625, -0.01467132568359375, -0.00943756103515625, -0.0172576904296875, -0.043487548828125, -0.0105743408203125, 0.052154541015625, 0.050048828125, 0.0281829833984375, 0.00015413761138916016, 0.036865234375, -0.003116607666015625, 0.02276611328125, -0.038238525390625, 0.05377197265625, -0.005615234375, -0.038360595703125, -0.01477813720703125, -0.032928466796875, -0.0723876953125, 0.01302337646484375, -0.0272216796875, -0.061676025390625, -0.007045745849609375, 0.015655517578125, -0.022857666015625, 0.040679931640625, -0.050994873046875, 0.056732177734375, -0.006256103515625, -0.036956787109375, 0.0074462890625, -0.0672607421875, 0.0199432373046875, 0.03057861328125, -0.005207061767578125, -0.01268768310546875, 0.01091766357421875, 0.0618896484375, -0.06475830078125, 0.03729248046875, -0.029388427734375, 0.00376129150390625, 0.0418701171875, -0.004058837890625, 0.0333251953125, 0.0123138427734375, 0.0003581047058105469, 0.0037078857421875, 0.00986480712890625, -0.04754638671875, -0.0299835205078125, 0.0499267578125, -0.051727294921875, -0.028167724609375, -0.0408935546875, -0.020751953125, 0.0135345458984375, -0.00014734268188476562, 0.04925537109375, 0.044097900390625, 
-0.00888824462890625, 0.014007568359375, 0.0423583984375, -0.0258026123046875, 0.0391845703125, -0.0119781494140625, -0.0006089210510253906, -0.03936767578125, 0.05859375, 0.004070281982421875, 0.0098876953125, 0.003566741943359375, 0.004486083984375, -0.0312347412109375, -0.01204681396484375, -0.01277923583984375, 0.04925537109375, -0.018402099609375, -0.0282745361328125, -0.048187255859375, -0.03387451171875, -0.045166015625, -0.024658203125, -0.0311431884765625, -0.021820068359375, -0.026824951171875, 0.00598907470703125, 0.053497314453125, 0.040191650390625, -0.0298309326171875, 0.03314208984375, -0.046875, 0.0245361328125, 0.006816864013671875, 0.0305328369140625, -0.0203704833984375, -0.045623779296875, 0.0006899833679199219, 0.0023555755615234375, -0.0171661376953125, -0.0565185546875, 0.04718017578125, 0.01087188720703125, 0.027587890625, 0.039581298828125, -0.0233612060546875, 0.05816650390625, -0.007038116455078125, 0.03851318359375, 0.042388916015625, -0.06439208984375, 0.03326416015625, -0.0277557373046875, 0.0085296630859375, 0.01390838623046875, 0.026214599609375, -0.0369873046875, -0.0235595703125, -0.07110595703125, -0.045013427734375, 0.05364990234375, 0.01352691650390625, -0.0007915496826171875, 0.005847930908203125, 0.047454833984375, -0.006145477294921875, 0.01094818115234375, -0.040740966796875, -0.055938720703125, -0.018524169921875, -0.00998687744140625, -0.006816864013671875, -0.006175994873046875, -0.002498626708984375, -0.051788330078125, 0.036468505859375, -0.0099334716796875, 0.0452880859375, 0.0199432373046875, -0.0010929107666015625, -0.0030364990234375, -0.023651123046875, 0.042510986328125, 0.025726318359375, -0.0218048095703125, -0.00862884521484375, 0.027435302734375, -0.038909912109375, 0.0033931732177734375, 0.0207672119140625, 0.004730224609375, 0.016632080078125, 0.02398681640625, 0.049102783203125, 0.0203094482421875, -0.011932373046875, 0.04571533203125, -0.0162811279296875, -0.0305328369140625, -0.02423095703125, 
-0.002262115478515625, 0.010986328125, 0.032928466796875, 0.01593017578125, 0.00466156005859375, -0.0217132568359375, -0.0438232421875, 0.0419921875, 0.05718994140625, -0.033905029296875, -0.041534423828125, 0.0506591796875, -0.00662994384765625, -0.007598876953125, 0.040618896484375, -0.00855255126953125, -0.053985595703125, 0.07666015625, 0.021728515625, 0.044921875, -0.041595458984375, 0.01873779296875, 0.066650390625, 0.0016498565673828125, 0.00920867919921875, 0.023681640625, 0.025726318359375, -0.031707763671875, 0.004619598388671875, -0.047515869140625, 0.01380157470703125, 0.042327880859375, -0.0361328125, 0.02716064453125, -0.0570068359375, -0.02557373046875, 0.013214111328125, 0.033050537109375, -0.063720703125, 0.0242767333984375, 0.0059356689453125, 0.08343505859375, -0.059326171875, 0.06842041015625, 0.0540771484375, -0.02630615234375, -0.07147216796875, -0.01055908203125, 0.0167388916015625, -0.05859375, 0.0288238525390625, 0.019134521484375, 0.0178985595703125, -0.01593017578125, -0.044586181640625, -0.0380859375, 0.0924072265625, 0.034088134765625, -0.013153076171875, 0.0084686279296875, -0.0241851806640625, 0.0299224853515625, -0.0220184326171875, 0.036041259765625, 0.0418701171875, 0.039337158203125, 0.0173492431640625, -0.0699462890625, 0.026885986328125, -0.0310211181640625, -0.0116424560546875, 0.02197265625, -0.10345458984375, 0.0770263671875, -0.0267486572265625, -0.0026760101318359375, 0.01323699951171875, 0.0626220703125, 0.0271148681640625, 0.00566864013671875, 0.0297088623046875, 0.052490234375, 0.034698486328125, -0.0146942138671875, 0.07989501953125, 0.0007715225219726562, 0.031463623046875, 0.022857666015625, 0.039642333984375, 0.0292510986328125, 0.0271148681640625, -0.031036376953125, 0.008514404296875, 0.06597900390625, -0.01476287841796875, 0.01151275634765625, 0.0146636962890625, -0.01267242431640625, -0.00927734375, -0.0155181884765625, -0.046173095703125, 0.031585693359375, 0.0126953125, -0.0209503173828125, 
0.0012731552124023438, -0.0047454833984375, 0.037445068359375, -0.001804351806640625, -0.01218414306640625, 0.034759521484375, 0.02001953125, -0.042877197265625, 0.04083251953125, -0.004302978515625, 0.07440185546875, -0.0274200439453125, 0.0014715194702148438, -0.02532958984375, 0.0227203369140625, -0.018798828125, -0.08673095703125, 0.0240936279296875, -0.0110321044921875, 0.0137939453125, -0.00482177734375, 0.04766845703125, -0.034942626953125, -0.0183868408203125, 0.0386962890625, 0.024749755859375, 0.0279541015625, 0.005352020263671875, -0.08642578125, 0.01751708984375, 0.00986480712890625, -0.040679931640625, 0.033477783203125, 0.035430908203125, 0.018707275390625, 0.05078125, 0.0307159423828125, 0.0124359130859375, 0.00748443603515625, -0.026123046875, 0.059417724609375, -0.04693603515625, -0.035308837890625, -0.0648193359375, 0.03314208984375, -0.0252532958984375, -0.04693603515625, 0.059967041015625, 0.03472900390625, 0.0391845703125, 0.00789642333984375, 0.038818359375, -0.036773681640625, 0.026153564453125, -0.033203125, 0.054168701171875, -0.060791015625, -0.0204620361328125, -0.0294342041015625, -0.060272216796875, -0.0207366943359375, 0.054290771484375, 0.00537872314453125, 0.0171051025390625, 0.026519775390625, 0.046173095703125, -0.005168914794921875, -0.0205230712890625, -0.004016876220703125, 0.0208587646484375, 0.004058837890625, 0.062408447265625, 0.037506103515625, -0.05987548828125, 0.0169219970703125, -0.049407958984375, -0.0236663818359375, -0.026885986328125, -0.051788330078125, -0.08251953125, -0.05889892578125, -0.039337158203125, -0.051116943359375, -0.022674560546875, 0.085205078125, 0.07177734375, -0.041961669921875, -0.0111846923828125, 0.0236663818359375, 0.00855255126953125, -0.0167083740234375, -0.019866943359375, 0.040313720703125, 0.025177001953125, -0.07574462890625, -0.0200958251953125, 0.0081787109375, 0.0418701171875, 0.02197265625, -0.02978515625, -0.01690673828125, -0.0038776397705078125, 0.0323486328125, 0.062042236328125, 
-0.051788330078125, -0.03546142578125, 0.00179290771484375, -0.0208282470703125, 0.019500732421875, 0.023345947265625, -0.0308990478515625, -0.00524139404296875, 0.038604736328125, 0.0089111328125, 0.056793212890625, 0.00817108154296875, 0.01678466796875, -0.048431396484375, 0.050811767578125, -0.00414276123046875, 0.02593994140625, 0.02740478515625, -0.0302734375, 0.054656982421875, 0.036773681640625, -0.03369140625, -0.07366943359375, -0.021728515625, -0.10662841796875, 0.0008339881896972656, 0.060516357421875, -0.013763427734375, -0.039031982421875, 0.038604736328125, -0.0245819091796875, 0.03936767578125, -0.01715087890625, 0.021087646484375, 0.0266571044921875, -0.0231170654296875, -0.0362548828125, -0.041656494140625, 0.055389404296875, 0.025115966796875, -0.05047607421875, -0.028045654296875, -0.002048492431640625, 0.037872314453125, 0.01593017578125, 0.0595703125, -0.01509857177734375, 0.01416778564453125, 0.00301361083984375, 0.0105743408203125, 0.0014972686767578125, 0.0012302398681640625, -0.01514434814453125, -0.01555633544921875, -0.02484130859375, -0.0455322265625 ] ]
Gryphe/MythoLogic-L2-13b
2023-08-04T10:12:47.000Z
[ "transformers", "pytorch", "llama", "text-generation", "en", "license:other", "endpoints_compatible", "has_space", "text-generation-inference", "region:us" ]
text-generation
Gryphe
null
null
Gryphe/MythoLogic-L2-13b
15
5,852
transformers
2023-08-03T13:58:25
--- license: other language: - en --- The Llama 2 sequel to my [original experiment](https://huggingface.co/Gryphe/MythoLogic-13b) with gradient merges using [the following script](https://github.com/Gryphe/BlockMerge_Gradient). Its three models ([Hermes](https://huggingface.co/NousResearch/Nous-Hermes-Llama2-13b), [Chronos](https://huggingface.co/elinas/chronos-13b-v2) and [Airoboros](https://huggingface.co/jondurbin/airoboros-l2-13b-gpt4-2.0)) are almost evenly divided over the layer structure this time. Airoboros was the "wildcard model" due to its superior ability to understand complex instructions. Quantized models are available from TheBloke: [GGML](https://huggingface.co/TheBloke/MythoLogic-L2-13B-GGML) - [GPTQ](https://huggingface.co/TheBloke/MythoLogic-L2-13B-GPTQ) (You're the best!) ## Model details As before, the main objective was to create an all-round model with improved roleplaying capabilities. MythoLogic-L2 differs from its predecessor in that it focuses primarily on the understanding of instructions and personalities of complex character cards. Illustrated below are the gradients used for this specific L2 recipe, with the top of the image representing layer 0 and the bottom layer 40. ![](MythoLogic-L2.png) ## Prompt Format This model primarily uses (and was heavily tested with) Alpaca formatting, so for optimal model performance, use: ``` <System prompt/Character Card> ### Instruction: Your instruction or question here. For roleplay purposes, I suggest the following - Write <CHAR NAME>'s next reply in a chat between <YOUR NAME> and <CHAR NAME>. Write a single reply only. ### Response: ``` --- license: other ---
1,666
[ [ -0.026763916015625, -0.04742431640625, 0.024932861328125, 0.0193023681640625, -0.027313232421875, 0.00205230712890625, 0.008819580078125, -0.047332763671875, 0.0289459228515625, 0.047149658203125, -0.055908203125, -0.0266265869140625, -0.035858154296875, -0.01413726806640625, -0.01354217529296875, 0.09039306640625, -0.0160675048828125, 0.00803375244140625, -0.0036773681640625, -0.01446533203125, -0.0240478515625, -0.0247955322265625, -0.069091796875, -0.05059814453125, 0.036041259765625, 0.01229095458984375, 0.060455322265625, 0.060516357421875, 0.041778564453125, 0.0193023681640625, -0.012725830078125, 0.0272216796875, -0.04302978515625, 0.0147705078125, -0.0029449462890625, -0.04461669921875, -0.049957275390625, 0.01517486572265625, 0.034881591796875, 0.0250396728515625, -0.030609130859375, 0.030059814453125, 0.000640869140625, 0.037933349609375, -0.0240020751953125, 0.002880096435546875, -0.016815185546875, 0.02777099609375, -0.002777099609375, -0.01006317138671875, -0.009124755859375, -0.0335693359375, -0.0007500648498535156, -0.07745361328125, 0.009521484375, 0.00855255126953125, 0.053253173828125, 0.02020263671875, -0.03466796875, -0.0302581787109375, -0.047332763671875, 0.05828857421875, -0.054229736328125, 0.00620269775390625, 0.0253448486328125, 0.03460693359375, -0.014068603515625, -0.07110595703125, -0.04534912109375, -0.012542724609375, -0.004093170166015625, 0.01678466796875, -0.03887939453125, -0.01371002197265625, 0.0261688232421875, 0.027679443359375, -0.04339599609375, 0.0350341796875, -0.05194091796875, -0.0233306884765625, 0.04339599609375, 0.029205322265625, 0.0019311904907226562, 0.0022525787353515625, -0.047698974609375, -0.0307464599609375, -0.054290771484375, 0.0061798095703125, 0.039764404296875, -0.01042938232421875, -0.0345458984375, 0.061279296875, 0.003902435302734375, 0.033477783203125, 0.0169830322265625, -0.03265380859375, 0.018646240234375, -0.017486572265625, -0.0193634033203125, -0.0209808349609375, 0.04443359375, 
0.057891845703125, 0.004047393798828125, 0.012298583984375, -0.00531005859375, 0.0164947509765625, -0.000202178955078125, -0.07855224609375, -0.01390838623046875, 0.0220489501953125, -0.038970947265625, -0.03985595703125, -0.030059814453125, -0.050262451171875, -0.0423583984375, -0.01904296875, 0.0191497802734375, -0.0241546630859375, -0.0241546630859375, 0.01103973388671875, -0.01200103759765625, 0.03546142578125, 0.0400390625, -0.05218505859375, 0.036468505859375, 0.049285888671875, 0.059295654296875, 0.0017108917236328125, -0.01027679443359375, -0.03228759765625, 0.0062408447265625, -0.043304443359375, 0.049072265625, -0.0299835205078125, -0.0380859375, -0.006378173828125, -0.0111236572265625, 0.02593994140625, -0.045928955078125, 0.053070068359375, -0.042694091796875, 0.0261383056640625, -0.03338623046875, -0.037689208984375, -0.029510498046875, -0.0018587112426757812, -0.0587158203125, 0.071044921875, 0.009124755859375, -0.0450439453125, 0.00005608797073364258, -0.038970947265625, -0.00586700439453125, -0.00998687744140625, -0.0028934478759765625, -0.0160064697265625, 0.0021419525146484375, -0.00665283203125, 0.01306915283203125, -0.061737060546875, 0.0008935928344726562, -0.0284881591796875, -0.03228759765625, 0.012725830078125, -0.00510406494140625, 0.050750732421875, 0.02117919921875, -0.0182037353515625, -0.00835418701171875, -0.046173095703125, -0.0014657974243164062, 0.0211181640625, -0.0199737548828125, -0.00278472900390625, -0.01450347900390625, 0.016021728515625, 0.0166168212890625, 0.00738525390625, -0.01273345947265625, 0.03265380859375, 0.0035228729248046875, 0.0244598388671875, 0.053253173828125, -0.00653839111328125, 0.045379638671875, -0.047821044921875, 0.046539306640625, 0.00254058837890625, 0.0082855224609375, -0.0034351348876953125, -0.0531005859375, -0.06390380859375, -0.033203125, 0.004474639892578125, 0.058624267578125, -0.02874755859375, 0.0167388916015625, 0.0172271728515625, -0.057464599609375, -0.031524658203125, 0.00286865234375, 
0.04351806640625, 0.047637939453125, 0.02593994140625, -0.036865234375, -0.040069580078125, -0.0777587890625, 0.0033283233642578125, -0.0250396728515625, -0.01393890380859375, 0.0311737060546875, 0.020172119140625, -0.0303802490234375, 0.04461669921875, -0.055816650390625, -0.01169586181640625, -0.00023233890533447266, 0.01806640625, 0.021453857421875, 0.054168701171875, 0.056365966796875, -0.0269317626953125, -0.015655517578125, 0.002155303955078125, -0.06353759765625, -0.0164794921875, 0.00432586669921875, -0.032562255859375, 0.0015249252319335938, 0.01953125, -0.06585693359375, 0.034027099609375, 0.0369873046875, -0.019195556640625, 0.039794921875, -0.007244110107421875, 0.017364501953125, -0.0780029296875, 0.0187225341796875, 0.0211639404296875, 0.002399444580078125, -0.06878662109375, 0.031646728515625, -0.0089569091796875, 0.007289886474609375, -0.02618408203125, 0.06475830078125, -0.035247802734375, -0.014984130859375, -0.0124969482421875, -0.00826263427734375, 0.002910614013671875, 0.041961669921875, -0.004730224609375, 0.04388427734375, 0.05291748046875, -0.02655029296875, 0.047637939453125, 0.021728515625, -0.0111083984375, 0.0335693359375, -0.0692138671875, 0.036407470703125, -0.017242431640625, 0.0528564453125, -0.08306884765625, -0.02325439453125, 0.060333251953125, -0.01358795166015625, 0.0186309814453125, -0.01220703125, -0.040008544921875, -0.036590576171875, -0.045013427734375, 0.05059814453125, 0.08294677734375, -0.037872314453125, 0.052764892578125, 0.00353240966796875, 0.0008687973022460938, -0.051666259765625, -0.0665283203125, -0.01215362548828125, -0.033905029296875, -0.037841796875, 0.02728271484375, -0.0259552001953125, -0.02545166015625, -0.008056640625, -0.006999969482421875, -0.01039886474609375, -0.01062774658203125, 0.023773193359375, 0.0369873046875, -0.0183868408203125, -0.031524658203125, 0.035247802734375, 0.0094146728515625, -0.0120086669921875, -0.01277923583984375, 0.0491943359375, -0.000453948974609375, -0.00675201416015625, 
-0.060760498046875, 0.02667236328125, 0.0496826171875, -0.00951385498046875, 0.03973388671875, 0.0626220703125, -0.0343017578125, 0.005725860595703125, -0.0673828125, -0.0141448974609375, -0.0343017578125, 0.011444091796875, -0.0186920166015625, -0.060272216796875, 0.0714111328125, 0.00635528564453125, 0.01300048828125, 0.027862548828125, 0.040985107421875, -0.0119171142578125, 0.052154541015625, 0.039398193359375, 0.006618499755859375, 0.021026611328125, -0.036376953125, -0.0007810592651367188, -0.07550048828125, -0.042816162109375, -0.011505126953125, -0.0269317626953125, -0.04132080078125, -0.06622314453125, 0.0277862548828125, 0.050079345703125, -0.0188751220703125, 0.028533935546875, -0.01525115966796875, 0.024383544921875, 0.034149169921875, 0.0214996337890625, 0.0155181884765625, 0.046875, 0.0159454345703125, -0.0052337646484375, -0.029327392578125, -0.03717041015625, 0.08343505859375, 0.043304443359375, 0.06640625, 0.034942626953125, 0.05377197265625, 0.01049041748046875, 0.0266265869140625, -0.04290771484375, 0.043304443359375, -0.005100250244140625, -0.03668212890625, -0.002269744873046875, -0.0139007568359375, -0.047088623046875, 0.026519775390625, -0.0169677734375, -0.048919677734375, 0.01751708984375, 0.0253753662109375, -0.032867431640625, 0.00859832763671875, -0.0404052734375, 0.031463623046875, 0.01319122314453125, -0.04107666015625, -0.03106689453125, -0.0538330078125, 0.05975341796875, -0.01253509521484375, 0.005329132080078125, 0.004787445068359375, -0.02508544921875, 0.053192138671875, -0.035614013671875, 0.06317138671875, 0.005741119384765625, -0.03546142578125, 0.0494384765625, 0.035125732421875, 0.041778564453125, 0.01094818115234375, -0.00780487060546875, 0.01776123046875, -0.0206451416015625, -0.0218963623046875, -0.0496826171875, 0.061553955078125, -0.06793212890625, -0.047332763671875, -0.0270538330078125, -0.038482666015625, 0.01141357421875, -0.0167999267578125, 0.0196533203125, 0.047698974609375, -0.007358551025390625, 
-0.020477294921875, 0.038909912109375, -0.0037708282470703125, 0.0277862548828125, 0.04608154296875, -0.0218658447265625, -0.04119873046875, 0.0310211181640625, -0.0156402587890625, 0.0208892822265625, 0.01267242431640625, 0.00807952880859375, -0.0220794677734375, -0.01419830322265625, -0.044403076171875, 0.0419921875, -0.048126220703125, -0.0250701904296875, -0.046295166015625, -0.01015472412109375, -0.0240020751953125, -0.0099029541015625, -0.01009368896484375, -0.054443359375, -0.040191650390625, -0.00662994384765625, 0.042144775390625, 0.0531005859375, -0.047332763671875, 0.029876708984375, -0.03460693359375, 0.019683837890625, 0.0294952392578125, 0.0007619857788085938, 0.007480621337890625, -0.0599365234375, 0.012603759765625, 0.0006966590881347656, -0.017822265625, -0.075927734375, 0.021148681640625, 0.0005145072937011719, 0.0245513916015625, 0.0247955322265625, -0.04229736328125, 0.05670166015625, -0.03948974609375, 0.05999755859375, 0.040802001953125, -0.054229736328125, 0.03106689453125, -0.032745361328125, -0.015655517578125, 0.0175323486328125, 0.0308380126953125, -0.0404052734375, -0.00878143310546875, -0.03045654296875, -0.04986572265625, 0.06463623046875, 0.0301055908203125, 0.0025634765625, 0.02069091796875, 0.04534912109375, 0.0157623291015625, 0.0219573974609375, -0.055755615234375, -0.045135498046875, -0.016845703125, 0.01105499267578125, -0.006641387939453125, -0.036041259765625, -0.0465087890625, -0.01446533203125, 0.038421630859375, 0.001338958740234375, 0.0209503173828125, 0.001209259033203125, 0.0224151611328125, -0.034576416015625, -0.018768310546875, 0.04864501953125, 0.025665283203125, -0.0223846435546875, 0.0010442733764648438, 0.006038665771484375, -0.0299224853515625, 0.0114288330078125, 0.0164794921875, -0.0013017654418945312, -0.007709503173828125, 0.046600341796875, 0.0631103515625, -0.00716400146484375, -0.03533935546875, 0.02203369140625, 0.004283905029296875, -0.012939453125, -0.004486083984375, 0.011016845703125, 
0.004192352294921875, 0.038787841796875, 0.0180511474609375, 0.020660400390625, 0.01904296875, -0.05230712890625, 0.0010747909545898438, 0.0214080810546875, 0.0164794921875, -0.0145111083984375, 0.051971435546875, 0.01318359375, -0.020355224609375, 0.0362548828125, -0.0277557373046875, -0.029022216796875, 0.06927490234375, 0.057830810546875, 0.04852294921875, -0.03485107421875, 0.028472900390625, 0.0484619140625, 0.01306915283203125, -0.026824951171875, 0.0164031982421875, -0.00827789306640625, -0.0360107421875, -0.0140838623046875, -0.03265380859375, -0.0235748291015625, 0.00732421875, -0.055694580078125, 0.0242462158203125, -0.056427001953125, -0.0092010498046875, -0.01508331298828125, 0.0087127685546875, -0.06011962890625, 0.01461029052734375, 0.0123443603515625, 0.06207275390625, -0.07281494140625, 0.049835205078125, 0.047332763671875, -0.0258026123046875, -0.0760498046875, -0.01349639892578125, 0.0164794921875, -0.0887451171875, 0.0478515625, -0.0100860595703125, -0.0027637481689453125, -0.01041412353515625, -0.043701171875, -0.08160400390625, 0.1282958984375, 0.00991058349609375, -0.05059814453125, 0.0037784576416015625, -0.01279449462890625, 0.0545654296875, -0.035186767578125, 0.043731689453125, 0.047637939453125, 0.0192108154296875, 0.038299560546875, -0.07269287109375, 0.0232696533203125, -0.0213775634765625, 0.0168609619140625, -0.01348114013671875, -0.07568359375, 0.07684326171875, -0.0277252197265625, -0.01097869873046875, 0.049072265625, 0.05535888671875, 0.051727294921875, 0.01551055908203125, 0.030303955078125, 0.06524658203125, 0.054290771484375, 0.0053558349609375, 0.077392578125, -0.0173187255859375, 0.0350341796875, 0.06396484375, -0.033233642578125, 0.03851318359375, 0.0232696533203125, -0.008880615234375, 0.039581298828125, 0.060028076171875, -0.0171051025390625, 0.05291748046875, 0.0238189697265625, -0.0135040283203125, -0.0013360977172851562, -0.0213470458984375, -0.06439208984375, 0.0113525390625, 0.01078033447265625, -0.005687713623046875, 
-0.004169464111328125, -0.033782958984375, 0.0311737060546875, -0.0184783935546875, -0.01064300537109375, 0.023651123046875, 0.00478363037109375, -0.04852294921875, 0.048309326171875, 0.0241546630859375, 0.057708740234375, -0.05792236328125, -0.012939453125, -0.06475830078125, -0.01270294189453125, -0.0187835693359375, -0.052490234375, -0.0155487060546875, 0.0030574798583984375, -0.005771636962890625, 0.0123748779296875, 0.033721923828125, -0.04351806640625, -0.046600341796875, 0.0258636474609375, 0.031951904296875, 0.019805908203125, 0.047698974609375, -0.053009033203125, 0.03509521484375, -0.0046234130859375, -0.017913818359375, 0.00823974609375, 0.01230621337890625, 0.00592041015625, 0.050628662109375, 0.044586181640625, -0.0133209228515625, -0.00768280029296875, 0.0095977783203125, 0.07647705078125, -0.016510009765625, -0.023590087890625, -0.05096435546875, 0.02978515625, 0.0010023117065429688, -0.03662109375, 0.039215087890625, 0.04638671875, 0.038726806640625, -0.015777587890625, 0.042755126953125, 0.01078033447265625, 0.0074462890625, -0.052703857421875, 0.0287322998046875, -0.061370849609375, 0.02008056640625, -0.019805908203125, -0.08648681640625, -0.0111083984375, 0.0465087890625, 0.022918701171875, 0.004024505615234375, 0.055389404296875, 0.08111572265625, -0.005840301513671875, -0.01255035400390625, -0.005496978759765625, 0.02099609375, 0.02191162109375, 0.057647705078125, 0.07403564453125, -0.045166015625, 0.014892578125, -0.01800537109375, -0.040802001953125, -0.0156707763671875, -0.058502197265625, -0.07562255859375, -0.021453857421875, -0.029022216796875, -0.04437255859375, 0.00714111328125, 0.07159423828125, 0.05218505859375, -0.040924072265625, -0.035614013671875, 0.01043701171875, -0.012481689453125, -0.00336456298828125, -0.01004791259765625, 0.0164642333984375, 0.028564453125, -0.05908203125, 0.026092529296875, 0.0092620849609375, 0.036651611328125, -0.01398468017578125, -0.0184478759765625, 0.02679443359375, 0.019256591796875, 
0.02984619140625, 0.0276031494140625, -0.072021484375, -0.006336212158203125, -0.01093292236328125, -0.006317138671875, -0.0176239013671875, 0.06982421875, -0.042755126953125, -0.0017957687377929688, 0.014739990234375, 0.017303466796875, 0.052642822265625, -0.0045013427734375, 0.0263214111328125, -0.0240478515625, 0.0289154052734375, -0.00624847412109375, 0.0214996337890625, 0.02374267578125, -0.03271484375, 0.056365966796875, 0.0191192626953125, -0.024200439453125, -0.0643310546875, 0.03057861328125, -0.10723876953125, -0.0022907257080078125, 0.102294921875, 0.0075531005859375, 0.007595062255859375, 0.052703857421875, -0.036102294921875, 0.0218658447265625, -0.03631591796875, 0.032928466796875, 0.0330810546875, -0.0210418701171875, 0.003734588623046875, -0.0284881591796875, 0.037139892578125, 0.033233642578125, -0.05096435546875, -0.015838623046875, 0.053619384765625, 0.031707763671875, 0.012451171875, 0.05706787109375, -0.0030689239501953125, 0.0211181640625, -0.0006184577941894531, 0.004425048828125, -0.0107269287109375, -0.0186920166015625, -0.03631591796875, -0.004802703857421875, -0.01523590087890625, -0.011962890625 ] ]
TigerResearch/tigerbot-7b-base
2023-09-20T06:16:15.000Z
[ "transformers", "pytorch", "llama", "text-generation", "license:apache-2.0", "endpoints_compatible", "has_space", "text-generation-inference", "region:us" ]
text-generation
TigerResearch
null
null
TigerResearch/tigerbot-7b-base
0
5,851
transformers
2023-08-19T06:21:50
--- license: apache-2.0 --- <div style="width: 100%;"> <p align="center" width="20%"> <img src="http://x-pai.algolet.com/bot/img/logo_core.png" alt="TigerBot" width="20%", style="display: block; margin: auto;"></img> </p> </div> <p align="center"> <font face="黑体" size=5"> A cutting-edge foundation for your very own LLM. </font> </p> <p align="center"> 💻<a href="https://github.com/TigerResearch/TigerBot" target="_blank">Github</a> • 🌐 <a href="https://tigerbot.com/" target="_blank">TigerBot</a> • 🤗 <a href="https://huggingface.co/TigerResearch" target="_blank">Hugging Face</a> </p> # 快速开始 - 方法1,通过transformers使用 - 下载 TigerBot Repo ```shell git clone https://github.com/TigerResearch/TigerBot.git ``` - 启动infer代码 ```shell python infer.py --model_path TigerResearch/tigerbot-7b-base ``` - 方法2: - 下载 TigerBot Repo ```shell git clone https://github.com/TigerResearch/TigerBot.git ``` - 安装git lfs: `git lfs install` - 通过huggingface或modelscope平台下载权重 ```shell git clone https://huggingface.co/TigerResearch/tigerbot-7b-base git clone https://www.modelscope.cn/TigerResearch/tigerbot-7b-base-v3.git ``` - 启动infer代码 ```shell python infer.py --model_path tigerbot-7b-base(-v3) --model_type base --max_generate_length 64 ``` ------ # Quick Start - Method 1, use through transformers - Clone TigerBot Repo ```shell git clone https://github.com/TigerResearch/TigerBot.git ``` - Run infer script ```shell python infer.py --model_path TigerResearch/tigerbot-7b-base ``` - Method 2: - Clone TigerBot Repo ```shell git clone https://github.com/TigerResearch/TigerBot.git ``` - install git lfs: `git lfs install` - Download weights from huggingface or modelscope ```shell git clone https://huggingface.co/TigerResearch/tigerbot-7b-base git clone https://www.modelscope.cn/TigerResearch/tigerbot-7b-base-v3.git ``` - Run infer script ```shell python infer.py --model_path tigerbot-7b-base(-v3) --model_type base --max_generate_length 64 ```
2,147
[ [ -0.03533935546875, -0.039398193359375, 0.02337646484375, 0.017608642578125, -0.02874755859375, 0.00543975830078125, -0.0005817413330078125, -0.0265350341796875, 0.039947509765625, 0.0260772705078125, -0.058013916015625, -0.0306854248046875, -0.0217132568359375, 0.002162933349609375, 0.003429412841796875, 0.069091796875, 0.0035266876220703125, -0.0028400421142578125, -0.0191497802734375, -0.016876220703125, 0.005588531494140625, -0.0275115966796875, -0.0560302734375, -0.01641845703125, 0.01025390625, 0.0095062255859375, 0.058563232421875, 0.06524658203125, 0.05194091796875, 0.0288238525390625, -0.0035686492919921875, 0.00545501708984375, -0.0108795166015625, 0.01462554931640625, -0.005008697509765625, -0.033416748046875, -0.04296875, -0.006038665771484375, 0.0440673828125, 0.01226806640625, 0.0001214742660522461, 0.0159454345703125, 0.005031585693359375, 0.049652099609375, -0.0457763671875, 0.0245819091796875, -0.0288238525390625, -0.0096435546875, -0.022216796875, 0.002750396728515625, 0.0022144317626953125, -0.045806884765625, 0.0139923095703125, -0.06524658203125, 0.0024547576904296875, -0.0017824172973632812, 0.11102294921875, 0.0016546249389648438, -0.01531219482421875, -0.021392822265625, 0.0008363723754882812, 0.047698974609375, -0.0765380859375, 0.007228851318359375, 0.0218353271484375, 0.0160675048828125, -0.0162353515625, -0.06817626953125, -0.0404052734375, -0.019805908203125, -0.0408935546875, 0.007656097412109375, -0.0257415771484375, -0.005611419677734375, 0.03216552734375, 0.0312347412109375, -0.05181884765625, -0.0219573974609375, -0.0237274169921875, -0.0174102783203125, 0.050384521484375, 0.001617431640625, 0.02935791015625, -0.039398193359375, -0.0224151611328125, -0.03387451171875, -0.022918701171875, 0.02679443359375, 0.02899169921875, 0.0227203369140625, -0.0286102294921875, 0.039825439453125, -0.02899169921875, 0.021636962890625, 0.021820068359375, -0.033905029296875, 0.0295257568359375, -0.0279693603515625, -0.0167083740234375, 
-0.00821685791015625, 0.08135986328125, 0.036346435546875, 0.00476837158203125, 0.035186767578125, -0.042236328125, -0.006084442138671875, -0.00909423828125, -0.06378173828125, -0.0112457275390625, 0.02496337890625, -0.0361328125, -0.0294036865234375, 0.0189971923828125, -0.06854248046875, -0.01226043701171875, 0.01629638671875, 0.04656982421875, -0.03607177734375, -0.0341796875, 0.007183074951171875, -0.020477294921875, 0.04327392578125, 0.01543426513671875, -0.053955078125, -0.0014467239379882812, 0.0367431640625, 0.0689697265625, 0.0218505859375, -0.004077911376953125, -0.018524169921875, 0.0182647705078125, -0.0064697265625, 0.0246124267578125, -0.00994110107421875, -0.042694091796875, 0.0101776123046875, 0.02130126953125, -0.00229644775390625, -0.03582763671875, 0.0333251953125, -0.011993408203125, 0.019744873046875, -0.006282806396484375, -0.0184783935546875, -0.042510986328125, 0.02044677734375, -0.04498291015625, 0.08575439453125, 0.037445068359375, -0.067138671875, -0.0038928985595703125, -0.048004150390625, -0.03515625, 0.006511688232421875, 0.0108489990234375, -0.0740966796875, -0.020904541015625, 0.0276641845703125, 0.05450439453125, 0.00893402099609375, 0.0022335052490234375, -0.0584716796875, 0.0009441375732421875, 0.0055389404296875, 0.0203399658203125, 0.0943603515625, 0.0019330978393554688, -0.0391845703125, 0.006725311279296875, -0.0457763671875, -0.00539398193359375, 0.057647705078125, -0.016357421875, -0.0196075439453125, -0.021270751953125, -0.0062255859375, 0.0018835067749023438, 0.044403076171875, -0.0293731689453125, 0.05169677734375, -0.0268402099609375, 0.047454833984375, 0.050689697265625, 0.0036106109619140625, 0.0391845703125, -0.059722900390625, 0.0295257568359375, 0.0006775856018066406, 0.0186920166015625, -0.01432037353515625, -0.0190582275390625, -0.07501220703125, -0.02740478515625, -0.0113372802734375, 0.0313720703125, -0.05517578125, 0.0305633544921875, -0.0242767333984375, -0.057464599609375, -0.032928466796875, 
0.0023632049560546875, 0.0212249755859375, 0.0165863037109375, 0.01470184326171875, -0.029083251953125, -0.058441162109375, -0.0745849609375, 0.0005159378051757812, -0.03070068359375, 0.0038280487060546875, 0.034332275390625, 0.07427978515625, -0.0294342041015625, 0.050079345703125, -0.048797607421875, -0.01357269287109375, -0.01425933837890625, -0.016204833984375, 0.03839111328125, 0.0535888671875, 0.06463623046875, -0.03656005859375, -0.03387451171875, -0.006862640380859375, -0.05645751953125, 0.01453399658203125, -0.0162353515625, -0.031280517578125, 0.0147552490234375, 0.00543975830078125, -0.0711669921875, 0.024688720703125, 0.0211181640625, -0.0156402587890625, 0.0535888671875, 0.0023040771484375, 0.005527496337890625, -0.08306884765625, 0.011810302734375, 0.026031494140625, -0.0237274169921875, -0.017303466796875, 0.03399658203125, -0.0012350082397460938, 0.011444091796875, -0.036895751953125, 0.05517578125, -0.03143310546875, 0.0033416748046875, 0.0125579833984375, 0.00797271728515625, 0.0138397216796875, 0.0374755859375, -0.01444244384765625, 0.052093505859375, 0.0517578125, -0.035614013671875, 0.04058837890625, 0.01776123046875, -0.01000213623046875, 0.0240020751953125, -0.038726806640625, -0.0007715225219726562, 0.0243377685546875, 0.0269317626953125, -0.0672607421875, -0.025665283203125, 0.052276611328125, -0.04638671875, 0.0264739990234375, -0.0157318115234375, -0.0294647216796875, -0.03448486328125, -0.0638427734375, 0.004863739013671875, 0.04638671875, -0.037445068359375, 0.049652099609375, 0.0219573974609375, 0.0032024383544921875, -0.031097412109375, -0.043792724609375, -0.02783203125, -0.0230712890625, -0.05303955078125, 0.0216827392578125, -0.0077667236328125, -0.0241241455078125, -0.004291534423828125, -0.00366973876953125, -0.0214996337890625, -0.00675201416015625, 0.0271453857421875, 0.03131103515625, -0.04498291015625, -0.0271453857421875, -0.0110931396484375, 0.00412750244140625, 0.01435089111328125, -0.0236053466796875, 0.0238189697265625, 
-0.0176544189453125, -0.024749755859375, -0.044219970703125, -0.0207977294921875, 0.05926513671875, -0.0148468017578125, 0.032928466796875, 0.05615234375, -0.0199737548828125, -0.003215789794921875, -0.0455322265625, -0.0269927978515625, -0.037872314453125, 0.017242431640625, -0.021331787109375, -0.039306640625, 0.032470703125, 0.0106964111328125, 0.015045166015625, 0.05303955078125, 0.0206146240234375, -0.017578125, 0.068359375, 0.04437255859375, -0.0221099853515625, 0.047454833984375, -0.047698974609375, -0.01727294921875, -0.058929443359375, -0.0168609619140625, -0.033355712890625, -0.002910614013671875, -0.0621337890625, -0.024658203125, 0.0225372314453125, 0.0185699462890625, -0.037017822265625, 0.05224609375, -0.07086181640625, -0.0090484619140625, 0.0638427734375, 0.0247039794921875, 0.011077880859375, 0.003391265869140625, -0.0240936279296875, 0.003360748291015625, -0.0283203125, -0.03485107421875, 0.06787109375, 0.0182647705078125, 0.0355224609375, 0.00605010986328125, 0.046875, -0.0158843994140625, 0.01473236083984375, -0.0469970703125, 0.0472412109375, 0.0155029296875, -0.06195068359375, -0.02557373046875, -0.007434844970703125, -0.062744140625, 0.024383544921875, -0.0134429931640625, -0.048919677734375, -0.0189056396484375, -0.00554656982421875, -0.0248565673828125, 0.046783447265625, -0.0224761962890625, 0.031707763671875, -0.0302886962890625, -0.037322998046875, 0.004047393798828125, -0.048492431640625, 0.046844482421875, 0.0037822723388671875, 0.004566192626953125, -0.017303466796875, -0.011993408203125, 0.0657958984375, -0.05450439453125, 0.04278564453125, -0.0041046142578125, 0.00908660888671875, 0.036468505859375, 0.0036945343017578125, 0.054718017578125, 0.0285797119140625, -0.01727294921875, 0.004688262939453125, 0.015899658203125, -0.01531982421875, -0.039276123046875, 0.03765869140625, -0.05401611328125, -0.0574951171875, -0.0533447265625, -0.0072479248046875, 0.0275115966796875, 0.0285186767578125, 0.0124053955078125, 0.0037078857421875, 
0.01629638671875, 0.02362060546875, 0.03814697265625, -0.0169219970703125, 0.05194091796875, 0.02392578125, -0.03326416015625, -0.052947998046875, 0.052581787109375, -0.010345458984375, 0.0031299591064453125, 0.040740966796875, 0.0166778564453125, -0.0298919677734375, -0.01198577880859375, -0.041107177734375, 0.042633056640625, -0.0396728515625, -0.03021240234375, -0.03619384765625, -0.045166015625, -0.040069580078125, -0.0299530029296875, -0.026885986328125, -0.015655517578125, -0.03997802734375, 0.016998291015625, 0.06396484375, 0.041534423828125, -0.0168914794921875, 0.0295867919921875, -0.044097900390625, 0.01244354248046875, 0.0283660888671875, 0.0109405517578125, -0.004032135009765625, -0.041839599609375, -0.005786895751953125, 0.00858306884765625, -0.048675537109375, -0.057281494140625, 0.05322265625, -0.0071868896484375, 0.04547119140625, 0.0328369140625, 0.00665283203125, 0.0633544921875, 0.00865936279296875, 0.0484619140625, 0.0198516845703125, -0.07171630859375, 0.05718994140625, -0.04547119140625, 0.01206207275390625, 0.02215576171875, 0.0343017578125, -0.0241851806640625, -0.021636962890625, -0.054931640625, -0.053466796875, 0.050445556640625, 0.03369140625, -0.0093231201171875, 0.037322998046875, 0.031494140625, -0.017791748046875, 0.004116058349609375, -0.06829833984375, -0.041839599609375, -0.0223541259765625, 0.005603790283203125, 0.0021228790283203125, -0.00960540771484375, 0.002117156982421875, -0.03485107421875, 0.07781982421875, -0.0082855224609375, 0.011810302734375, 0.021209716796875, -0.0000731348991394043, -0.01322174072265625, -0.01058197021484375, 0.0345458984375, 0.0347900390625, -0.034637451171875, -0.0147552490234375, 0.01444244384765625, -0.0303955078125, 0.0110015869140625, 0.007450103759765625, -0.0124969482421875, 0.020843505859375, 0.0369873046875, 0.043731689453125, 0.01529693603515625, -0.022186279296875, 0.046478271484375, 0.0160064697265625, -0.030303955078125, -0.03271484375, 0.00208282470703125, 0.0313720703125, 
0.03179931640625, 0.02685546875, -0.00832366943359375, -0.004364013671875, -0.022186279296875, 0.02178955078125, 0.0533447265625, -0.0261383056640625, -0.0092926025390625, 0.047088623046875, 0.0041046142578125, -0.016021728515625, 0.04205322265625, -0.01318359375, -0.05194091796875, 0.06878662109375, 0.017425537109375, 0.066650390625, 0.0021648406982421875, 0.0002225637435913086, 0.061279296875, 0.025390625, -0.01203155517578125, 0.00890350341796875, 0.004474639892578125, -0.030792236328125, 0.0094146728515625, -0.05615234375, -0.024688720703125, 0.03558349609375, -0.04010009765625, 0.043914794921875, -0.056854248046875, -0.01316070556640625, -0.0018815994262695312, 0.032379150390625, -0.036895751953125, 0.01306915283203125, -0.0032215118408203125, 0.052947998046875, -0.03289794921875, 0.052764892578125, 0.07598876953125, -0.060028076171875, -0.071044921875, -0.004611968994140625, 0.0242919921875, -0.0731201171875, 0.036651611328125, 0.0218353271484375, 0.0070953369140625, 0.00026917457580566406, -0.056884765625, -0.07379150390625, 0.1021728515625, 0.01354217529296875, -0.037322998046875, -0.0015468597412109375, -0.02349853515625, 0.00522613525390625, -0.01500701904296875, 0.046295166015625, 0.015167236328125, 0.047821044921875, 0.0025081634521484375, -0.07574462890625, 0.03314208984375, -0.020355224609375, 0.0009870529174804688, 0.00502777099609375, -0.07086181640625, 0.07513427734375, -0.00495147705078125, -0.01666259765625, 0.024658203125, 0.040008544921875, 0.052734375, 0.016693115234375, 0.03167724609375, 0.0440673828125, 0.0209808349609375, -0.015838623046875, 0.04522705078125, -0.02587890625, 0.0667724609375, 0.038421630859375, 0.005786895751953125, 0.045440673828125, 0.01800537109375, -0.036956787109375, 0.049835205078125, 0.074951171875, -0.0279693603515625, 0.0220184326171875, 0.023895263671875, -0.018524169921875, -0.0088653564453125, -0.00002181529998779297, -0.049652099609375, 0.0196533203125, 0.0197601318359375, -0.0294647216796875, 
-0.0088043212890625, -0.028594970703125, 0.0157928466796875, -0.01751708984375, -0.0096893310546875, 0.03997802734375, 0.0072479248046875, -0.03460693359375, 0.04840087890625, -0.011322021484375, 0.085205078125, -0.060333251953125, -0.00506591796875, -0.029083251953125, 0.030792236328125, -0.0195770263671875, -0.06817626953125, -0.0004215240478515625, -0.035491943359375, -0.005687713623046875, -0.00899505615234375, 0.06292724609375, -0.001556396484375, -0.0350341796875, 0.0311279296875, 0.036376953125, 0.01526641845703125, -0.007236480712890625, -0.0867919921875, 0.00641632080078125, 0.0243377685546875, -0.05645751953125, 0.014007568359375, 0.0350341796875, 0.00020742416381835938, 0.07257080078125, 0.0623779296875, 0.006732940673828125, -0.0150299072265625, -0.044830322265625, 0.07330322265625, -0.05474853515625, -0.020904541015625, -0.081787109375, 0.04241943359375, -0.004093170166015625, -0.025604248046875, 0.0592041015625, 0.049713134765625, 0.0787353515625, -0.01212310791015625, 0.0716552734375, -0.020965576171875, 0.0171966552734375, -0.00949859619140625, 0.07818603515625, -0.0806884765625, -0.01593017578125, -0.03460693359375, -0.04736328125, -0.01528167724609375, 0.06671142578125, 0.005863189697265625, 0.0189971923828125, 0.043060302734375, 0.08819580078125, -0.00759124755859375, -0.0355224609375, 0.007358551025390625, 0.000164031982421875, 0.01447296142578125, 0.0509033203125, 0.05078125, -0.041656494140625, 0.057891845703125, -0.053863525390625, 0.0009493827819824219, -0.043182373046875, -0.0595703125, -0.06280517578125, -0.038299560546875, -0.0263671875, -0.05181884765625, -0.0002624988555908203, 0.0662841796875, 0.07464599609375, -0.050018310546875, -0.032684326171875, 0.01214599609375, 0.026702880859375, -0.0062713623046875, -0.0237579345703125, 0.026458740234375, 0.01067352294921875, -0.055572509765625, -0.00894927978515625, 0.027252197265625, 0.0282135009765625, -0.022430419921875, -0.01461029052734375, -0.035003662109375, -0.0208282470703125, 
0.01849365234375, 0.040313720703125, -0.0721435546875, -0.00029778480529785156, -0.0082244873046875, -0.04876708984375, 0.022613525390625, 0.0184783935546875, -0.042083740234375, 0.0187225341796875, 0.047698974609375, 0.0189208984375, 0.0290069580078125, -0.011444091796875, 0.0219573974609375, -0.023468017578125, 0.00794219970703125, -0.0008673667907714844, 0.04229736328125, 0.018585205078125, -0.03497314453125, 0.055694580078125, 0.049041748046875, -0.027740478515625, -0.052001953125, -0.0027332305908203125, -0.0714111328125, -0.017791748046875, 0.08026123046875, -0.0186309814453125, -0.0290374755859375, 0.033538818359375, -0.021331787109375, 0.0526123046875, -0.057769775390625, 0.01258087158203125, 0.036468505859375, -0.00801849365234375, -0.00728607177734375, -0.051055908203125, 0.0352783203125, 0.0172882080078125, -0.049468994140625, -0.03485107421875, 0.001934051513671875, 0.03167724609375, 0.0308380126953125, 0.0428466796875, 0.007358551025390625, 0.00740814208984375, 0.005924224853515625, 0.027557373046875, -0.0024394989013671875, 0.035888671875, -0.0186920166015625, -0.0050811767578125, -0.0180206298828125, -0.0302581787109375 ] ]
TheBloke/WizardCoder-Python-13B-V1.0-GPTQ
2023-09-27T12:46:30.000Z
[ "transformers", "safetensors", "llama", "text-generation", "code", "arxiv:2304.12244", "arxiv:2306.08568", "arxiv:2308.09583", "arxiv:2303.08774", "license:llama2", "model-index", "text-generation-inference", "region:us" ]
text-generation
TheBloke
null
null
TheBloke/WizardCoder-Python-13B-V1.0-GPTQ
69
5,851
transformers
2023-08-27T17:40:25
--- license: llama2 library_name: transformers tags: - code metrics: - code_eval base_model: WizardLM/WizardCoder-Python-13B-V1.0 inference: false model_creator: WizardLM model_type: llama prompt_template: 'Below is an instruction that describes a task. Write a response that appropriately completes the request. ### Instruction: {prompt} ### Response: ' quantized_by: TheBloke model-index: - name: WizardCoder-Python-13B-V1.0 results: - task: type: text-generation dataset: name: HumanEval type: openai_humaneval metrics: - type: pass@1 value: 0.64 name: pass@1 verified: false --- <!-- header start --> <!-- 200823 --> <div style="width: auto; margin-left: auto; margin-right: auto"> <img src="https://i.imgur.com/EBdldam.jpg" alt="TheBlokeAI" style="width: 100%; min-width: 400px; display: block; margin: auto;"> </div> <div style="display: flex; justify-content: space-between; width: 100%;"> <div style="display: flex; flex-direction: column; align-items: flex-start;"> <p style="margin-top: 0.5em; margin-bottom: 0em;"><a href="https://discord.gg/theblokeai">Chat & support: TheBloke's Discord server</a></p> </div> <div style="display: flex; flex-direction: column; align-items: flex-end;"> <p style="margin-top: 0.5em; margin-bottom: 0em;"><a href="https://www.patreon.com/TheBlokeAI">Want to contribute? 
TheBloke's Patreon page</a></p> </div> </div> <div style="text-align:center; margin-top: 0em; margin-bottom: 0em"><p style="margin-top: 0.25em; margin-bottom: 0em;">TheBloke's LLM work is generously supported by a grant from <a href="https://a16z.com">andreessen horowitz (a16z)</a></p></div> <hr style="margin-top: 1.0em; margin-bottom: 1.0em;"> <!-- header end --> # WizardCoder Python 13B V1.0 - GPTQ - Model creator: [WizardLM](https://huggingface.co/WizardLM) - Original model: [WizardCoder Python 13B V1.0](https://huggingface.co/WizardLM/WizardCoder-Python-13B-V1.0) <!-- description start --> ## Description This repo contains GPTQ model files for [WizardLM's WizardCoder Python 13B V1.0](https://huggingface.co/WizardLM/WizardCoder-Python-13B-V1.0). Multiple GPTQ parameter permutations are provided; see Provided Files below for details of the options provided, their parameters, and the software used to create them. <!-- description end --> <!-- repositories-available start --> ## Repositories available * [AWQ model(s) for GPU inference.](https://huggingface.co/TheBloke/WizardCoder-Python-13B-V1.0-AWQ) * [GPTQ models for GPU inference, with multiple quantisation parameter options.](https://huggingface.co/TheBloke/WizardCoder-Python-13B-V1.0-GPTQ) * [2, 3, 4, 5, 6 and 8-bit GGUF models for CPU+GPU inference](https://huggingface.co/TheBloke/WizardCoder-Python-13B-V1.0-GGUF) * [WizardLM's original unquantised fp16 model in pytorch format, for GPU inference and for further conversions](https://huggingface.co/WizardLM/WizardCoder-Python-13B-V1.0) <!-- repositories-available end --> <!-- prompt-template start --> ## Prompt template: Alpaca ``` Below is an instruction that describes a task. Write a response that appropriately completes the request. 
### Instruction: {prompt} ### Response: ``` <!-- prompt-template end --> <!-- README_GPTQ.md-provided-files start --> ## Provided files and GPTQ parameters Multiple quantisation parameters are provided, to allow you to choose the best one for your hardware and requirements. Each separate quant is in a different branch. See below for instructions on fetching from different branches. All recent GPTQ files are made with AutoGPTQ, and all files in non-main branches are made with AutoGPTQ. Files in the `main` branch which were uploaded before August 2023 were made with GPTQ-for-LLaMa. <details> <summary>Explanation of GPTQ parameters</summary> - Bits: The bit size of the quantised model. - GS: GPTQ group size. Higher numbers use less VRAM, but have lower quantisation accuracy. "None" is the lowest possible value. - Act Order: True or False. Also known as `desc_act`. True results in better quantisation accuracy. Some GPTQ clients have had issues with models that use Act Order plus Group Size, but this is generally resolved now. - Damp %: A GPTQ parameter that affects how samples are processed for quantisation. 0.01 is default, but 0.1 results in slightly better accuracy. - GPTQ dataset: The dataset used for quantisation. Using a dataset more appropriate to the model's training can improve quantisation accuracy. Note that the GPTQ dataset is not the same as the dataset used to train the model - please refer to the original model repo for details of the training dataset(s). - Sequence Length: The length of the dataset sequences used for quantisation. Ideally this is the same as the model sequence length. For some very long sequence models (16+K), a lower sequence length may have to be used. Note that a lower sequence length does not limit the sequence length of the quantised model. It only impacts the quantisation accuracy on longer inference sequences. 
- ExLlama Compatibility: Whether this file can be loaded with ExLlama, which currently only supports Llama models in 4-bit. </details> | Branch | Bits | GS | Act Order | Damp % | GPTQ Dataset | Seq Len | Size | ExLlama | Desc | | ------ | ---- | -- | --------- | ------ | ------------ | ------- | ---- | ------- | ---- | | [main](https://huggingface.co/TheBloke/WizardCoder-Python-13B-V1.0-GPTQ/tree/main) | 4 | 128 | No | 0.1 | [Evol Instruct Code](https://huggingface.co/datasets/nickrosh/Evol-Instruct-Code-80k-v1) | 8192 | 7.26 GB | Yes | 4-bit, without Act Order and group size 128g. | | [gptq-4bit-32g-actorder_True](https://huggingface.co/TheBloke/WizardCoder-Python-13B-V1.0-GPTQ/tree/gptq-4bit-32g-actorder_True) | 4 | 32 | Yes | 0.1 | [Evol Instruct Code](https://huggingface.co/datasets/nickrosh/Evol-Instruct-Code-80k-v1) | 8192 | 8.00 GB | Yes | 4-bit, with Act Order and group size 32g. Gives highest possible inference quality, with maximum VRAM usage. | | [gptq-4bit-64g-actorder_True](https://huggingface.co/TheBloke/WizardCoder-Python-13B-V1.0-GPTQ/tree/gptq-4bit-64g-actorder_True) | 4 | 64 | Yes | 0.1 | [Evol Instruct Code](https://huggingface.co/datasets/nickrosh/Evol-Instruct-Code-80k-v1) | 8192 | 7.51 GB | Yes | 4-bit, with Act Order and group size 64g. Uses less VRAM than 32g, but with slightly lower accuracy. | | [gptq-4bit-128g-actorder_True](https://huggingface.co/TheBloke/WizardCoder-Python-13B-V1.0-GPTQ/tree/gptq-4bit-128g-actorder_True) | 4 | 128 | Yes | 0.1 | [Evol Instruct Code](https://huggingface.co/datasets/nickrosh/Evol-Instruct-Code-80k-v1) | 8192 | 7.26 GB | Yes | 4-bit, with Act Order and group size 128g. Uses even less VRAM than 64g, but with slightly lower accuracy. 
| | [gptq-8bit--1g-actorder_True](https://huggingface.co/TheBloke/WizardCoder-Python-13B-V1.0-GPTQ/tree/gptq-8bit--1g-actorder_True) | 8 | None | Yes | 0.1 | [Evol Instruct Code](https://huggingface.co/datasets/nickrosh/Evol-Instruct-Code-80k-v1) | 8192 | 13.36 GB | No | 8-bit, with Act Order. No group size, to lower VRAM requirements. | | [gptq-8bit-128g-actorder_True](https://huggingface.co/TheBloke/WizardCoder-Python-13B-V1.0-GPTQ/tree/gptq-8bit-128g-actorder_True) | 8 | 128 | Yes | 0.1 | [Evol Instruct Code](https://huggingface.co/datasets/nickrosh/Evol-Instruct-Code-80k-v1) | 8192 | 13.65 GB | No | 8-bit, with group size 128g for higher inference quality and with Act Order for even higher accuracy. | <!-- README_GPTQ.md-provided-files end --> <!-- README_GPTQ.md-download-from-branches start --> ## How to download from branches - In text-generation-webui, you can add `:branch` to the end of the download name, eg `TheBloke/WizardCoder-Python-13B-V1.0-GPTQ:main` - With Git, you can clone a branch with: ``` git clone --single-branch --branch main https://huggingface.co/TheBloke/WizardCoder-Python-13B-V1.0-GPTQ ``` - In Python Transformers code, the branch is the `revision` parameter; see below. <!-- README_GPTQ.md-download-from-branches end --> <!-- README_GPTQ.md-text-generation-webui start --> ## How to easily download and use this model in [text-generation-webui](https://github.com/oobabooga/text-generation-webui). Please make sure you're using the latest version of [text-generation-webui](https://github.com/oobabooga/text-generation-webui). It is strongly recommended to use the text-generation-webui one-click-installers unless you're sure you know how to make a manual install. 1. Click the **Model tab**. 2. Under **Download custom model or LoRA**, enter `TheBloke/WizardCoder-Python-13B-V1.0-GPTQ`. 
- To download from a specific branch, enter for example `TheBloke/WizardCoder-Python-13B-V1.0-GPTQ:main` - see Provided Files above for the list of branches for each option. 3. Click **Download**. 4. The model will start downloading. Once it's finished it will say "Done". 5. In the top left, click the refresh icon next to **Model**. 6. In the **Model** dropdown, choose the model you just downloaded: `WizardCoder-Python-13B-V1.0-GPTQ` 7. The model will automatically load, and is now ready for use! 8. If you want any custom settings, set them and then click **Save settings for this model** followed by **Reload the Model** in the top right. * Note that you do not need to and should not set manual GPTQ parameters any more. These are set automatically from the file `quantize_config.json`. 9. Once you're ready, click the **Text Generation tab** and enter a prompt to get started! <!-- README_GPTQ.md-text-generation-webui end --> <!-- README_GPTQ.md-use-from-python start --> ## How to use this GPTQ model from Python code ### Install the necessary packages Requires: Transformers 4.32.0 or later, Optimum 1.12.0 or later, and AutoGPTQ 0.4.2 or later. ```shell pip3 install transformers>=4.32.0 optimum>=1.12.0 pip3 install auto-gptq --extra-index-url https://huggingface.github.io/autogptq-index/whl/cu118/ # Use cu117 if on CUDA 11.7 ``` If you have problems installing AutoGPTQ using the pre-built wheels, install it from source instead: ```shell pip3 uninstall -y auto-gptq git clone https://github.com/PanQiWei/AutoGPTQ cd AutoGPTQ pip3 install . ``` ### For CodeLlama models only: you must use Transformers 4.33.0 or later. 
If 4.33.0 is not yet released when you read this, you will need to install Transformers from source: ```shell pip3 uninstall -y transformers pip3 install git+https://github.com/huggingface/transformers.git ``` ### You can then use the following code ```python from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline model_name_or_path = "TheBloke/WizardCoder-Python-13B-V1.0-GPTQ" # To use a different branch, change revision # For example: revision="main" model = AutoModelForCausalLM.from_pretrained(model_name_or_path, device_map="auto", trust_remote_code=False, revision="main") tokenizer = AutoTokenizer.from_pretrained(model_name_or_path, use_fast=True) prompt = "Tell me about AI" prompt_template=f'''Below is an instruction that describes a task. Write a response that appropriately completes the request. ### Instruction: {prompt} ### Response: ''' print("\n\n*** Generate:") input_ids = tokenizer(prompt_template, return_tensors='pt').input_ids.cuda() output = model.generate(inputs=input_ids, temperature=0.7, do_sample=True, top_p=0.95, top_k=40, max_new_tokens=512) print(tokenizer.decode(output[0])) # Inference can also be done using transformers' pipeline print("*** Pipeline:") pipe = pipeline( "text-generation", model=model, tokenizer=tokenizer, max_new_tokens=512, do_sample=True, temperature=0.7, top_p=0.95, top_k=40, repetition_penalty=1.1 ) print(pipe(prompt_template)[0]['generated_text']) ``` <!-- README_GPTQ.md-use-from-python end --> <!-- README_GPTQ.md-compatibility start --> ## Compatibility The files provided are tested to work with AutoGPTQ, both via Transformers and using AutoGPTQ directly. They should also work with [Occ4m's GPTQ-for-LLaMa fork](https://github.com/0cc4m/KoboldAI). [ExLlama](https://github.com/turboderp/exllama) is compatible with Llama models in 4-bit. Please see the Provided Files table above for per-file compatibility. 
[Huggingface Text Generation Inference (TGI)](https://github.com/huggingface/text-generation-inference) is compatible with all GPTQ models. <!-- README_GPTQ.md-compatibility end --> <!-- footer start --> <!-- 200823 --> ## Discord For further support, and discussions on these models and AI in general, join us at: [TheBloke AI's Discord server](https://discord.gg/theblokeai) ## Thanks, and how to contribute Thanks to the [chirper.ai](https://chirper.ai) team! Thanks to Clay from [gpus.llm-utils.org](llm-utils)! I've had a lot of people ask if they can contribute. I enjoy providing models and helping people, and would love to be able to spend even more time doing it, as well as expanding into new projects like fine tuning/training. If you're able and willing to contribute it will be most gratefully received and will help me to keep providing more models, and to start work on new AI projects. Donaters will get priority support on any and all AI/LLM/model questions and requests, access to a private Discord room, plus other benefits. * Patreon: https://patreon.com/TheBlokeAI * Ko-Fi: https://ko-fi.com/TheBlokeAI **Special thanks to**: Aemon Algiz. 
**Patreon special mentions**: Alicia Loh, Stephen Murray, K, Ajan Kanaga, RoA, Magnesian, Deo Leter, Olakabola, Eugene Pentland, zynix, Deep Realms, Raymond Fosdick, Elijah Stavena, Iucharbius, Erik Bjäreholt, Luis Javier Navarrete Lozano, Nicholas, theTransient, John Detwiler, alfie_i, knownsqashed, Mano Prime, Willem Michiel, Enrico Ros, LangChain4j, OG, Michael Dempsey, Pierre Kircher, Pedro Madruga, James Bentley, Thomas Belote, Luke @flexchar, Leonard Tan, Johann-Peter Hartmann, Illia Dulskyi, Fen Risland, Chadd, S_X, Jeff Scroggin, Ken Nordquist, Sean Connelly, Artur Olbinski, Swaroop Kallakuri, Jack West, Ai Maven, David Ziegler, Russ Johnson, transmissions 11, John Villwock, Alps Aficionado, Clay Pascal, Viktor Bowallius, Subspace Studios, Rainer Wilmers, Trenton Dambrowitz, vamX, Michael Levine, 준교 김, Brandon Frisco, Kalila, Trailburnt, Randy H, Talal Aujan, Nathan Dryer, Vadim, 阿明, ReadyPlayerEmma, Tiffany J. Kim, George Stoitzev, Spencer Kim, Jerry Meng, Gabriel Tamborski, Cory Kujawski, Jeffrey Morgan, Spiking Neurons AB, Edmond Seymore, Alexandros Triantafyllidis, Lone Striker, Cap'n Zoog, Nikolai Manek, danny, ya boyyy, Derek Yates, usrbinkat, Mandus, TL, Nathan LeClaire, subjectnull, Imad Khwaja, webtim, Raven Klaugh, Asp the Wyvern, Gabriel Puliatti, Caitlyn Gatomon, Joseph William Delisle, Jonathan Leane, Luke Pendergrass, SuperWojo, Sebastain Graf, Will Dee, Fred von Graf, Andrey, Dan Guido, Daniel P. Andersen, Nitin Borwankar, Elle, Vitor Caleffi, biorpg, jjj, NimbleBox.ai, Pieter, Matthew Berman, terasurfer, Michael Davis, Alex, Stanislav Ovsiannikov Thank you to all my generous patrons and donaters! And thank you again to a16z for their generous grant. 
<!-- footer end --> # Original model card: WizardLM's WizardCoder Python 13B V1.0 <p align="center"> 🤗 <a href="https://huggingface.co/WizardLM" target="_blank">HF Repo</a> •🐱 <a href="https://github.com/nlpxucan/WizardLM" target="_blank">Github Repo</a> • 🐦 <a href="https://twitter.com/WizardLM_AI" target="_blank">Twitter</a> • 📃 <a href="https://arxiv.org/abs/2304.12244" target="_blank">[WizardLM]</a> • 📃 <a href="https://arxiv.org/abs/2306.08568" target="_blank">[WizardCoder]</a> • 📃 <a href="https://arxiv.org/abs/2308.09583" target="_blank">[WizardMath]</a> <br> </p> <p align="center"> 👋 Join our <a href="https://discord.gg/VZjjHtWrKs" target="_blank">Discord</a> </p> ## News - 🔥🔥🔥[2023/08/26] We released **WizardCoder-Python-34B-V1.0** , which achieves the **73.2 pass@1** and surpasses **GPT4 (2023/03/15)**, **ChatGPT-3.5**, and **Claude2** on the [HumanEval Benchmarks](https://github.com/openai/human-eval). - [2023/06/16] We released **WizardCoder-15B-V1.0** , which achieves the **57.3 pass@1** and surpasses **Claude-Plus (+6.8)**, **Bard (+15.3)** and **InstructCodeT5+ (+22.3)** on the [HumanEval Benchmarks](https://github.com/openai/human-eval). ❗Note: There are two HumanEval results of GPT4 and ChatGPT-3.5. The 67.0 and 48.1 are reported by the official GPT4 Report (2023/03/15) of [OpenAI](https://arxiv.org/abs/2303.08774). The 82.0 and 72.5 are tested by ourselves with the latest API (2023/08/26). 
| Model | Checkpoint | Paper | HumanEval | MBPP | Demo | License | | ----- |------| ---- |------|-------| ----- | ----- | | WizardCoder-Python-34B-V1.0 | 🤗 <a href="https://huggingface.co/WizardLM/WizardCoder-Python-34B-V1.0" target="_blank">HF Link</a> | 📃 <a href="https://arxiv.org/abs/2306.08568" target="_blank">[WizardCoder]</a> | 73.2 | 61.2 | [Demo](http://47.103.63.15:50085/) | <a href="https://ai.meta.com/resources/models-and-libraries/llama-downloads/" target="_blank">Llama2</a> | | WizardCoder-15B-V1.0 | 🤗 <a href="https://huggingface.co/WizardLM/WizardCoder-15B-V1.0" target="_blank">HF Link</a> | 📃 <a href="https://arxiv.org/abs/2306.08568" target="_blank">[WizardCoder]</a> | 59.8 |50.6 | -- | <a href="https://huggingface.co/spaces/bigcode/bigcode-model-license-agreement" target="_blank">OpenRAIL-M</a> | | WizardCoder-Python-13B-V1.0 | 🤗 <a href="https://huggingface.co/WizardLM/WizardCoder-Python-13B-V1.0" target="_blank">HF Link</a> | 📃 <a href="https://arxiv.org/abs/2306.08568" target="_blank">[WizardCoder]</a> | 64.0 | 55.6 | -- | <a href="https://ai.meta.com/resources/models-and-libraries/llama-downloads/" target="_blank">Llama2</a> | | WizardCoder-Python-7B-V1.0 | 🤗 <a href="https://huggingface.co/WizardLM/WizardCoder-Python-7B-V1.0" target="_blank">HF Link</a> | 📃 <a href="https://arxiv.org/abs/2306.08568" target="_blank">[WizardCoder]</a> | 55.5 | 51.6 | [Demo](http://47.103.63.15:50088/) | <a href="https://ai.meta.com/resources/models-and-libraries/llama-downloads/" target="_blank">Llama2</a> | | WizardCoder-3B-V1.0 | 🤗 <a href="https://huggingface.co/WizardLM/WizardCoder-3B-V1.0" target="_blank">HF Link</a> | 📃 <a href="https://arxiv.org/abs/2306.08568" target="_blank">[WizardCoder]</a> | 34.8 |37.4 | -- | <a href="https://huggingface.co/spaces/bigcode/bigcode-model-license-agreement" target="_blank">OpenRAIL-M</a> | | WizardCoder-1B-V1.0 | 🤗 <a href="https://huggingface.co/WizardLM/WizardCoder-1B-V1.0" target="_blank">HF Link</a> | 📃 <a 
href="https://arxiv.org/abs/2306.08568" target="_blank">[WizardCoder]</a> | 23.8 |28.6 | -- | <a href="https://huggingface.co/spaces/bigcode/bigcode-model-license-agreement" target="_blank">OpenRAIL-M</a> | - Our **WizardMath-70B-V1.0** model slightly outperforms some closed-source LLMs on the GSM8K, including **ChatGPT 3.5**, **Claude Instant 1** and **PaLM 2 540B**. - Our **WizardMath-70B-V1.0** model achieves **81.6 pass@1** on the [GSM8k Benchmarks](https://github.com/openai/grade-school-math), which is **24.8** points higher than the SOTA open-source LLM, and achieves **22.7 pass@1** on the [MATH Benchmarks](https://github.com/hendrycks/math), which is **9.2** points higher than the SOTA open-source LLM. <font size=4> | Model | Checkpoint | Paper | GSM8k | MATH |Online Demo| License| | ----- |------| ---- |------|-------| ----- | ----- | | WizardMath-70B-V1.0 | 🤗 <a href="https://huggingface.co/WizardLM/WizardMath-70B-V1.0" target="_blank">HF Link</a> | 📃 <a href="https://arxiv.org/abs/2308.09583" target="_blank">[WizardMath]</a>| **81.6** | **22.7** |[Demo](http://47.103.63.15:50083/)| <a href="https://ai.meta.com/resources/models-and-libraries/llama-downloads/" target="_blank">Llama 2 </a> | | WizardMath-13B-V1.0 | 🤗 <a href="https://huggingface.co/WizardLM/WizardMath-13B-V1.0" target="_blank">HF Link</a> | 📃 <a href="https://arxiv.org/abs/2308.09583" target="_blank">[WizardMath]</a>| **63.9** | **14.0** |[Demo](http://47.103.63.15:50082/)| <a href="https://ai.meta.com/resources/models-and-libraries/llama-downloads/" target="_blank">Llama 2 </a> | | WizardMath-7B-V1.0 | 🤗 <a href="https://huggingface.co/WizardLM/WizardMath-7B-V1.0" target="_blank">HF Link</a> | 📃 <a href="https://arxiv.org/abs/2308.09583" target="_blank">[WizardMath]</a>| **54.9** | **10.7** | [Demo ](http://47.103.63.15:50080/)| <a href="https://ai.meta.com/resources/models-and-libraries/llama-downloads/" target="_blank">Llama 2 </a>| </font> - [08/09/2023] We released **WizardLM-70B-V1.0** 
model. Here is [Full Model Weight](https://huggingface.co/WizardLM/WizardLM-70B-V1.0). <font size=4> | <sup>Model</sup> | <sup>Checkpoint</sup> | <sup>Paper</sup> |<sup>MT-Bench</sup> | <sup>AlpacaEval</sup> | <sup>GSM8k</sup> | <sup>HumanEval</sup> | <sup>License</sup>| | ----- |------| ---- |------|-------| ----- | ----- | ----- | | <sup>**WizardLM-70B-V1.0**</sup> | <sup>🤗 <a href="https://huggingface.co/WizardLM/WizardLM-70B-V1.0" target="_blank">HF Link</a> </sup>|<sup>📃**Coming Soon**</sup>| <sup>**7.78**</sup> | <sup>**92.91%**</sup> |<sup>**77.6%**</sup> | <sup> **50.6**</sup>|<sup> <a href="https://ai.meta.com/resources/models-and-libraries/llama-downloads/" target="_blank">Llama 2 License </a></sup> | | <sup>WizardLM-13B-V1.2</sup> | <sup>🤗 <a href="https://huggingface.co/WizardLM/WizardLM-13B-V1.2" target="_blank">HF Link</a> </sup>| | <sup>7.06</sup> | <sup>89.17%</sup> |<sup>55.3%</sup> | <sup>36.6 </sup>|<sup> <a href="https://ai.meta.com/resources/models-and-libraries/llama-downloads/" target="_blank">Llama 2 License </a></sup> | | <sup>WizardLM-13B-V1.1</sup> |<sup> 🤗 <a href="https://huggingface.co/WizardLM/WizardLM-13B-V1.1" target="_blank">HF Link</a> </sup> | | <sup>6.76</sup> |<sup>86.32%</sup> | | <sup>25.0 </sup>| <sup>Non-commercial</sup>| | <sup>WizardLM-30B-V1.0</sup> | <sup>🤗 <a href="https://huggingface.co/WizardLM/WizardLM-30B-V1.0" target="_blank">HF Link</a></sup> | | <sup>7.01</sup> | | | <sup>37.8 </sup>| <sup>Non-commercial</sup> | | <sup>WizardLM-13B-V1.0</sup> | <sup>🤗 <a href="https://huggingface.co/WizardLM/WizardLM-13B-V1.0" target="_blank">HF Link</a> </sup> | | <sup>6.35</sup> | <sup>75.31%</sup> | | <sup> 24.0 </sup> | <sup>Non-commercial</sup>| | <sup>WizardLM-7B-V1.0 </sup>| <sup>🤗 <a href="https://huggingface.co/WizardLM/WizardLM-7B-V1.0" target="_blank">HF Link</a> </sup> |<sup> 📃 <a href="https://arxiv.org/abs/2304.12244" target="_blank">[WizardLM]</a> </sup>| | | |<sup>19.1 </sup>|<sup> Non-commercial</sup>| </font> 
## Comparing WizardCoder-Python-34B-V1.0 with Other LLMs. 🔥 The following figure shows that our **WizardCoder-Python-34B-V1.0 attains the second position in this benchmark**, surpassing GPT4 (2023/03/15, 73.2 vs. 67.0), ChatGPT-3.5 (73.2 vs. 72.5) and Claude2 (73.2 vs. 71.2). <p align="center" width="100%"> <a ><img src="https://raw.githubusercontent.com/nlpxucan/WizardLM/main/WizardCoder/imgs/compare_sota.png" alt="WizardCoder" style="width: 96%; min-width: 300px; display: block; margin: auto;"></a> </p> ## Prompt Format ``` "Below is an instruction that describes a task. Write a response that appropriately completes the request.\n\n### Instruction:\n{instruction}\n\n### Response:" ``` ## Inference Demo Script We provide the inference demo code [here](https://github.com/nlpxucan/WizardLM/tree/main/demo). Note: This script supports `WizardLM/WizardCoder-Python-34B/13B/7B-V1.0`. If you want to inference with `WizardLM/WizardCoder-15B/3B/1B-V1.0`, please change the `stop_tokens = ['</s>']` to `stop_tokens = ['<|endoftext|>']` in the script. ## Citation Please cite the repo if you use the data, method or code in this repo. ``` @article{luo2023wizardcoder, title={WizardCoder: Empowering Code Large Language Models with Evol-Instruct}, author={Luo, Ziyang and Xu, Can and Zhao, Pu and Sun, Qingfeng and Geng, Xiubo and Hu, Wenxiang and Tao, Chongyang and Ma, Jing and Lin, Qingwei and Jiang, Daxin}, journal={arXiv preprint arXiv:2306.08568}, year={2023} } ```
24,377
[ [ -0.03631591796875, -0.0565185546875, -0.006305694580078125, 0.0126190185546875, -0.01013946533203125, -0.0177459716796875, 0.0020351409912109375, -0.0205230712890625, 0.005725860595703125, 0.0274810791015625, -0.035614013671875, -0.035491943359375, -0.034576416015625, 0.0096282958984375, -0.027618408203125, 0.0789794921875, 0.00528717041015625, -0.01043701171875, -0.00402069091796875, -0.01013946533203125, -0.01544952392578125, -0.040374755859375, -0.05035400390625, -0.0161590576171875, 0.019561767578125, 0.0028705596923828125, 0.0643310546875, 0.033599853515625, 0.00481414794921875, 0.02667236328125, 0.00218963623046875, 0.01332855224609375, -0.0364990234375, -0.01229095458984375, 0.0148162841796875, -0.01430511474609375, -0.0443115234375, -0.0028533935546875, 0.03424072265625, 0.0155487060546875, -0.028411865234375, 0.01515960693359375, 0.0023403167724609375, 0.05670166015625, -0.03668212890625, 0.0216217041015625, -0.0293731689453125, 0.006072998046875, 0.0017118453979492188, 0.0005397796630859375, -0.0014047622680664062, -0.03948974609375, 0.0034999847412109375, -0.05853271484375, 0.023834228515625, -0.003047943115234375, 0.08538818359375, 0.01018524169921875, -0.042083740234375, 0.0007443428039550781, -0.0298614501953125, 0.039459228515625, -0.06658935546875, 0.0193634033203125, 0.039794921875, 0.010589599609375, -0.0248870849609375, -0.058746337890625, -0.05706787109375, -0.006465911865234375, -0.004070281982421875, 0.0134124755859375, -0.03326416015625, 0.004241943359375, 0.0350341796875, 0.0550537109375, -0.07080078125, -0.0176544189453125, -0.0308837890625, -0.0171356201171875, 0.058013916015625, 0.013427734375, 0.02630615234375, -0.0183258056640625, -0.0015811920166015625, -0.0264892578125, -0.041259765625, 0.00910186767578125, 0.0245819091796875, -0.00293731689453125, -0.0269317626953125, 0.041717529296875, -0.0255279541015625, 0.043609619140625, 0.0233154296875, -0.01546478271484375, 0.03131103515625, -0.037109375, -0.03643798828125, 
-0.016998291015625, 0.091552734375, 0.0347900390625, -0.004894256591796875, 0.00980377197265625, -0.005542755126953125, -0.01270294189453125, 0.01114654541015625, -0.08038330078125, -0.03826904296875, 0.041351318359375, -0.0302276611328125, -0.011993408203125, -0.00605010986328125, -0.056488037109375, -0.01055908203125, -0.002292633056640625, 0.0428466796875, -0.049957275390625, -0.0284576416015625, 0.016082763671875, -0.037353515625, 0.052764892578125, 0.0270538330078125, -0.06549072265625, 0.032867431640625, 0.02642822265625, 0.049896240234375, 0.01453399658203125, -0.0201263427734375, -0.022918701171875, -0.0039215087890625, -0.01430511474609375, 0.0259552001953125, -0.0046539306640625, -0.03302001953125, -0.012054443359375, 0.021697998046875, -0.00812530517578125, -0.02398681640625, 0.0241241455078125, -0.035247802734375, 0.034820556640625, -0.01629638671875, -0.0460205078125, -0.0276947021484375, 0.0074462890625, -0.04547119140625, 0.08050537109375, 0.039337158203125, -0.061004638671875, 0.00887298583984375, -0.037353515625, -0.01568603515625, -0.002933502197265625, -0.003551483154296875, -0.044952392578125, -0.00902557373046875, 0.0190582275390625, 0.017852783203125, -0.025787353515625, 0.0053558349609375, -0.005489349365234375, -0.02374267578125, 0.02239990234375, -0.047393798828125, 0.09783935546875, 0.0187835693359375, -0.03271484375, -0.007137298583984375, -0.061492919921875, 0.00974273681640625, 0.0341796875, -0.0277557373046875, 0.0085296630859375, -0.0258941650390625, 0.0083160400390625, 0.0048675537109375, 0.02392578125, -0.016998291015625, 0.0428466796875, -0.0233917236328125, 0.03948974609375, 0.043060302734375, 0.0009641647338867188, 0.025604248046875, -0.02435302734375, 0.04241943359375, -0.00551605224609375, 0.035552978515625, 0.007328033447265625, -0.052154541015625, -0.051788330078125, -0.0198516845703125, 0.03045654296875, 0.0531005859375, -0.05670166015625, 0.0511474609375, -0.01525115966796875, -0.056640625, -0.031402587890625, 
-0.005084991455078125, 0.01934814453125, 0.02459716796875, 0.04052734375, -0.03118896484375, -0.0228118896484375, -0.05523681640625, -0.0017175674438476562, -0.027557373046875, -0.01120758056640625, 0.0219573974609375, 0.05181884765625, -0.0206756591796875, 0.05682373046875, -0.045928955078125, -0.00724029541015625, -0.00537872314453125, -0.003143310546875, 0.03411865234375, 0.054779052734375, 0.0447998046875, -0.049591064453125, -0.04205322265625, -0.0018434524536132812, -0.052001953125, 0.0008993148803710938, -0.00298309326171875, -0.0237579345703125, 0.0190277099609375, -0.00432586669921875, -0.08795166015625, 0.054168701171875, 0.02783203125, -0.039703369140625, 0.06573486328125, -0.032562255859375, 0.02349853515625, -0.07611083984375, -0.006565093994140625, 0.00677490234375, -0.01293182373046875, -0.03997802734375, 0.004138946533203125, 0.0002903938293457031, 0.00789642333984375, -0.029144287109375, 0.03460693359375, -0.042236328125, 0.00818634033203125, -0.00021779537200927734, -0.0171661376953125, 0.0267486572265625, 0.0487060546875, -0.00913238525390625, 0.06475830078125, 0.049957275390625, -0.04144287109375, 0.055908203125, 0.035125732421875, -0.00807952880859375, 0.016754150390625, -0.060577392578125, 0.01442718505859375, 0.00826263427734375, 0.022308349609375, -0.059112548828125, -0.019073486328125, 0.04791259765625, -0.046478271484375, 0.04156494140625, -0.034881591796875, -0.034881591796875, -0.035369873046875, -0.03912353515625, 0.019989013671875, 0.07135009765625, -0.02691650390625, 0.03759765625, 0.028778076171875, 0.009918212890625, -0.049072265625, -0.0533447265625, -0.00830078125, -0.01885986328125, -0.042938232421875, 0.02655029296875, -0.00432586669921875, 0.0009613037109375, -0.00467681884765625, -0.0091705322265625, -0.016815185546875, -0.01297760009765625, 0.01442718505859375, 0.021087646484375, -0.0112457275390625, -0.0218048095703125, 0.00974273681640625, 0.0031147003173828125, 0.0039043426513671875, -0.03717041015625, 0.0290374755859375, 
-0.0175323486328125, 0.0007791519165039062, -0.02392578125, 0.024749755859375, 0.032806396484375, -0.0026302337646484375, 0.06390380859375, 0.0606689453125, -0.0254669189453125, 0.00307464599609375, -0.032470703125, -0.001613616943359375, -0.040069580078125, 0.0221405029296875, -0.0180816650390625, -0.047515869140625, 0.0305023193359375, 0.028717041015625, 0.0019330978393554688, 0.051544189453125, 0.043548583984375, 0.0080413818359375, 0.06671142578125, 0.0288543701171875, -0.0113983154296875, 0.031768798828125, -0.04901123046875, -0.006740570068359375, -0.05291748046875, -0.01727294921875, -0.03240966796875, 0.008148193359375, -0.05615234375, -0.0430908203125, 0.022003173828125, 0.0308837890625, -0.060577392578125, 0.048583984375, -0.0665283203125, 0.01910400390625, 0.044891357421875, 0.01123046875, 0.008331298828125, 0.0139312744140625, -0.007415771484375, 0.0216522216796875, -0.049102783203125, -0.03045654296875, 0.0791015625, 0.0162200927734375, 0.0601806640625, 0.01166534423828125, 0.034698486328125, 0.01483154296875, -0.0018472671508789062, -0.035369873046875, 0.03900146484375, 0.0115203857421875, -0.041839599609375, -0.0313720703125, -0.041748046875, -0.0833740234375, 0.025054931640625, -0.0010528564453125, -0.06451416015625, 0.0300750732421875, 0.00437164306640625, -0.0300445556640625, 0.0184326171875, -0.05426025390625, 0.08782958984375, -0.0026035308837890625, -0.02978515625, 0.0009708404541015625, -0.048004150390625, 0.01763916015625, 0.01468658447265625, -0.004421234130859375, -0.0050048828125, -0.00946044921875, 0.05584716796875, -0.08026123046875, 0.050567626953125, -0.0173797607421875, -0.007617950439453125, 0.04327392578125, -0.009063720703125, 0.039398193359375, 0.00809478759765625, -0.004566192626953125, 0.031585693359375, 0.0297393798828125, -0.037933349609375, -0.0313720703125, 0.041168212890625, -0.07891845703125, -0.04498291015625, -0.0367431640625, -0.0257110595703125, 0.00330352783203125, 0.004802703857421875, 0.03948974609375, 
0.0386962890625, 0.00174713134765625, -0.0115966796875, 0.052520751953125, -0.03118896484375, 0.0335693359375, 0.0229034423828125, -0.019500732421875, -0.04986572265625, 0.07501220703125, 0.00958251953125, 0.00562286376953125, 0.0180511474609375, 0.004070281982421875, -0.029144287109375, -0.041351318359375, -0.053619384765625, 0.016754150390625, -0.049957275390625, -0.03167724609375, -0.05023193359375, -0.031097412109375, -0.04302978515625, 0.018707275390625, -0.02593994140625, -0.05029296875, -0.035369873046875, 0.009796142578125, 0.0743408203125, 0.037078857421875, -0.01007843017578125, 0.01284027099609375, -0.0665283203125, 0.0245208740234375, 0.03765869140625, 0.008758544921875, 0.0024623870849609375, -0.058013916015625, -0.0220489501953125, 0.00894927978515625, -0.045867919921875, -0.076416015625, 0.0645751953125, 0.0119781494140625, 0.037109375, 0.016693115234375, 0.01145172119140625, 0.059600830078125, -0.035369873046875, 0.08319091796875, 0.01201629638671875, -0.06768798828125, 0.03759765625, -0.025177001953125, 0.02374267578125, 0.023529052734375, 0.043121337890625, -0.0263824462890625, -0.01904296875, -0.045166015625, -0.05926513671875, 0.038848876953125, 0.03826904296875, 0.00023877620697021484, 0.008392333984375, 0.032928466796875, 0.00887298583984375, 0.0174407958984375, -0.05340576171875, -0.046722412109375, -0.042572021484375, -0.008514404296875, 0.015411376953125, 0.00566864013671875, -0.01085662841796875, -0.052978515625, 0.07098388671875, -0.014892578125, 0.06500244140625, 0.033447265625, -0.0079345703125, -0.0012712478637695312, 0.01451873779296875, 0.0222320556640625, 0.048004150390625, -0.012908935546875, -0.018798828125, 0.01132965087890625, -0.061981201171875, 0.011932373046875, 0.036468505859375, -0.0143585205078125, -0.0097198486328125, 0.0094146728515625, 0.061248779296875, -0.02728271484375, -0.0230865478515625, 0.04449462890625, -0.0230865478515625, -0.0253143310546875, -0.0283660888671875, 0.017364501953125, 0.01294708251953125, 
0.029388427734375, 0.038330078125, -0.005645751953125, 0.0296630859375, -0.0284576416015625, 0.004421234130859375, 0.034820556640625, -0.012420654296875, -0.021026611328125, 0.0548095703125, -0.007282257080078125, -0.0022525787353515625, 0.061767578125, -0.023590087890625, -0.041259765625, 0.06549072265625, 0.031646728515625, 0.0623779296875, -0.0014123916625976562, 0.01157379150390625, 0.040740966796875, 0.01184844970703125, -0.0016727447509765625, 0.0207977294921875, -0.0050506591796875, -0.049957275390625, -0.02630615234375, -0.047027587890625, -0.0189971923828125, 0.01230621337890625, -0.0491943359375, 0.01442718505859375, -0.02325439453125, -0.01873779296875, -0.01459503173828125, 0.021514892578125, -0.05279541015625, 0.01031494140625, 0.00879669189453125, 0.07159423828125, -0.037445068359375, 0.0787353515625, 0.03271484375, -0.04608154296875, -0.0679931640625, -0.01126861572265625, -0.0002739429473876953, -0.0460205078125, 0.01532745361328125, 0.00731658935546875, 0.025543212890625, -0.00041675567626953125, -0.048309326171875, -0.0570068359375, 0.0992431640625, 0.025787353515625, -0.034881591796875, -0.01702880859375, 0.0014543533325195312, 0.0241546630859375, -0.006591796875, 0.054656982421875, 0.0460205078125, 0.02471923828125, 0.003917694091796875, -0.08026123046875, 0.0290374755859375, -0.03436279296875, 0.00403594970703125, 0.00458526611328125, -0.06964111328125, 0.06976318359375, -0.0036640167236328125, -0.0007452964782714844, 0.018463134765625, 0.0521240234375, 0.0257110595703125, 0.00022351741790771484, 0.0175628662109375, 0.043914794921875, 0.06475830078125, -0.0178985595703125, 0.095947265625, -0.0196990966796875, 0.048065185546875, 0.04974365234375, 0.00997161865234375, 0.038055419921875, 0.0175628662109375, -0.061126708984375, 0.043060302734375, 0.06903076171875, -0.0185394287109375, 0.036865234375, 0.0217437744140625, -0.0253753662109375, -0.005645751953125, 0.0256500244140625, -0.0616455078125, -0.0031070709228515625, 0.0290069580078125, 
-0.00868988037109375, 0.005901336669921875, -0.0047760009765625, -0.0041656494140625, -0.045501708984375, -0.028839111328125, 0.047210693359375, 0.01239013671875, -0.0225830078125, 0.08612060546875, -0.01064300537109375, 0.05975341796875, -0.0479736328125, -0.013671875, -0.028167724609375, -0.0017900466918945312, -0.0233917236328125, -0.05108642578125, 0.00356292724609375, -0.0176239013671875, -0.01387786865234375, 0.00916290283203125, 0.0552978515625, -0.0281829833984375, -0.031463623046875, 0.01318359375, 0.032623291015625, 0.033233642578125, -0.01300048828125, -0.08172607421875, 0.01166534423828125, 0.00023412704467773438, -0.047637939453125, 0.035369873046875, 0.043304443359375, 0.0178070068359375, 0.04681396484375, 0.036529541015625, -0.008056640625, 0.0191497802734375, -0.0095367431640625, 0.06884765625, -0.06170654296875, -0.01511383056640625, -0.0634765625, 0.04144287109375, -0.01216888427734375, -0.0235137939453125, 0.07110595703125, 0.047515869140625, 0.049591064453125, -0.0180816650390625, 0.05963134765625, -0.0270233154296875, -0.0016078948974609375, -0.0230865478515625, 0.05657958984375, -0.0423583984375, 0.0111846923828125, -0.0271759033203125, -0.05181884765625, -0.01004791259765625, 0.05908203125, -0.006519317626953125, 0.0163421630859375, 0.035552978515625, 0.07861328125, 0.010009765625, 0.00421142578125, 0.0175018310546875, 0.022064208984375, 0.01108551025390625, 0.06475830078125, 0.054840087890625, -0.07965087890625, 0.051239013671875, -0.02435302734375, -0.013427734375, 0.0020294189453125, -0.05535888671875, -0.060821533203125, -0.028778076171875, -0.053955078125, -0.0616455078125, 0.005702972412109375, 0.07977294921875, 0.05517578125, -0.054840087890625, -0.02117919921875, -0.01190948486328125, 0.0155029296875, -0.0308074951171875, -0.02362060546875, 0.03546142578125, 0.00730133056640625, -0.060791015625, 0.01357269287109375, -0.0013990402221679688, 0.0241546630859375, -0.010589599609375, -0.0305023193359375, -0.0125732421875, 
0.005931854248046875, 0.040374755859375, 0.0457763671875, -0.0413818359375, -0.00045680999755859375, -0.0166473388671875, -0.0126800537109375, 0.022918701171875, 0.023590087890625, -0.061126708984375, 0.0008268356323242188, 0.034332275390625, 0.0242156982421875, 0.058074951171875, -0.01203155517578125, 0.034759521484375, -0.0221099853515625, 0.0012865066528320312, -0.002765655517578125, 0.031890869140625, 0.01207733154296875, -0.046661376953125, 0.044952392578125, 0.03143310546875, -0.061279296875, -0.044097900390625, -0.0270233154296875, -0.08135986328125, -0.0219573974609375, 0.09130859375, -0.010040283203125, -0.038818359375, -0.00885009765625, -0.025726318359375, 0.02679443359375, -0.044830322265625, 0.0169677734375, 0.02508544921875, -0.0206756591796875, -0.0205078125, -0.04638671875, 0.045654296875, 0.0020503997802734375, -0.049560546875, 0.0027561187744140625, 0.04449462890625, 0.036224365234375, 0.0024585723876953125, 0.0682373046875, -0.01324462890625, 0.02923583984375, 0.0247039794921875, 0.01525115966796875, -0.0052947998046875, 0.0164031982421875, -0.0386962890625, 0.0077056884765625, -0.0137176513671875, -0.00806427001953125 ] ]
IGeniusDev/llama13B-quant8-testv1-openorca-customdataset
2023-08-25T12:10:08.000Z
[ "transformers", "safetensors", "llama", "text-generation", "en", "dataset:Open-Orca/OpenOrca", "dataset:customdataset-soon-available", "license:llama2", "endpoints_compatible", "has_space", "text-generation-inference", "region:us" ]
text-generation
IGeniusDev
null
null
IGeniusDev/llama13B-quant8-testv1-openorca-customdataset
0
5,850
transformers
2023-08-25T10:02:45
--- language: - en datasets: - Open-Orca/OpenOrca - customdataset-soon-available library_name: transformers pipeline_tag: text-generation license: llama2 --- # IgeniusDev-LLama2-13B this is a test model. Will be deleted soon!
228
[ [ -0.043121337890625, -0.03717041015625, 0.0168914794921875, 0.05364990234375, -0.04522705078125, 0.0223541259765625, 0.029937744140625, -0.0169219970703125, 0.006275177001953125, 0.042205810546875, -0.062744140625, -0.0225372314453125, -0.041839599609375, 0.0036945343017578125, -0.08026123046875, 0.057342529296875, 0.0038051605224609375, 0.01255035400390625, -0.0042724609375, 0.0209197998046875, -0.02947998046875, 0.00450897216796875, -0.05096435546875, -0.0321044921875, 0.034698486328125, 0.051544189453125, 0.0250701904296875, 0.0499267578125, 0.04766845703125, 0.01334381103515625, -0.0290985107421875, -0.00577545166015625, -0.05035400390625, -0.00644683837890625, 0.006744384765625, -0.0251007080078125, -0.033660888671875, 0.00972747802734375, 0.0288848876953125, 0.02398681640625, -0.016632080078125, 0.03289794921875, -0.0181121826171875, 0.0003790855407714844, -0.01739501953125, 0.0135345458984375, -0.043243408203125, -0.019866943359375, -0.0170745849609375, 0.007598876953125, -0.01239776611328125, -0.043060302734375, -0.04083251953125, -0.032867431640625, 0.02618408203125, 0.022735595703125, 0.0877685546875, 0.020477294921875, -0.05078125, -0.01290130615234375, -0.0458984375, 0.0272674560546875, -0.031280517578125, 0.0045928955078125, 0.04315185546875, 0.0228271484375, -0.0202178955078125, -0.03997802734375, -0.042083740234375, -0.016326904296875, 0.016143798828125, 0.0044097900390625, -0.020751953125, -0.0223236083984375, 0.02130126953125, 0.00623321533203125, -0.032867431640625, 0.0496826171875, -0.09869384765625, -0.009521484375, 0.055999755859375, 0.01413726806640625, 0.0206298828125, -0.01010894775390625, -0.0506591796875, 0.0200653076171875, -0.0830078125, -0.03997802734375, 0.04071044921875, 0.006725311279296875, -0.023834228515625, 0.06439208984375, -0.006114959716796875, -0.00382232666015625, 0.01806640625, -0.017852783203125, 0.0218963623046875, 0.004871368408203125, -0.0207366943359375, 0.0244293212890625, 0.0325927734375, 0.0355224609375, 
0.022308349609375, 0.016448974609375, -0.01467132568359375, 0.026519775390625, -0.00275421142578125, -0.07464599609375, -0.0206298828125, 0.012359619140625, -0.06524658203125, -0.03985595703125, -0.0030384063720703125, -0.035003662109375, -0.04827880859375, 0.0149688720703125, 0.023956298828125, -0.019989013671875, -0.056060791015625, -0.002742767333984375, 0.005947113037109375, 0.021331787109375, 0.0291748046875, -0.041259765625, 0.015716552734375, 0.0570068359375, 0.007053375244140625, 0.018707275390625, -0.0022449493408203125, 0.01094818115234375, 0.0016222000122070312, -0.043426513671875, 0.04217529296875, -0.017822265625, -0.088134765625, -0.018096923828125, 0.0249786376953125, 0.015594482421875, -0.0762939453125, 0.032073974609375, -0.052459716796875, 0.0157928466796875, -0.0246429443359375, -0.034027099609375, -0.03106689453125, 0.01544952392578125, -0.04388427734375, 0.0767822265625, 0.01244354248046875, -0.007457733154296875, 0.00936126708984375, -0.05908203125, -0.0145263671875, 0.025390625, -0.00241851806640625, -0.03790283203125, -0.00079345703125, -0.0439453125, 0.03021240234375, -0.04461669921875, 0.03533935546875, -0.0338134765625, -0.0288848876953125, 0.009368896484375, -0.01526641845703125, 0.058197021484375, 0.0565185546875, -0.00640869140625, -0.005634307861328125, -0.09649658203125, -0.0283203125, 0.03021240234375, -0.002017974853515625, -0.004886627197265625, -0.017364501953125, -0.0014324188232421875, 0.01439666748046875, 0.03057861328125, -0.0594482421875, 0.0169219970703125, 0.0207366943359375, 0.0030269622802734375, 0.0731201171875, 0.005237579345703125, 0.018096923828125, -0.01104736328125, 0.053558349609375, 0.01149749755859375, 0.0223541259765625, 0.03045654296875, -0.037017822265625, -0.07244873046875, -0.03643798828125, 0.0024871826171875, 0.0419921875, -0.0018892288208007812, 0.03240966796875, 0.01338958740234375, -0.06982421875, 0.0098419189453125, -0.008087158203125, 0.034332275390625, 0.036407470703125, -0.00418853759765625, 
-0.01016998291015625, -0.0496826171875, -0.08447265625, 0.014617919921875, -0.05010986328125, -0.006328582763671875, -0.0197601318359375, 0.060455322265625, -0.0518798828125, 0.01418304443359375, -0.0243682861328125, -0.0127410888671875, -0.029571533203125, -0.018157958984375, 0.0229949951171875, 0.060333251953125, 0.04693603515625, -0.045654296875, -0.00014281272888183594, -0.0325927734375, -0.060882568359375, -0.020294189453125, 0.0143585205078125, -0.01025390625, -0.01232147216796875, 0.04388427734375, -0.06689453125, 0.054718017578125, 0.0280914306640625, -0.05560302734375, 0.0279998779296875, -0.0014619827270507812, 0.001583099365234375, -0.07769775390625, -0.0006527900695800781, -0.00916290283203125, 0.004917144775390625, -0.0029354095458984375, 0.00983428955078125, 0.0213165283203125, -0.00646209716796875, -0.0323486328125, 0.034393310546875, -0.039703369140625, -0.00554656982421875, -0.0160064697265625, -0.0234527587890625, -0.0080108642578125, -0.0237884521484375, -0.041656494140625, 0.06268310546875, 0.01300048828125, -0.048675537109375, 0.0258941650390625, 0.05743408203125, -0.0284881591796875, 0.012451171875, -0.048309326171875, -0.018402099609375, -0.0135345458984375, -0.00795745849609375, -0.09844970703125, -0.035003662109375, 0.039642333984375, -0.0152130126953125, 0.0162353515625, 0.0228424072265625, -0.060760498046875, -0.054412841796875, -0.01079559326171875, 0.0433349609375, 0.06085205078125, -0.046051025390625, 0.0285491943359375, 0.02154541015625, 0.0021686553955078125, -0.016632080078125, -0.057769775390625, -0.0003685951232910156, -0.021942138671875, -0.0286712646484375, 0.014312744140625, -0.0109710693359375, -0.043121337890625, 0.0083160400390625, -0.01049041748046875, -0.0236968994140625, 0.037017822265625, 0.048736572265625, 0.04071044921875, -0.005260467529296875, 0.0191802978515625, 0.003383636474609375, -0.00553131103515625, 0.0182037353515625, 0.07025146484375, 0.05224609375, 0.00878143310546875, -0.01052093505859375, 
-0.03936767578125, 0.0167694091796875, 0.04437255859375, -0.0186614990234375, 0.03973388671875, 0.0227813720703125, -0.035980224609375, -0.0004475116729736328, -0.051971435546875, -0.01004791259765625, -0.03521728515625, 0.047210693359375, -0.030426025390625, -0.060943603515625, 0.051605224609375, -0.0190582275390625, -0.0049591064453125, 0.04425048828125, 0.05169677734375, -0.00585174560546875, 0.055816650390625, 0.04583740234375, 0.0050506591796875, 0.003448486328125, -0.05364990234375, 0.0266265869140625, -0.046417236328125, -0.064697265625, -0.0262298583984375, -0.0091400146484375, -0.050506591796875, -0.00748443603515625, -0.012115478515625, -0.00717926025390625, -0.031646728515625, 0.0367431640625, -0.017333984375, 0.050689697265625, 0.031768798828125, 0.007694244384765625, -0.005397796630859375, -0.041351318359375, 0.0280914306640625, 0.005298614501953125, -0.01428985595703125, -0.040557861328125, 0.073486328125, 0.0234375, 0.0579833984375, 0.0099639892578125, 0.06134033203125, 0.00751495361328125, 0.031707763671875, -0.026824951171875, 0.069091796875, 0.00511932373046875, -0.080810546875, 0.00623321533203125, 0.008758544921875, -0.06646728515625, 0.018707275390625, 0.010345458984375, -0.020660400390625, 0.0016641616821289062, 0.007572174072265625, -0.025054931640625, 0.0312042236328125, -0.048004150390625, 0.06365966796875, -0.0038623809814453125, 0.004825592041015625, -0.0244293212890625, -0.021514892578125, 0.066162109375, -0.0031890869140625, 0.01654052734375, -0.0640869140625, -0.0097198486328125, 0.0285797119140625, -0.040130615234375, 0.0396728515625, -0.01507568359375, -0.0185394287109375, 0.0270538330078125, 0.038909912109375, 0.02099609375, 0.01947021484375, -0.0141448974609375, -0.0189361572265625, -0.0095977783203125, -0.032684326171875, 0.0137939453125, 0.05218505859375, -0.06439208984375, -0.03271484375, -0.03985595703125, 0.0013532638549804688, 0.010986328125, -0.016082763671875, 0.017242431640625, 0.0303192138671875, -0.0233612060546875, 
0.00812530517578125, 0.0364990234375, -0.020111083984375, 0.05804443359375, 0.017852783203125, -0.037353515625, -0.006038665771484375, 0.058563232421875, -0.003376007080078125, 0.0160064697265625, -0.01593017578125, -0.0030422210693359375, -0.005863189697265625, -0.052520751953125, 0.0218658447265625, 0.043609619140625, -0.03515625, -0.0152740478515625, -0.0262298583984375, -0.01255035400390625, -0.03021240234375, 0.007354736328125, -0.040435791015625, -0.023956298828125, -0.07696533203125, -0.045684814453125, 0.026153564453125, 0.06475830078125, -0.0289306640625, 0.06390380859375, -0.03875732421875, 0.0307769775390625, 0.0148162841796875, 0.012451171875, -0.0282135009765625, -0.0521240234375, 0.0173492431640625, -0.02783203125, -0.037872314453125, -0.032196044921875, 0.052947998046875, -0.0034236907958984375, 0.029693603515625, 0.032958984375, -0.01123809814453125, 0.044097900390625, -0.0170135498046875, 0.058197021484375, 0.0200958251953125, -0.0487060546875, 0.0198974609375, -0.01224517822265625, 0.0234832763671875, 0.03753662109375, 0.0374755859375, -0.006145477294921875, -0.026458740234375, -0.07098388671875, -0.05352783203125, 0.01519012451171875, -0.0026874542236328125, 0.025482177734375, 0.00011450052261352539, 0.03460693359375, 0.036285400390625, 0.0288848876953125, -0.0419921875, -0.040618896484375, 0.0252685546875, -0.00908660888671875, 0.01715087890625, -0.08563232421875, -0.0078125, -0.035186767578125, 0.0615234375, 0.0244293212890625, 0.01111602783203125, -0.0227813720703125, 0.001312255859375, -0.01540374755859375, -0.0160064697265625, 0.048248291015625, 0.072021484375, -0.0292205810546875, -0.00905609130859375, -0.00223541259765625, -0.025726318359375, 0.01922607421875, -0.005840301513671875, -0.01241302490234375, -0.0067596435546875, 0.0272979736328125, 0.0306854248046875, 0.00980377197265625, -0.03326416015625, 0.034027099609375, -0.016510009765625, -0.035186767578125, -0.0611572265625, -0.00409698486328125, 0.006786346435546875, 
0.0234527587890625, 0.0272979736328125, 0.0213623046875, 0.0155181884765625, -0.0374755859375, 0.0183258056640625, 0.00920867919921875, -0.0042724609375, -0.0421142578125, 0.0821533203125, 0.032806396484375, -0.0194091796875, 0.0440673828125, -0.033843994140625, -0.03802490234375, 0.091064453125, 0.033782958984375, 0.0545654296875, -0.0224151611328125, 0.026824951171875, 0.025543212890625, 0.0307769775390625, -0.00710296630859375, 0.0628662109375, 0.017913818359375, -0.037811279296875, 0.006389617919921875, -0.01404571533203125, -0.010040283203125, 0.0183563232421875, -0.0302276611328125, 0.029937744140625, -0.0794677734375, 0.0007748603820800781, -0.00839996337890625, 0.01349639892578125, -0.03143310546875, 0.0506591796875, 0.0287933349609375, 0.07769775390625, -0.057769775390625, 0.056121826171875, 0.0019969940185546875, -0.01500701904296875, -0.0638427734375, -0.02471923828125, 0.0172271728515625, -0.0831298828125, 0.087890625, 0.040374755859375, 0.025177001953125, 0.00536346435546875, -0.0828857421875, -0.08172607421875, 0.08807373046875, 0.028564453125, -0.05072021484375, 0.00623321533203125, -0.0302886962890625, 0.0157623291015625, -0.01232147216796875, -0.00666046142578125, 0.0177459716796875, 0.0360107421875, 0.0023670196533203125, -0.061553955078125, -0.005748748779296875, -0.017913818359375, 0.003421783447265625, 0.0246734619140625, -0.07470703125, 0.09735107421875, -0.01523590087890625, -0.01416778564453125, 0.06475830078125, 0.039276123046875, 0.021240234375, -0.0017375946044921875, 0.0343017578125, 0.04852294921875, 0.045928955078125, -0.01105499267578125, 0.06201171875, -0.01016998291015625, 0.03302001953125, 0.05413818359375, -0.046966552734375, 0.024749755859375, 0.04290771484375, 0.004543304443359375, 0.033203125, 0.0823974609375, -0.020355224609375, 0.04766845703125, 0.031585693359375, -0.017791748046875, -0.02337646484375, -0.01396942138671875, -0.046905517578125, 0.05657958984375, 0.0021076202392578125, -0.041412353515625, -0.01117706298828125, 
-0.0212860107421875, 0.0236968994140625, 0.0187835693359375, -0.01690673828125, 0.024017333984375, -0.00598907470703125, -0.007694244384765625, 0.032257080078125, -0.0007233619689941406, 0.03955078125, -0.0293121337890625, 0.0028209686279296875, -0.01708984375, 0.03619384765625, -0.0016155242919921875, -0.027069091796875, 0.0070343017578125, -0.01837158203125, -0.0007538795471191406, -0.00806427001953125, 0.040374755859375, -0.0282135009765625, -0.06634521484375, 0.05328369140625, 0.03662109375, 0.0039215087890625, -0.00650787353515625, -0.0452880859375, 0.0396728515625, -0.01885986328125, -0.018402099609375, 0.009246826171875, 0.00843048095703125, -0.00887298583984375, 0.08038330078125, 0.0284881591796875, -0.0015230178833007812, 0.0235595703125, 0.0036640167236328125, 0.0634765625, -0.04083251953125, -0.01222991943359375, -0.0689697265625, 0.0202178955078125, -0.005859375, -0.04766845703125, 0.049896240234375, 0.080322265625, 0.06927490234375, -0.023101806640625, 0.038238525390625, -0.01432037353515625, 0.047637939453125, -0.00513458251953125, 0.055511474609375, -0.044097900390625, 0.00829315185546875, 0.0169525146484375, -0.0517578125, -0.0191497802734375, 0.017608642578125, 0.0251922607421875, -0.005237579345703125, 0.0283355712890625, 0.0701904296875, 0.0025081634521484375, 0.0187835693359375, 0.0304412841796875, 0.030609130859375, 0.04278564453125, 0.02044677734375, 0.08380126953125, -0.055328369140625, 0.0193328857421875, -0.044189453125, -0.01410675048828125, -0.04119873046875, -0.044647216796875, -0.068603515625, -0.0308990478515625, -0.026824951171875, -0.041778564453125, 0.00445556640625, 0.0721435546875, 0.0258941650390625, -0.05145263671875, -0.045074462890625, 0.0231475830078125, -0.0004546642303466797, 0.0247802734375, -0.00823211669921875, -0.00444793701171875, 0.02752685546875, -0.0227203369140625, 0.0162811279296875, 0.002651214599609375, 0.05169677734375, -0.017974853515625, 0.0106964111328125, -0.030853271484375, -0.01275634765625, 
0.042205810546875, 0.0181121826171875, -0.053985595703125, -0.044403076171875, 0.005588531494140625, -0.0286712646484375, -0.0112152099609375, 0.01148223876953125, -0.0069580078125, -0.017120361328125, 0.0222015380859375, 0.01049041748046875, 0.014373779296875, 0.01544952392578125, 0.03631591796875, -0.021087646484375, 0.044097900390625, 0.020904541015625, 0.019012451171875, 0.004489898681640625, -0.0187835693359375, 0.0616455078125, 0.04058837890625, -0.033477783203125, -0.10308837890625, 0.01035308837890625, -0.07940673828125, -0.01221466064453125, 0.07818603515625, 0.022674560546875, -0.047760009765625, 0.03448486328125, -0.05126953125, 0.0180206298828125, -0.0157012939453125, 0.05712890625, 0.0227203369140625, 0.007442474365234375, -0.006526947021484375, -0.019378662109375, 0.01189422607421875, 0.004543304443359375, -0.06866455078125, -0.00958251953125, 0.0267791748046875, 0.038299560546875, -0.0026683807373046875, 0.023284912109375, -0.01490020751953125, 0.044097900390625, 0.004482269287109375, 0.0367431640625, -0.0198211669921875, -0.0225677490234375, -0.01004791259765625, 0.00856781005859375, 0.0009160041809082031, -0.050201416015625 ] ]
PocketDoc/Dans-RetroRodeo-13b
2023-10-07T20:29:19.000Z
[ "transformers", "pytorch", "safetensors", "llama", "text-generation", "en", "dataset:PocketDoc/Floyd-Text-Adventures", "dataset:PocketDoc/Choose-Your-Story-Long-Text-Adventures", "endpoints_compatible", "text-generation-inference", "region:us" ]
text-generation
PocketDoc
null
null
PocketDoc/Dans-RetroRodeo-13b
2
5,849
transformers
2023-09-19T21:17:44
--- language: - en datasets: - PocketDoc/Floyd-Text-Adventures - PocketDoc/Choose-Your-Story-Long-Text-Adventures --- # What is the model for? This model is proficient in crafting text-based adventure games. It can both concise replies and more expansive, novel-like descriptions. The ability to alternate between these two response styles can be triggered by a distinct system message. # What's in the sausage? This model was trained on [Holodeck-1](https://huggingface.co/KoboldAI/LLAMA2-13B-Holodeck-1) using a custom version of the floyd and cys datasets from the skein text adventure dataset. The dataset has been improved over what was used in my previous model "Dans-CreepingSenseOfDoom" and this time presented in the more traditional adventure mode format. ## Prompt format and examples: #### Format description: Traditional adventure format with a description of the genre, tags, and other misc information at the top followed by an optional introductory message. User messages are prefixed with two new lines and "> ", model messages are one new line after the last user message or the description message if an introductory message is desired. #### Examples of shorter responses: ``` [Genres: Science Fiction] [Tags: humor, old school] [Mode: Adventure] [Description: A puzzle about committing acts of financial skulduggery and exploiting ridiculous magical items.] [Misc: Writing era: 1993] [Intro] You are sitting at a desk in the offices of the Trans-Galactic Financial Services Corporation. You have been here for hours, trying to figure out how you can get your hands on some money that is rightfully yours. Unfortunately, it seems as if there's no way you will ever be able to do this without getting caught by the company president, Mr. Vance. > You lean back in your chair You stretch your arms above your head and yawn. It has been a long day already. > You check your terminal Your computer screen shows various bits of information about TGFC stocks and bonds. 
> You check the news The front page of today's newspaper reports that "TGFC" stands for "Too Greedy For Cash." ``` ``` Two men, one a brusque bowling ball; the other a murderous dandy, discuss their fate and the fate of the world. They've chosen someone to send. He's clever, but hopefully not too clever. Adventure mode, Varied response length, Creative > You look around You are in an empty room with no furniture except for a table. A man is sitting at the table, writing something on a piece of paper. > You talk to him "Hello," you say politely. "I was wondering if I could ask you some questions." The man looks up from his writing. "Who are you?" he asks suspiciously. "My name is..." > You tell him your name is Dan "Dan? That doesn't sound like a very nice name." > You take offense "It's my name!" "Well it sounds stupid! And what do you want anyway?" > You ask him where you are "This is the place where we choose who gets sent next." > You ask where people are getting sent "To the future, obviously." ``` #### Examples of longer novel like responses: ``` [Genres: grimdark fantasy] [Tags: Fantasy, Grimdark, Serious, Villain Protagonist, War] [Description: This story is definitely more "story" than "game", however there are two distinct paths that can be followed, each with their own pitfalls and additional choices. This story can get long, and assuming you're surviving, years do pass in it and you and your environment will change.] [First message intro] [Mode: Interactive Storyteller] It was a dark night when I received my first letter from my father. It had been sent by messenger bird and delivered to me at midnight. As I read through its contents, I could feel myself growing numb as I realized what he had asked of me. My father wanted me to go to war. I knew this day would come eventually. He has always said that his greatest wish was to see me fight alongside him. However, now that the time had finally arrived, I found myself unable to respond immediately. 
This was going to be a huge decision, one that would affect both our lives forevermore. My name is Kronos Vang. I am the son of the great warlord of the kingdom of Darrow, Lord Gendall Vang. I have lived my entire life here in the capital city of Nadorf, training under the tutelage of my father's most trusted general, Commander Eldon Blackwell. Now, at the age of 16, I find myself standing before a crossroads, one that will determine whether or not I follow in my father's footsteps. > You go to war As soon as I finished reading the letter, I made up my mind. If my father wanted me to join him in battle so badly, then I would do just that. I didn't want to disappoint him. The next morning, I woke early and went downstairs to speak with Commander Blackwell about my future plans. When I entered his office, I saw him sitting behind his desk, looking very concerned. "Kronos," he began slowly, "your father wants you to go to war." "Yes sir," I replied. "He sent me a letter last night explaining everything." "Then why did you come talk to me?" he asked. "Because I need to tell someone," I answered truthfully. "And since you were the only person who came close to being like a father to me..." Commander Blackwell smiled slightly. "Well, let's sit down and discuss this together," he said, motioning towards a chair across from his desk. Once we sat down, he continued speaking. "Look kiddo, I don't think anyone knows better than you how much your father loves you. And I know he wouldn't ask anything of you unless he thought it was absolutely necessary. But, I also know that you've never really shown any interest in becoming a soldier. In fact, you'd rather spend all your free time learning magic spells instead of practicing swordplay." "But sir," I interrupted, "that doesn't mean I won't become good enough to fight someday! All I need is practice!" "That may be true," he admitted, "but I still believe that you should stay here and continue your studies. 
Your father needs soldiers, yes, but he also needs magicians. Without them, he cannot win the wars he must fight. I promise you, if you remain here, I will train you until you are able to defend yourself against even the strongest foe." "No sir," I insisted. "If my father asks me to go to war, then I must obey him. I will not betray him again." "Again? What do you mean 'again'?" he asked curiously. "When I was younger, I used to run away every chance I got," I explained. "Even though I knew it wasn't right, I couldn't help myself. Every time I ran off, I felt like I was abandoning my family. That's why I stayed here after my mother died. Because I didn't want to leave them ever again." "Oh kiddo," he sighed sadly. "I'm sorry. I shouldn't have kept such a secret from you." "What secret?" I asked confused. "Your mother," he revealed. "She was a powerful sorceress. She cast a spell upon herself and your father that would make her die within five years of giving birth to you. After she passed away, your father became heartbroken. So, he cast another spell over himself that would prevent him from falling in love again. He believed that he would never marry again, nor would he ever have children." "Wait," I interjected. "So, he didn't actually love me?" "Of course he loved you!" he exclaimed. "How could you possibly doubt that? He raised you single-handedly, gave you everything you needed, and protected you whenever danger threatened. No, no, Kronos, he loves you deeply. But, he simply believes that he can protect you best by keeping you out of harm's way. That's why he forbade you from leaving home. That's why he refused to allow you to learn magic. He fears losing you too." "But... what does this have to do with me going to war?" I asked. "Nothing," he admitted. "I suppose I was merely trying to convince you to stay. But, if you truly desire to go, then I guess there isn't much I can say or do to stop you." "Thank you sir," I told him sincerely. 
"I appreciate everything you've taught me these past ten years. Even though I'm leaving, I hope you know that I will never forget what you've done for me." "Don't worry about me," he assured me. "Just remember what I said about your father. He loves you very much. Don't ever forget that." After saying goodbye to Commander Blackwell, I returned to my room and packed up all my belongings. Then, I walked downstairs and left the castle without telling anyone else. I figured it was better to leave quietly while everyone was sleeping. Once outside, I headed straight for the nearest tavern. There, I met up with my friends and informed them of my plan to enlist in the army. They weren't happy about it, but none of them tried to dissuade me either. Instead, they promised to meet me back here tomorrow evening, along with whatever supplies they could gather for us. Now, I'm waiting impatiently for them to arrive. I wonder what kind of adventures lie ahead for me... ``` # Some quick and dirty training details: - [<img src="https://raw.githubusercontent.com/OpenAccess-AI-Collective/axolotl/main/image/axolotl-badge-web.png" alt="Built with Axolotl" width="150" height="24"/>](https://github.com/OpenAccess-AI-Collective/axolotl) - Sequence length: 4096 - \# of epochs: 4 - Training time: 2 hours - Hardware: 1x RTX 3090 - Training type: QLoRA - PEFT R/A: 32/32 # Credits: ### Holodeck-1: Thank you to Mr. Seeker and the Kobold AI team for the wonderful model Holodeck-1 [Holodeck-1 Huggingface page](https://huggingface.co/KoboldAI/LLAMA2-13B-Holodeck-1) ### Skein Text Adventure Data: Thank you to the [Kobold AI](https://huggingface.co/KoboldAI) community for curating the Skein dataset, which is pivotal to this model's capabilities.
9,713
[ [ -0.04705810546875, -0.0601806640625, 0.05078125, 0.0037822723388671875, -0.02392578125, 0.0086669921875, 0.021453857421875, -0.0207061767578125, 0.0543212890625, 0.048187255859375, -0.0760498046875, -0.03076171875, -0.055328369140625, -0.007144927978515625, 0.0033054351806640625, 0.061553955078125, -0.0227813720703125, -0.01085662841796875, -0.00089263916015625, 0.013092041015625, -0.0325927734375, -0.02886962890625, -0.0711669921875, -0.040618896484375, 0.050323486328125, 0.0209808349609375, 0.048370361328125, 0.04913330078125, 0.033203125, 0.0267333984375, -0.0267181396484375, 0.017822265625, -0.05609130859375, 0.038604736328125, -0.03692626953125, -0.038360595703125, -0.0225067138671875, 0.0213165283203125, 0.0226593017578125, 0.03515625, -0.01531982421875, 0.001056671142578125, -0.0078277587890625, 0.060516357421875, -0.018798828125, 0.0130767822265625, -0.007228851318359375, 0.029022216796875, -0.01157379150390625, -0.035369873046875, -0.0007014274597167969, -0.04449462890625, 0.0089111328125, -0.061798095703125, 0.016143798828125, 0.018035888671875, 0.06622314453125, -0.009307861328125, -0.042510986328125, -0.054443359375, -0.053680419921875, 0.055816650390625, -0.02410888671875, -0.0065155029296875, 0.048614501953125, 0.046966552734375, -0.040435791015625, -0.055450439453125, -0.04144287109375, -0.017578125, -0.0277557373046875, 0.054779052734375, -0.00372314453125, 0.00562286376953125, 0.020172119140625, 0.036865234375, -0.04656982421875, -0.031463623046875, -0.061126708984375, -0.0198211669921875, 0.034820556640625, 0.026580810546875, 0.0160064697265625, 0.00469970703125, -0.0537109375, -0.02685546875, -0.0174713134765625, 0.0234832763671875, 0.051544189453125, -0.01285552978515625, -0.026397705078125, 0.053802490234375, 0.01371002197265625, 0.0269927978515625, 0.03753662109375, -0.028900146484375, -0.03582763671875, 0.006023406982421875, 0.01009368896484375, 0.006664276123046875, 0.049957275390625, 0.0703125, 0.006595611572265625, -0.03948974609375, 
0.01861572265625, 0.01959228515625, 0.03216552734375, -0.052642822265625, 0.0031223297119140625, 0.0184326171875, -0.058502197265625, -0.036895751953125, -0.01666259765625, -0.07659912109375, -0.043182373046875, 0.0303802490234375, 0.01045989990234375, -0.040618896484375, 0.0274658203125, 0.016387939453125, -0.0509033203125, -0.0007486343383789062, 0.0040130615234375, -0.10223388671875, 0.0119476318359375, 0.054656982421875, 0.054168701171875, 0.011688232421875, -0.028594970703125, -0.045166015625, 0.0144195556640625, -0.048309326171875, 0.033782958984375, -0.03143310546875, -0.05609130859375, -0.00785064697265625, 0.020843505859375, 0.0007243156433105469, -0.026824951171875, 0.059844970703125, -0.061004638671875, 0.03155517578125, -0.005664825439453125, -0.034210205078125, -0.0108184814453125, 0.00905609130859375, -0.01318359375, 0.0215911865234375, 0.006114959716796875, -0.027984619140625, 0.02325439453125, -0.03863525390625, -0.02154541015625, 0.01092529296875, -0.003955841064453125, 0.0019350051879882812, 0.0054473876953125, -0.020416259765625, 0.0252227783203125, -0.05499267578125, 0.0035572052001953125, -0.0394287109375, -0.0035724639892578125, 0.0155029296875, -0.007457733154296875, 0.068115234375, 0.0244598388671875, -0.044189453125, -0.051025390625, -0.0265655517578125, 0.0251312255859375, 0.023590087890625, -0.03887939453125, -0.003231048583984375, 0.0116119384765625, -0.046295166015625, 0.01458740234375, 0.028656005859375, -0.05438232421875, 0.036376953125, -0.050628662109375, 0.0204925537109375, 0.06549072265625, 0.0218963623046875, 0.038360595703125, -0.055816650390625, 0.0672607421875, -0.021759033203125, 0.00849151611328125, -0.00015425682067871094, -0.0176239013671875, -0.0531005859375, 0.01482391357421875, 0.0045928955078125, 0.058380126953125, -0.053375244140625, 0.0274200439453125, 0.0080718994140625, -0.053924560546875, -0.0241546630859375, -0.0020847320556640625, 0.049041748046875, 0.02838134765625, 0.028228759765625, -0.033355712890625, 
-0.0408935546875, -0.044769287109375, -0.0167388916015625, -0.05706787109375, 0.005825042724609375, 0.05303955078125, 0.030364990234375, -0.0230255126953125, 0.05078125, -0.08489990234375, -0.0229949951171875, -0.01092529296875, -0.004543304443359375, 0.002414703369140625, 0.02618408203125, 0.040618896484375, -0.059478759765625, -0.023345947265625, 0.0185394287109375, -0.0594482421875, 0.0081024169921875, -0.023345947265625, -0.033966064453125, -0.02142333984375, 0.0230865478515625, -0.042877197265625, 0.021392822265625, 0.031768798828125, -0.052734375, 0.0225830078125, -0.044921875, 0.0210418701171875, -0.08416748046875, -0.0034503936767578125, -0.037017822265625, 0.0300445556640625, -0.039886474609375, 0.016143798828125, -0.04217529296875, 0.0082244873046875, -0.01074981689453125, 0.07415771484375, -0.0100555419921875, 0.0261077880859375, -0.002452850341796875, 0.0159454345703125, 0.0139617919921875, 0.021209716796875, -0.020233154296875, 0.03582763671875, 0.0394287109375, -0.052215576171875, 0.0714111328125, 0.039825439453125, 0.0002646446228027344, 0.07122802734375, -0.0265655517578125, 0.00787353515625, -0.0264892578125, 0.03759765625, -0.069091796875, -0.0209503173828125, 0.07220458984375, -0.038177490234375, 0.014862060546875, -0.00003898143768310547, -0.0226287841796875, -0.043548583984375, -0.0152130126953125, -0.004283905029296875, 0.031005859375, -0.035491943359375, 0.0701904296875, 0.028167724609375, -0.009185791015625, -0.049530029296875, -0.055938720703125, 0.022552490234375, -0.0215606689453125, -0.0308990478515625, 0.038818359375, -0.0438232421875, -0.01806640625, -0.025848388671875, -0.01316070556640625, -0.0195159912109375, -0.0029773712158203125, 0.053680419921875, 0.0070953369140625, -0.0175933837890625, -0.0118408203125, 0.03302001953125, -0.0211639404296875, 0.0140533447265625, -0.0219879150390625, 0.03704833984375, -0.0148468017578125, -0.0050506591796875, -0.005126953125, 0.05926513671875, 0.056182861328125, 0.008575439453125, 
0.03717041015625, 0.0266265869140625, -0.003627777099609375, -0.0029239654541015625, -0.015960693359375, -0.00344085693359375, -0.04083251953125, -0.0134124755859375, -0.01971435546875, -0.048065185546875, 0.03741455078125, -0.01383209228515625, 0.01215362548828125, 0.055511474609375, 0.033905029296875, -0.053253173828125, 0.0550537109375, 0.036834716796875, 0.016143798828125, 0.014495849609375, -0.030792236328125, 0.0162353515625, -0.041839599609375, -0.03570556640625, -0.007022857666015625, -0.042022705078125, -0.04083251953125, -0.01181793212890625, 0.0205230712890625, 0.0203857421875, -0.019134521484375, 0.016448974609375, -0.0372314453125, 0.028167724609375, 0.0289154052734375, -0.0132904052734375, -0.00347900390625, -0.003505706787109375, -0.0218963623046875, -0.019134521484375, -0.051239013671875, -0.050384521484375, 0.05364990234375, 0.0259857177734375, 0.05657958984375, 0.034576416015625, 0.06988525390625, 0.017791748046875, -0.00492095947265625, -0.032806396484375, 0.0557861328125, -0.0180816650390625, -0.0833740234375, -0.01293182373046875, -0.0307769775390625, -0.10235595703125, 0.0101470947265625, -0.00841522216796875, -0.061126708984375, 0.0350341796875, -0.00008147954940795898, -0.0601806640625, 0.0241546630859375, -0.06524658203125, 0.0601806640625, -0.033905029296875, -0.0083770751953125, -0.00518035888671875, -0.08575439453125, 0.02276611328125, 0.0219573974609375, 0.00543975830078125, 0.0223541259765625, -0.027130126953125, 0.05767822265625, -0.049591064453125, 0.05731201171875, 0.0245513916015625, 0.00518798828125, 0.036834716796875, 0.01824951171875, 0.014495849609375, 0.003574371337890625, 0.003204345703125, -0.030975341796875, 0.03857421875, -0.0195159912109375, -0.0443115234375, 0.06561279296875, -0.0758056640625, -0.0377197265625, -0.06829833984375, -0.0194244384765625, 0.0195159912109375, 0.0247650146484375, 0.049041748046875, 0.0279541015625, -0.02252197265625, 0.0020084381103515625, 0.036346435546875, -0.01123046875, 0.0379638671875, 
0.0162506103515625, -0.032379150390625, -0.055511474609375, 0.058929443359375, 0.034576416015625, 0.00687408447265625, 0.02392578125, 0.0213165283203125, -0.019775390625, 0.007808685302734375, -0.03924560546875, 0.045928955078125, -0.056365966796875, 0.007457733154296875, -0.058197021484375, -0.003925323486328125, -0.01428985595703125, -0.04736328125, 0.00016438961029052734, -0.0081024169921875, -0.035430908203125, 0.0025730133056640625, 0.036895751953125, 0.05078125, 0.00037169456481933594, 0.043426513671875, -0.055389404296875, 0.04248046875, 0.03692626953125, 0.01727294921875, -0.01024627685546875, -0.030853271484375, 0.005252838134765625, 0.0065155029296875, -0.051544189453125, -0.0777587890625, 0.032073974609375, 0.0016927719116210938, 0.0181732177734375, 0.027984619140625, 0.023681640625, 0.05584716796875, -0.033203125, 0.06976318359375, 0.0143890380859375, -0.0277252197265625, 0.0162506103515625, -0.0205078125, 0.006443023681640625, 0.0309600830078125, 0.015380859375, -0.0582275390625, -0.0226898193359375, -0.08953857421875, -0.06536865234375, 0.055419921875, -0.0004291534423828125, 0.025482177734375, -0.0016031265258789062, 0.037750244140625, 0.0037364959716796875, 0.024566650390625, -0.05010986328125, -0.06683349609375, -0.01397705078125, 0.038238525390625, -0.0174713134765625, -0.01513671875, -0.042999267578125, -0.01314544677734375, 0.048583984375, 0.0162353515625, 0.049041748046875, 0.00024437904357910156, 0.0171051025390625, -0.013763427734375, 0.0238189697265625, 0.060089111328125, 0.05499267578125, -0.0157318115234375, 0.0162353515625, 0.01617431640625, -0.0467529296875, 0.0252838134765625, 0.01433563232421875, 0.004261016845703125, 0.005191802978515625, 0.037078857421875, 0.05584716796875, -0.0108795166015625, -0.03668212890625, 0.03143310546875, -0.033599853515625, -0.0211639404296875, -0.03948974609375, 0.0208892822265625, 0.00473785400390625, 0.048431396484375, 0.013153076171875, -0.0021419525146484375, -0.004520416259765625, -0.0704345703125, 
0.0058441162109375, 0.01403045654296875, -0.035614013671875, 0.0011692047119140625, 0.054656982421875, 0.002017974853515625, -0.044097900390625, -0.0003266334533691406, -0.040069580078125, -0.0435791015625, 0.05194091796875, 0.043426513671875, 0.067626953125, -0.00405120849609375, 0.049285888671875, 0.02032470703125, 0.0184173583984375, 0.0177459716796875, 0.0279541015625, -0.01468658447265625, -0.0094757080078125, -0.0003650188446044922, -0.042755126953125, -0.0297088623046875, 0.013275146484375, -0.0235748291015625, -0.00733184814453125, -0.058349609375, -0.01751708984375, -0.0285797119140625, 0.01264190673828125, -0.0244293212890625, 0.0245513916015625, 0.002964019775390625, 0.049041748046875, -0.03875732421875, 0.0152587890625, 0.03173828125, -0.004375457763671875, -0.059326171875, 0.016357421875, -0.00852203369140625, -0.027252197265625, 0.041900634765625, -0.0199127197265625, -0.013916015625, -0.0024929046630859375, -0.0249481201171875, -0.047271728515625, 0.0875244140625, 0.01081085205078125, -0.0140838623046875, -0.005321502685546875, -0.00963592529296875, 0.0570068359375, -0.05059814453125, 0.0180816650390625, 0.045654296875, 0.041656494140625, 0.0247650146484375, -0.0175323486328125, -0.01092529296875, 0.002056121826171875, 0.0011491775512695312, 0.01004791259765625, -0.057464599609375, 0.06072998046875, -0.0228118896484375, -0.0249176025390625, 0.033111572265625, 0.04510498046875, -0.0087432861328125, 0.03253173828125, 0.0231170654296875, 0.033203125, 0.036865234375, -0.01593017578125, 0.09991455078125, -0.0183868408203125, 0.0273284912109375, 0.04010009765625, 0.00650787353515625, 0.0251007080078125, 0.0264892578125, -0.00220489501953125, 0.0291290283203125, 0.056488037109375, 0.000026345252990722656, 0.0570068359375, 0.00827789306640625, -0.037811279296875, 0.01222991943359375, -0.0050811767578125, -0.061676025390625, 0.01282501220703125, -0.004791259765625, -0.03173828125, -0.028045654296875, -0.0021305084228515625, 0.0178985595703125, 
-0.0026645660400390625, -0.0156402587890625, 0.047760009765625, 0.019866943359375, -0.054534912109375, 0.05731201171875, -0.0005855560302734375, 0.0289154052734375, -0.07391357421875, 0.0126190185546875, -0.007556915283203125, 0.0185089111328125, -0.0100555419921875, -0.05047607421875, -0.0025615692138671875, -0.015655517578125, -0.0251922607421875, -0.040283203125, 0.0269927978515625, -0.0290374755859375, -0.018768310546875, 0.0262908935546875, 0.025909423828125, 0.0252227783203125, 0.04620361328125, -0.058074951171875, -0.006526947021484375, 0.0009946823120117188, -0.017242431640625, 0.023223876953125, 0.043426513671875, 0.031494140625, 0.051025390625, 0.047943115234375, 0.0299224853515625, -0.0088348388671875, -0.051910400390625, 0.0537109375, -0.039764404296875, -0.045562744140625, -0.06103515625, 0.0223236083984375, -0.0056610107421875, -0.0657958984375, 0.03509521484375, 0.04644775390625, 0.039642333984375, -0.0228271484375, 0.050201416015625, -0.03436279296875, 0.045166015625, -0.0248870849609375, 0.059417724609375, -0.046722412109375, -0.0005626678466796875, -0.00872802734375, -0.06671142578125, 0.0014371871948242188, 0.0408935546875, -0.056671142578125, -0.005435943603515625, 0.0704345703125, 0.0628662109375, 0.0201568603515625, -0.01560211181640625, 0.0277099609375, 0.02783203125, 0.0272369384765625, 0.0304718017578125, 0.0751953125, -0.05206298828125, 0.05047607421875, -0.0211029052734375, -0.0012025833129882812, -0.045166015625, -0.034027099609375, -0.056915283203125, -0.056365966796875, -0.0029735565185546875, -0.06561279296875, 0.0241546630859375, 0.06292724609375, 0.0256195068359375, -0.00991058349609375, -0.034454345703125, 0.0241546630859375, 0.02862548828125, -0.035491943359375, -0.00713348388671875, 0.007678985595703125, 0.0203857421875, -0.0277252197265625, 0.0299072265625, 0.01690673828125, 0.038238525390625, 0.01062774658203125, -0.028289794921875, -0.0095367431640625, 0.02435302734375, 0.03375244140625, 0.05657958984375, -0.059600830078125, 
-0.0200958251953125, 0.00374603271484375, -0.0292510986328125, -0.007110595703125, 0.0501708984375, -0.033416748046875, 0.059417724609375, 0.0145111083984375, 0.0266265869140625, 0.03955078125, -0.00547027587890625, 0.03558349609375, -0.005214691162109375, -0.02557373046875, 0.043121337890625, 0.01552581787109375, -0.0188751220703125, -0.049041748046875, 0.034088134765625, 0.02886962890625, -0.04901123046875, -0.056396484375, 0.0227508544921875, -0.07330322265625, -0.0191802978515625, 0.0689697265625, -0.0009512901306152344, -0.014434814453125, -0.0063934326171875, -0.0562744140625, -0.00004464387893676758, -0.0214385986328125, 0.044189453125, 0.0947265625, -0.006511688232421875, 0.0212554931640625, -0.058929443359375, 0.038848876953125, 0.0234375, -0.03216552734375, 0.00592041015625, 0.0235137939453125, -0.006740570068359375, 0.030242919921875, 0.04498291015625, 0.0190582275390625, 0.033416748046875, 0.0133514404296875, -0.005863189697265625, -0.0017604827880859375, -0.013031005859375, -0.0210418701171875, 0.0298309326171875, -0.022857666015625, -0.01953125 ] ]
GOAT-AI/GOAT-7B-Community
2023-07-26T06:50:28.000Z
[ "transformers", "pytorch", "llama", "text-generation", "facebook", "meta", "llama-2", "license:llama2", "endpoints_compatible", "has_space", "text-generation-inference", "region:us" ]
text-generation
GOAT-AI
null
null
GOAT-AI/GOAT-7B-Community
34
5,848
transformers
2023-07-24T11:37:02
--- license: llama2 model_type: llama tags: - facebook - meta - pytorch - llama - llama-2 --- # GOAT-7B-Community model ![GOAT-7B-Community](https://api-adaptive-li.s3.us-west-2.amazonaws.com/goat-ai/Comp+2_00000.png) GOAT-7B-Community model is supervised finetuned (SFT) version of LLaMA 2 developed by GOAT.AI lab on user-shared conversations from GoatChat app. # Model description - **Base Architecture:** LLaMA 2 7B flavour - **Dataset size:** 72K multi-turn dialogues - **License:** llama2 - **Context window length:** 4096 tokens ### Learn more - **Blog:** https://www.blog.goat.ai/goat-7b-community-tops-among-7b-models/ - **Paper:** Coming soon - **Demo:** https://3f3fb57083197123c8.gradio.live/ ## Uses The main purpose of GOAT-7B-Community is to facilitate research on large language models and chatbots. It is specifically designed for researchers and hobbyists working in the fields of natural language processing, machine learning, and artificial intelligence. ## Usage Usage can be either self-hosted via `transformers` or used with Spaces ``` import torch from transformers import AutoTokenizer, AutoModelForCausalLM model_name = "GOAT-AI/GOAT-7B-Community" tokenizer = AutoTokenizer.from_pretrained(model_name) model = AutoModelForCausalLM.from_pretrained( model_name, torch_dtype=torch.bfloat16 ) ``` ## Training dataset Training dataset was collected from users conversations with GoatChat app and OpenAssistant. We will not release the dataset. ## Evaluation GOAT-7B-Community model is evaluated against common metrics for evaluating language models, including MMLU and BigBench Hard. We still continue to evaluate all our models and will share details soon. - **MMLU:** 49.31 - **BBH:** 35.7 ## License GOAT-7B-Community model is based on [Meta's LLaMA-2-7b-hf](https://huggingface.co/meta-llama/Llama-2-7b-hf), and using own datasets. GOAT-7B-Community model weights are available under LLAMA-2 license. 
Note that the GOAT-7B-Community model weights require access to the LLaMA-2 model weighs. The GOAT-7B-Community model is based on LLaMA-2 and should be used according to the LLaMA-2 license. ### Risks and Biases GOAT-7B-Community model can produce factually incorrect output and should not be relied on to deliver factually accurate information. The model was trained on various private and public datasets. Therefore, the GOAT-7B-Community model could possibly generate wrong, biased, or otherwise offensive outputs.
2,494
[ [ -0.03216552734375, -0.07220458984375, 0.006412506103515625, 0.02386474609375, -0.0305938720703125, 0.0174407958984375, -0.01529693603515625, -0.059173583984375, 0.036163330078125, 0.040252685546875, -0.038726806640625, -0.042022705078125, -0.05450439453125, 0.0047149658203125, -0.01885986328125, 0.09197998046875, 0.024505615234375, -0.0091705322265625, 0.006656646728515625, -0.01983642578125, -0.0489501953125, -0.036376953125, -0.03955078125, -0.036468505859375, 0.038360595703125, 0.0258636474609375, 0.05047607421875, 0.05999755859375, 0.039398193359375, 0.0273590087890625, -0.023345947265625, 0.007251739501953125, -0.06475830078125, -0.0237884521484375, -0.005672454833984375, -0.014007568359375, -0.057098388671875, -0.01308441162109375, 0.0243072509765625, 0.030487060546875, -0.041656494140625, 0.05322265625, -0.0170135498046875, 0.030914306640625, -0.0121917724609375, 0.03289794921875, -0.07073974609375, -0.034027099609375, -0.02862548828125, 0.0343017578125, -0.01230621337890625, -0.020172119140625, -0.00859832763671875, -0.032958984375, 0.00720977783203125, 0.01239013671875, 0.06317138671875, 0.016754150390625, -0.0176849365234375, -0.0229949951171875, -0.0178680419921875, 0.05950927734375, -0.080078125, 0.00843048095703125, 0.03460693359375, 0.0287933349609375, -0.041839599609375, -0.040985107421875, -0.06494140625, -0.01007843017578125, -0.00005882978439331055, -0.003993988037109375, -0.012420654296875, -0.0183868408203125, 0.0030879974365234375, 0.0260162353515625, -0.0237579345703125, 0.00785064697265625, -0.061187744140625, -0.01134490966796875, 0.04241943359375, 0.00937652587890625, 0.01146697998046875, -0.02386474609375, -0.032928466796875, -0.0067901611328125, -0.036865234375, 0.015716552734375, 0.0325927734375, 0.0084075927734375, -0.0221099853515625, 0.031341552734375, -0.0347900390625, 0.039764404296875, 0.0031490325927734375, -0.029693603515625, 0.042449951171875, -0.017608642578125, -0.02154541015625, -0.007293701171875, 0.063232421875, 
0.0321044921875, -0.007450103759765625, 0.01093292236328125, -0.013031005859375, 0.017547607421875, -0.004634857177734375, -0.056182861328125, -0.013092041015625, -0.0015783309936523438, -0.050872802734375, -0.035430908203125, -0.01251983642578125, -0.043731689453125, -0.0205078125, 0.004154205322265625, 0.0162353515625, -0.001789093017578125, -0.050384521484375, 0.023406982421875, 0.00009441375732421875, 0.0281829833984375, 0.024383544921875, -0.061065673828125, 0.042633056640625, 0.041778564453125, 0.047271728515625, -0.020782470703125, -0.01763916015625, -0.005401611328125, 0.01291656494140625, -0.01214599609375, 0.0552978515625, -0.0369873046875, -0.029388427734375, -0.020233154296875, 0.00995635986328125, 0.01490020751953125, -0.0478515625, 0.042572021484375, -0.04974365234375, -0.0152587890625, -0.01812744140625, -0.01934814453125, -0.0362548828125, 0.0220947265625, -0.042572021484375, 0.09088134765625, -0.00945281982421875, -0.04266357421875, 0.007511138916015625, -0.04925537109375, -0.01309967041015625, -0.0164642333984375, 0.00812530517578125, -0.0262451171875, -0.0214996337890625, 0.01265716552734375, 0.0323486328125, -0.02862548828125, 0.052032470703125, -0.049896240234375, -0.013458251953125, 0.0294342041015625, -0.0265350341796875, 0.0733642578125, 0.01230621337890625, -0.0141448974609375, 0.0110321044921875, -0.055816650390625, -0.0209197998046875, 0.01157379150390625, -0.0278778076171875, 0.00930023193359375, 0.006092071533203125, -0.00013256072998046875, 0.0066070556640625, 0.042510986328125, -0.0496826171875, 0.00336456298828125, -0.025848388671875, 0.0311737060546875, 0.054229736328125, -0.0128631591796875, 0.0007147789001464844, -0.043060302734375, 0.0196685791015625, 0.00762176513671875, 0.0279693603515625, 0.01715087890625, -0.0760498046875, -0.050811767578125, -0.0172882080078125, 0.009857177734375, 0.04608154296875, -0.0272674560546875, 0.04595947265625, -0.009735107421875, -0.06280517578125, -0.0362548828125, 0.0246734619140625, 
0.043426513671875, 0.04058837890625, 0.0281524658203125, -0.04437255859375, -0.0161895751953125, -0.0643310546875, 0.0208740234375, -0.029815673828125, 0.0208740234375, 0.016510009765625, 0.035614013671875, -0.035888671875, 0.0543212890625, -0.02960205078125, -0.0219268798828125, -0.043792724609375, -0.00452423095703125, 0.034027099609375, 0.040679931640625, 0.053436279296875, -0.04290771484375, -0.0260467529296875, -0.0031948089599609375, -0.038604736328125, -0.019775390625, 0.0281982421875, -0.0322265625, 0.0190887451171875, -0.002185821533203125, -0.060333251953125, 0.0504150390625, 0.0604248046875, -0.01097869873046875, 0.03265380859375, 0.005329132080078125, 0.008697509765625, -0.08819580078125, -0.01428985595703125, -0.01175689697265625, -0.017822265625, -0.0308074951171875, -0.00928497314453125, -0.0258026123046875, 0.017913818359375, -0.02593994140625, 0.0537109375, -0.0238800048828125, -0.0157470703125, -0.024505615234375, 0.0188140869140625, -0.0118560791015625, 0.0565185546875, -0.01303863525390625, 0.039337158203125, 0.03753662109375, -0.038726806640625, 0.04547119140625, 0.0537109375, -0.0303192138671875, -0.00951385498046875, -0.057464599609375, 0.0212554931640625, 0.0176544189453125, 0.050933837890625, -0.06689453125, -0.0310821533203125, 0.041595458984375, -0.06378173828125, 0.004909515380859375, 0.00782012939453125, -0.04071044921875, -0.0283966064453125, -0.022705078125, 0.044525146484375, 0.0450439453125, -0.04241943359375, 0.0167083740234375, 0.049468994140625, -0.005115509033203125, -0.05865478515625, -0.047576904296875, -0.0006957054138183594, -0.0178680419921875, -0.052032470703125, 0.0032939910888671875, -0.0255126953125, -0.0085601806640625, 0.00872039794921875, 0.010406494140625, -0.01092529296875, 0.00684356689453125, 0.0222930908203125, 0.01837158203125, -0.00798797607421875, 0.00592803955078125, 0.007038116455078125, 0.00649261474609375, -0.003955841064453125, 0.02117919921875, 0.047637939453125, -0.01482391357421875, 
-0.029388427734375, -0.041839599609375, 0.005916595458984375, 0.009979248046875, -0.023895263671875, 0.06512451171875, 0.045745849609375, -0.034088134765625, 0.0012750625610351562, -0.07318115234375, -0.0025577545166015625, -0.036773681640625, 0.037139892578125, -0.0026226043701171875, -0.0731201171875, 0.06298828125, 0.007595062255859375, 0.01041412353515625, 0.041656494140625, 0.0760498046875, 0.017974853515625, 0.054931640625, 0.039886474609375, -0.02197265625, 0.03741455078125, 0.005401611328125, -0.0006923675537109375, -0.057586669921875, -0.027740478515625, -0.03271484375, -0.0306243896484375, -0.047332763671875, -0.0362548828125, 0.0030460357666015625, 0.012237548828125, -0.0283355712890625, 0.0285797119140625, -0.030914306640625, 0.03515625, 0.03680419921875, 0.017059326171875, 0.0214996337890625, -0.004741668701171875, 0.01323699951171875, 0.0217132568359375, -0.035369873046875, -0.06280517578125, 0.0853271484375, 0.035888671875, 0.050750732421875, 0.037200927734375, 0.04364013671875, 0.0330810546875, 0.05938720703125, -0.050384521484375, 0.040130615234375, -0.002376556396484375, -0.08184814453125, -0.0271453857421875, -0.0182037353515625, -0.064453125, 0.00789642333984375, -0.01267242431640625, -0.07440185546875, 0.005786895751953125, 0.00649261474609375, -0.0038852691650390625, 0.0264892578125, -0.04608154296875, 0.06097412109375, 0.01082611083984375, 0.005710601806640625, -0.03070068359375, -0.05780029296875, 0.056182861328125, -0.0093994140625, 0.03521728515625, -0.0400390625, 0.01262664794921875, 0.05548095703125, -0.039276123046875, 0.08843994140625, -0.00742340087890625, 0.018280029296875, 0.052337646484375, -0.005115509033203125, 0.0259552001953125, 0.01580810546875, -0.0203857421875, 0.033447265625, 0.007648468017578125, -0.035430908203125, -0.0113983154296875, 0.052642822265625, -0.0887451171875, -0.05804443359375, -0.01059722900390625, -0.00951385498046875, -0.0053558349609375, 0.0030517578125, -0.004878997802734375, 0.03924560546875, 
-0.0121917724609375, 0.0270538330078125, 0.038787841796875, -0.038909912109375, 0.0232696533203125, 0.034820556640625, -0.03631591796875, -0.05389404296875, 0.052337646484375, -0.018096923828125, 0.02862548828125, 0.005939483642578125, 0.021087646484375, -0.01483154296875, -0.03759765625, -0.05169677734375, 0.025482177734375, -0.054412841796875, -0.04473876953125, -0.039764404296875, -0.01235198974609375, -0.045562744140625, 0.0189666748046875, -0.0206756591796875, -0.0269317626953125, -0.035308837890625, -0.023345947265625, 0.048858642578125, 0.0767822265625, -0.0111541748046875, 0.056365966796875, -0.05865478515625, 0.0277557373046875, 0.01114654541015625, 0.0237579345703125, 0.00800323486328125, -0.06146240234375, -0.01519012451171875, 0.00551605224609375, -0.03753662109375, -0.0770263671875, 0.037506103515625, 0.0162200927734375, 0.052154541015625, 0.0178680419921875, 0.0005478858947753906, 0.04412841796875, -0.0256500244140625, 0.061187744140625, 0.021636962890625, -0.06512451171875, 0.024200439453125, -0.03692626953125, -0.00677490234375, 0.03167724609375, -0.0031757354736328125, -0.02593994140625, -0.0310821533203125, -0.040069580078125, -0.033294677734375, 0.0537109375, 0.0162506103515625, 0.03759765625, -0.025726318359375, 0.034881591796875, 0.023040771484375, 0.022796630859375, -0.0806884765625, -0.028472900390625, -0.028472900390625, -0.0285491943359375, 0.01065826416015625, -0.0264892578125, 0.0024204254150390625, -0.0243072509765625, 0.0596923828125, 0.00812530517578125, 0.01036834716796875, 0.005084991455078125, -0.00469970703125, -0.0106964111328125, 0.0018720626831054688, 0.042724609375, 0.050628662109375, -0.007904052734375, -0.0235443115234375, 0.027587890625, -0.0433349609375, -0.006175994873046875, -0.00440216064453125, 0.006183624267578125, -0.01102447509765625, 0.01953125, 0.060333251953125, 0.005462646484375, -0.045135498046875, 0.04595947265625, 0.0059967041015625, -0.014556884765625, -0.0201568603515625, 0.0136566162109375, 0.0281982421875, 
0.044158935546875, 0.0163421630859375, -0.007091522216796875, 0.0220794677734375, -0.032379150390625, -0.006923675537109375, 0.0194244384765625, -0.0125579833984375, -0.033233642578125, 0.04638671875, 0.01512908935546875, -0.019256591796875, 0.028717041015625, -0.038818359375, -0.03924560546875, 0.0693359375, 0.04693603515625, 0.04058837890625, -0.0130767822265625, 0.02117919921875, 0.03265380859375, 0.0271453857421875, -0.01934814453125, 0.00762176513671875, 0.0160064697265625, -0.06890869140625, -0.03125, -0.07421875, -0.031646728515625, 0.0086669921875, -0.0357666015625, 0.0296630859375, -0.04388427734375, -0.034088134765625, -0.01849365234375, 0.0177764892578125, -0.036163330078125, 0.0121917724609375, 0.004611968994140625, 0.0670166015625, -0.044921875, 0.082763671875, 0.04852294921875, -0.026397705078125, -0.08160400390625, -0.03338623046875, -0.005126953125, -0.07464599609375, 0.043914794921875, 0.0136871337890625, -0.016571044921875, 0.00040221214294433594, -0.07977294921875, -0.07745361328125, 0.09771728515625, 0.009765625, -0.0247650146484375, 0.03265380859375, -0.007633209228515625, 0.039093017578125, -0.031036376953125, 0.0134735107421875, 0.028900146484375, 0.0232696533203125, 0.034942626953125, -0.09112548828125, 0.003955841064453125, -0.0226287841796875, -0.00995635986328125, -0.00887298583984375, -0.09991455078125, 0.071044921875, -0.00984954833984375, 0.006336212158203125, 0.02862548828125, 0.07354736328125, 0.0440673828125, 0.027252197265625, 0.03314208984375, 0.03863525390625, 0.04498291015625, 0.014312744140625, 0.04638671875, -0.0099029541015625, 0.03460693359375, 0.0706787109375, -0.0131988525390625, 0.07891845703125, 0.000010251998901367188, -0.0175018310546875, 0.0450439453125, 0.07061767578125, -0.00423431396484375, 0.03656005859375, 0.0250244140625, -0.007442474365234375, -0.01690673828125, -0.0166015625, -0.03375244140625, 0.0259857177734375, 0.037506103515625, -0.0004029273986816406, 0.0043487548828125, -0.0292205810546875, 
0.0027828216552734375, -0.0081787109375, -0.019927978515625, 0.054290771484375, 0.0109710693359375, -0.0218963623046875, 0.03656005859375, 0.0161895751953125, 0.063720703125, -0.033294677734375, -0.007480621337890625, -0.0369873046875, -0.0002989768981933594, -0.00859832763671875, -0.0304718017578125, 0.0216827392578125, 0.0064849853515625, -0.0120391845703125, 0.00882720947265625, 0.062042236328125, -0.01508331298828125, -0.05096435546875, 0.0305328369140625, 0.0292816162109375, 0.03814697265625, 0.0225067138671875, -0.06146240234375, 0.0285186767578125, -0.003864288330078125, -0.036102294921875, 0.0201416015625, 0.043914794921875, -0.0203704833984375, 0.062347412109375, 0.0469970703125, 0.003467559814453125, 0.0179443359375, -0.0056304931640625, 0.08062744140625, -0.046478271484375, -0.00955963134765625, -0.05462646484375, 0.01371002197265625, 0.0003666877746582031, -0.02587890625, 0.0252227783203125, 0.031280517578125, 0.0684814453125, 0.004993438720703125, 0.0439453125, -0.0160064697265625, 0.059906005859375, -0.02056884765625, 0.053924560546875, -0.036285400390625, 0.0232391357421875, -0.00835418701171875, -0.05804443359375, -0.00025463104248046875, 0.038238525390625, -0.0177459716796875, 0.01068115234375, 0.029693603515625, 0.07135009765625, 0.00001823902130126953, -0.0025157928466796875, 0.02301025390625, 0.027984619140625, 0.0261383056640625, 0.02532958984375, 0.051971435546875, -0.0176544189453125, 0.056396484375, -0.00209808349609375, -0.0328369140625, -0.012969970703125, -0.056884765625, -0.08782958984375, -0.0183258056640625, -0.01262664794921875, -0.01468658447265625, -0.00591278076171875, 0.0745849609375, 0.033416748046875, -0.05609130859375, -0.04010009765625, 0.03125, 0.016632080078125, 0.00911712646484375, -0.01007843017578125, 0.0127105712890625, -0.0029697418212890625, -0.03839111328125, 0.0079345703125, -0.0008845329284667969, -0.0008649826049804688, -0.0250396728515625, -0.007129669189453125, -0.0168609619140625, 0.0030670166015625, 
0.035552978515625, 0.012176513671875, -0.0789794921875, -0.024261474609375, -0.003936767578125, -0.0164337158203125, 0.008056640625, 0.0005893707275390625, -0.04608154296875, -0.0016078948974609375, 0.0172271728515625, 0.0477294921875, 0.041778564453125, 0.0138702392578125, 0.018829345703125, -0.01885986328125, 0.01104736328125, -0.0222625732421875, 0.0177154541015625, 0.03564453125, -0.014923095703125, 0.03338623046875, 0.027679443359375, -0.055755615234375, -0.03955078125, 0.00875091552734375, -0.0970458984375, -0.00975799560546875, 0.09527587890625, -0.0159454345703125, -0.041046142578125, 0.00438690185546875, -0.03369140625, 0.033111572265625, -0.056396484375, 0.06707763671875, 0.059906005859375, 0.00717926025390625, 0.00565338134765625, -0.0736083984375, 0.0263214111328125, 0.004444122314453125, -0.06341552734375, -0.007236480712890625, 0.047088623046875, 0.034454345703125, -0.002902984619140625, 0.062469482421875, 0.005138397216796875, -0.0055694580078125, -0.0180816650390625, 0.01654052734375, 0.0027008056640625, 0.004497528076171875, 0.00632476806640625, -0.01678466796875, 0.00916290283203125, -0.01251220703125 ] ]
pankajmathur/model_007_preview
2023-08-25T23:49:50.000Z
[ "transformers", "pytorch", "llama", "text-generation", "en", "dataset:garage-bAInd/Open-Platypus", "dataset:tatsu-lab/alpaca", "dataset:ehartford/dolphin", "dataset:WizardLM/WizardLM_evol_instruct_70k", "dataset:databricks/databricks-dolly-15k", "dataset:psmathur/orca_mini_v1_dataset", "dataset:psmathur/WizardLM_Orca", "dataset:psmathur/alpaca_orca", "dataset:psmathur/dolly-v2_orca", "arxiv:2306.02707", "license:llama2", "endpoints_compatible", "text-generation-inference", "region:us" ]
text-generation
pankajmathur
null
null
pankajmathur/model_007_preview
1
5,847
transformers
2023-08-10T06:28:09
--- language: - en library_name: transformers license: llama2 datasets: - garage-bAInd/Open-Platypus - tatsu-lab/alpaca - ehartford/dolphin - WizardLM/WizardLM_evol_instruct_70k - databricks/databricks-dolly-15k - psmathur/orca_mini_v1_dataset - psmathur/WizardLM_Orca - psmathur/alpaca_orca - psmathur/dolly-v2_orca --- It's a preview version of psmathur/model_007, Sorry for the naming confusion. A hybrid (explain + instruct) style Llama2-70b model, Pleae check examples below for both style prompts, Here is the list of datasets used: * Open-Platypus * Alpaca * WizardLM * Dolly-V2 * Dolphin Samples (~200K) * Orca_minis_v1 * Alpaca_orca * WizardLM_orca * Dolly-V2_orca <br> **P.S. If you're interested to collaborate, please connect with me at www.linkedin.com/in/pankajam.** <br> ### quantized versions <br> #### license disclaimer: This model is bound by the license & usage restrictions of the original Llama-2 model. And comes with no warranty or gurantees of any kind. <br> ## Evaluation We evaluated model_007_preview on a wide range of tasks using [Language Model Evaluation Harness](https://github.com/EleutherAI/lm-evaluation-harness) from EleutherAI. Here are the results on metrics used by [HuggingFaceH4 Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard) ||||| |:------:|:--------:|:-------:|:--------:| |**Task**|**Metric**|**Value**|**Stderr**| |*arc_challenge*|acc_norm|0.7142|0.0141| |*hellaswag*|acc_norm|0.8731|0.0038| |*mmlu*|acc_norm|0.6858|0.0351| |*truthfulqa_mc*|mc2|0.6265|0.0157| |**Total Average**|-|**0.7249**|| <br> ## Example Usage Here is the Orca prompt format ``` ### System: You are an AI assistant that follows instruction extremely well. Help as much as you can. ### User: Tell me about Orcas. 
### Assistant: ``` Below shows a code example on how to use this model ```python import torch from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline tokenizer = AutoTokenizer.from_pretrained("psmathur/model_007_preview") model = AutoModelForCausalLM.from_pretrained( "psmathur/model_007_preview", torch_dtype=torch.float16, load_in_8bit=True, low_cpu_mem_usage=True, device_map="auto" ) system_prompt = "### System:\nYou are an AI assistant that follows instruction extremely well. Help as much as you can.\n\n" #generate text steps instruction = "Tell me about Orcas." prompt = f"{system_prompt}### User: {instruction}\n\n### Assistant:\n" inputs = tokenizer(prompt, return_tensors="pt").to("cuda") output = model.generate(**inputs, do_sample=True, top_p=0.95, top_k=0, max_new_tokens=4096) print(tokenizer.decode(output[0], skip_special_tokens=True)) ``` Here is the Alpaca prompt format ``` ### User: Tell me about Alpacas. ### Assistant: ``` Below shows a code example on how to use this model ```python import torch from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline tokenizer = AutoTokenizer.from_pretrained("psmathur/model_007_preview") model = AutoModelForCausalLM.from_pretrained( "psmathur/model_007_preview", torch_dtype=torch.float16, load_in_8bit=True, low_cpu_mem_usage=True, device_map="auto" ) #generate text steps instruction = "Tell me about Alpacas." prompt = f"### User: {instruction}\n\n### Assistant:\n" inputs = tokenizer(prompt, return_tensors="pt").to("cuda") output = model.generate(**inputs, do_sample=True, top_p=0.95, top_k=0, max_new_tokens=4096) print(tokenizer.decode(output[0], skip_special_tokens=True)) ``` <br> #### Limitations & Biases: While this model aims for accuracy, it can occasionally produce inaccurate or misleading results. Despite diligent efforts in refining the pretraining data, there remains a possibility for the generation of inappropriate, biased, or offensive content. 
Exercise caution and cross-check information when necessary. <br> ### Citiation: Please kindly cite using the following BibTeX: ``` @misc{model_007_preview, author = {Pankaj Mathur}, title = {model_007_preview: A hybrid (explain + instruct) style Llama2-70b model}, year = {2023}, publisher = {HuggingFace}, journal = {HuggingFace repository}, howpublished = {\url{https://https://huggingface.co/psmathur/model_007_preview}, } ``` ``` @misc{mukherjee2023orca, title={Orca: Progressive Learning from Complex Explanation Traces of GPT-4}, author={Subhabrata Mukherjee and Arindam Mitra and Ganesh Jawahar and Sahaj Agarwal and Hamid Palangi and Ahmed Awadallah}, year={2023}, eprint={2306.02707}, archivePrefix={arXiv}, primaryClass={cs.CL} } ``` ``` @software{touvron2023llama2, title={Llama 2: Open Foundation and Fine-Tuned Chat Models}, author={Hugo Touvron, Louis Martin, Kevin Stone, Peter Albert, Amjad Almahairi, Yasmine Babaei, Nikolay Bashlykov, Soumya Batra, Prajjwal Bhargava, Shruti Bhosale, Dan Bikel, Lukas Blecher, Cristian Canton Ferrer, Moya Chen, Guillem Cucurull, David Esiobu, Jude Fernandes, Jeremy Fu, Wenyin Fu, Brian Fuller, Cynthia Gao, Vedanuj Goswami, Naman Goyal, Anthony Hartshorn, Saghar Hosseini, Rui Hou, Hakan Inan, Marcin Kardas, Viktor Kerkez Madian Khabsa, Isabel Kloumann, Artem Korenev, Punit Singh Koura, Marie-Anne Lachaux, Thibaut Lavril, Jenya Lee, Diana Liskovich, Yinghai Lu, Yuning Mao, Xavier Martinet, Todor Mihaylov, Pushkar Mishra, Igor Molybog, Yixin Nie, Andrew Poulton, Jeremy Reizenstein, Rashi Rungta, Kalyan Saladi, Alan Schelten, Ruan Silva, Eric Michael Smith, Ranjan Subramanian, Xiaoqing Ellen Tan, Binh Tang, Ross Taylor, Adina Williams, Jian Xiang Kuan, Puxin Xu , Zheng Yan, Iliyan Zarov, Yuchen Zhang, Angela Fan, Melanie Kambadur, Sharan Narang, Aurelien Rodriguez, Robert Stojnic, Sergey Edunov, Thomas Scialom}, year={2023} } ```
5,763
[ [ -0.0227813720703125, -0.062225341796875, 0.01419830322265625, 0.01105499267578125, -0.0224609375, 0.006351470947265625, -0.001667022705078125, -0.031951904296875, 0.0196380615234375, 0.0214385986328125, -0.057769775390625, -0.040985107421875, -0.0394287109375, -0.0028400421142578125, -0.02294921875, 0.087158203125, -0.01006317138671875, -0.012054443359375, 0.007190704345703125, -0.0032253265380859375, -0.0282135009765625, -0.03143310546875, -0.051971435546875, -0.03106689453125, 0.01296234130859375, 0.01508331298828125, 0.0478515625, 0.044219970703125, 0.02850341796875, 0.024383544921875, -0.017547607421875, 0.02117919921875, -0.0340576171875, -0.0164337158203125, 0.01806640625, -0.044525146484375, -0.0721435546875, 0.0111541748046875, 0.029296875, 0.019866943359375, -0.01087188720703125, 0.0316162109375, 0.0081634521484375, 0.0186004638671875, -0.031005859375, 0.030364990234375, -0.023651123046875, 0.00372314453125, -0.0287322998046875, -0.00775146484375, -0.01361083984375, -0.01312255859375, 0.00852203369140625, -0.057830810546875, 0.016326904296875, -0.004360198974609375, 0.0908203125, 0.0190277099609375, -0.026031494140625, -0.0188446044921875, -0.03814697265625, 0.0543212890625, -0.0732421875, 0.00504302978515625, 0.018524169921875, 0.026397705078125, -0.021240234375, -0.06915283203125, -0.0634765625, -0.009429931640625, -0.01097869873046875, 0.019378662109375, -0.0203399658203125, 0.0014896392822265625, 0.0258636474609375, 0.03448486328125, -0.042388916015625, 0.004058837890625, -0.044891357421875, -0.022918701171875, 0.041656494140625, 0.029632568359375, 0.02044677734375, -0.0156707763671875, -0.029144287109375, -0.0205078125, -0.04656982421875, 0.0227813720703125, 0.035430908203125, 0.00904083251953125, -0.041778564453125, 0.04931640625, -0.015716552734375, 0.05523681640625, 0.0126495361328125, -0.02935791015625, 0.042877197265625, -0.029266357421875, -0.030303955078125, -0.006954193115234375, 0.0687255859375, 0.022705078125, 0.00498199462890625, 
0.015655517578125, -0.006481170654296875, 0.017852783203125, -0.006587982177734375, -0.05987548828125, -0.027099609375, 0.018310546875, -0.034698486328125, -0.03228759765625, -0.005584716796875, -0.05975341796875, -0.01432037353515625, -0.01262664794921875, 0.0323486328125, -0.034210205078125, -0.03668212890625, 0.0218658447265625, 0.01551055908203125, 0.042938232421875, 0.0108184814453125, -0.0595703125, 0.02471923828125, 0.0287933349609375, 0.07049560546875, -0.0033893585205078125, -0.027099609375, -0.0126190185546875, -0.0009627342224121094, -0.00690460205078125, 0.050567626953125, -0.0247039794921875, -0.030364990234375, -0.027191162109375, 0.0084381103515625, -0.01078033447265625, -0.0210723876953125, 0.0355224609375, -0.023468017578125, 0.033111572265625, -0.02276611328125, -0.025299072265625, -0.02978515625, 0.0161590576171875, -0.040679931640625, 0.09466552734375, 0.0026073455810546875, -0.0675048828125, 0.0140380859375, -0.058135986328125, -0.00823974609375, -0.0172271728515625, -0.00920867919921875, -0.04986572265625, -0.0214691162109375, 0.0260162353515625, 0.0293121337890625, -0.0233306884765625, 0.0222015380859375, -0.0185089111328125, -0.031280517578125, 0.007476806640625, -0.026397705078125, 0.08087158203125, 0.020843505859375, -0.052642822265625, 0.027008056640625, -0.06634521484375, -0.01287841796875, 0.03863525390625, -0.01270294189453125, 0.00936126708984375, -0.01824951171875, -0.0140533447265625, 0.023834228515625, 0.0305328369140625, -0.033050537109375, 0.02130126953125, -0.0253143310546875, 0.043365478515625, 0.059814453125, -0.005954742431640625, 0.0224761962890625, -0.0322265625, 0.048553466796875, 0.0028400421142578125, 0.0208587646484375, -0.0064697265625, -0.051300048828125, -0.08038330078125, -0.031341552734375, 0.01247406005859375, 0.0377197265625, -0.040985107421875, 0.045257568359375, 0.0025043487548828125, -0.0614013671875, -0.0360107421875, -0.007778167724609375, 0.0251312255859375, 0.056640625, 0.0278778076171875, 
-0.0213623046875, -0.05218505859375, -0.06268310546875, 0.0126953125, -0.004199981689453125, 0.0016202926635742188, 0.01390838623046875, 0.0439453125, -0.0143280029296875, 0.0753173828125, -0.0389404296875, -0.01922607421875, -0.0311279296875, 0.0095672607421875, 0.0323486328125, 0.0457763671875, 0.05255126953125, -0.033538818359375, -0.020660400390625, -0.0262451171875, -0.06646728515625, -0.0062255859375, -0.0014858245849609375, -0.032562255859375, 0.0028533935546875, 0.0200958251953125, -0.06109619140625, 0.042205810546875, 0.048248291015625, -0.037750244140625, 0.053497314453125, -0.01467132568359375, -0.0039043426513671875, -0.0858154296875, 0.0215301513671875, -0.0012722015380859375, -0.005931854248046875, -0.0423583984375, -0.0018711090087890625, -0.0067138671875, -0.00484466552734375, -0.033599853515625, 0.04583740234375, -0.029876708984375, 0.0005831718444824219, -0.0179443359375, 0.0009169578552246094, 0.00010991096496582031, 0.058624267578125, -0.00592041015625, 0.05828857421875, 0.0528564453125, -0.03448486328125, 0.0284576416015625, 0.0223846435546875, -0.025054931640625, 0.024505615234375, -0.0679931640625, 0.0247344970703125, 0.004184722900390625, 0.041015625, -0.08856201171875, -0.0153045654296875, 0.0421142578125, -0.03326416015625, 0.0230865478515625, 0.009735107421875, -0.04034423828125, -0.03790283203125, -0.0271759033203125, 0.03363037109375, 0.039703369140625, -0.032989501953125, 0.051727294921875, 0.0231170654296875, 0.0020732879638671875, -0.04150390625, -0.0426025390625, -0.01432037353515625, -0.02764892578125, -0.05255126953125, 0.022705078125, -0.01312255859375, 0.0008702278137207031, -0.012939453125, -0.007640838623046875, 0.01236724853515625, 0.00553131103515625, 0.023193359375, 0.048370361328125, -0.00984954833984375, -0.0212860107421875, 0.0019044876098632812, -0.016754150390625, 0.004425048828125, -0.004032135009765625, 0.07275390625, -0.0281982421875, -0.0229339599609375, -0.057342529296875, -0.0001277923583984375, 0.03515625, 
-0.02978515625, 0.0626220703125, 0.056884765625, -0.0296630859375, 0.019500732421875, -0.03472900390625, -0.0212554931640625, -0.038604736328125, 0.0309295654296875, -0.032958984375, -0.044219970703125, 0.061553955078125, 0.01078033447265625, 0.0169525146484375, 0.057342529296875, 0.05401611328125, 0.01024627685546875, 0.0799560546875, 0.051239013671875, 0.006565093994140625, 0.037994384765625, -0.054595947265625, 0.00530242919921875, -0.0706787109375, -0.050262451171875, -0.027069091796875, -0.0299072265625, -0.0289764404296875, -0.02545166015625, 0.020172119140625, 0.0111541748046875, -0.04571533203125, 0.030609130859375, -0.048553466796875, 0.009246826171875, 0.041900634765625, 0.0220794677734375, 0.0178680419921875, -0.0037078857421875, -0.00919342041015625, -0.00400543212890625, -0.0494384765625, -0.046661376953125, 0.09112548828125, 0.0308685302734375, 0.04791259765625, -0.001995086669921875, 0.042572021484375, -0.0010843276977539062, 0.02105712890625, -0.041229248046875, 0.04412841796875, 0.0192108154296875, -0.049591064453125, -0.00423431396484375, -0.018157958984375, -0.0699462890625, 0.0173797607421875, -0.01375579833984375, -0.058929443359375, 0.01155853271484375, 0.00853729248046875, -0.04150390625, 0.0254974365234375, -0.045806884765625, 0.0621337890625, -0.0198822021484375, -0.005420684814453125, -0.0021800994873046875, -0.04901123046875, 0.0421142578125, 0.0055084228515625, 0.0017881393432617188, -0.0152435302734375, -0.0012712478637695312, 0.0830078125, -0.040771484375, 0.0701904296875, -0.0051116943359375, -0.005489349365234375, 0.03265380859375, 0.0013475418090820312, 0.0489501953125, -0.001346588134765625, -0.006038665771484375, 0.023162841796875, -0.0138702392578125, -0.033416748046875, -0.02447509765625, 0.04156494140625, -0.08453369140625, -0.040191650390625, -0.0374755859375, -0.036865234375, 0.005565643310546875, 0.0219573974609375, 0.038848876953125, 0.0266571044921875, 0.0241241455078125, -0.006603240966796875, 0.04083251953125, 
-0.0236053466796875, 0.03155517578125, 0.030487060546875, -0.01020050048828125, -0.0286407470703125, 0.0538330078125, 0.0089111328125, 0.0219879150390625, -0.00022995471954345703, 0.0082244873046875, -0.0242767333984375, -0.031036376953125, -0.028961181640625, 0.0426025390625, -0.04730224609375, -0.0240020751953125, -0.054962158203125, -0.021209716796875, -0.02764892578125, -0.006885528564453125, -0.036346435546875, -0.02581787109375, -0.043487548828125, -0.018798828125, 0.044830322265625, 0.04376220703125, 0.007671356201171875, 0.03704833984375, -0.0237884521484375, 0.01427459716796875, 0.020111083984375, 0.00705718994140625, 0.00992584228515625, -0.06597900390625, 0.0006799697875976562, 0.01224517822265625, -0.04107666015625, -0.066650390625, 0.04156494140625, -0.0002999305725097656, 0.047210693359375, 0.0212860107421875, -0.0005311965942382812, 0.0675048828125, -0.01438140869140625, 0.07183837890625, 0.02301025390625, -0.07159423828125, 0.04083251953125, -0.02447509765625, 0.018341064453125, 0.016845703125, 0.0238037109375, -0.0174713134765625, -0.0283050537109375, -0.066650390625, -0.07183837890625, 0.07171630859375, 0.029144287109375, 0.0025920867919921875, 0.01287078857421875, 0.02838134765625, 0.00919342041015625, 0.0163421630859375, -0.06671142578125, -0.044677734375, -0.0377197265625, -0.0109100341796875, 0.0021953582763671875, -0.01560211181640625, -0.00992584228515625, -0.0274200439453125, 0.05572509765625, 0.0033740997314453125, 0.04071044921875, 0.009033203125, 0.0006604194641113281, -0.0051116943359375, -0.0123291015625, 0.0557861328125, 0.04791259765625, -0.0241241455078125, -0.004955291748046875, 0.02557373046875, -0.046112060546875, 0.0014848709106445312, 0.01306915283203125, -0.006366729736328125, -0.0071258544921875, 0.024017333984375, 0.06829833984375, -0.003520965576171875, -0.0301666259765625, 0.026580810546875, -0.0021343231201171875, -0.00951385498046875, -0.03289794921875, 0.006061553955078125, 0.01123046875, 0.0311431884765625, 
0.0281219482421875, 0.0120391845703125, -0.0038909912109375, -0.038360595703125, -0.00569915771484375, 0.02069091796875, 0.0036373138427734375, -0.03564453125, 0.07476806640625, 0.0029659271240234375, -0.021240234375, 0.04815673828125, -0.00997161865234375, -0.040618896484375, 0.05682373046875, 0.039642333984375, 0.0435791015625, -0.01261138916015625, 0.00750732421875, 0.03875732421875, 0.01206207275390625, -0.0069122314453125, 0.025848388671875, 0.006824493408203125, -0.033966064453125, -0.0256500244140625, -0.0438232421875, -0.0178070068359375, 0.0305328369140625, -0.045989990234375, 0.044921875, -0.039703369140625, -0.0178375244140625, -0.00969696044921875, 0.0186614990234375, -0.058380126953125, 0.0123138427734375, 0.0108642578125, 0.0684814453125, -0.06256103515625, 0.0723876953125, 0.0423583984375, -0.047760009765625, -0.08343505859375, -0.0169677734375, 0.0017900466918945312, -0.07568359375, 0.03289794921875, 0.0137786865234375, -0.00457000732421875, 0.00628662109375, -0.046905517578125, -0.0748291015625, 0.109375, 0.0394287109375, -0.0303192138671875, -0.01412200927734375, 0.0029773712158203125, 0.040435791015625, -0.01495361328125, 0.048583984375, 0.049346923828125, 0.0267181396484375, 0.0180816650390625, -0.07916259765625, 0.0267486572265625, -0.032684326171875, -0.0084075927734375, -0.00759124755859375, -0.074951171875, 0.0980224609375, -0.0172271728515625, -0.0025119781494140625, 0.0223388671875, 0.058074951171875, 0.052520751953125, 0.00986480712890625, 0.0271148681640625, 0.037994384765625, 0.056549072265625, -0.00904083251953125, 0.0751953125, -0.021209716796875, 0.049774169921875, 0.0638427734375, 0.0039825439453125, 0.043853759765625, 0.0187225341796875, -0.0307159423828125, 0.0498046875, 0.07098388671875, -0.004852294921875, 0.04205322265625, 0.004322052001953125, -0.0015707015991210938, -0.00969696044921875, 0.00848388671875, -0.0552978515625, 0.033477783203125, 0.0277862548828125, -0.0275726318359375, -0.0191497802734375, -0.00818634033203125, 
0.025726318359375, -0.027740478515625, -0.006313323974609375, 0.03411865234375, 0.0032558441162109375, -0.030364990234375, 0.070556640625, 0.005725860595703125, 0.0699462890625, -0.0582275390625, 0.0017194747924804688, -0.0234222412109375, 0.01059722900390625, -0.025238037109375, -0.0465087890625, 0.007297515869140625, -0.00006669759750366211, 0.004665374755859375, 0.003635406494140625, 0.03814697265625, -0.0182647705078125, -0.040802001953125, 0.007022857666015625, 0.019378662109375, 0.02203369140625, 0.0091400146484375, -0.0718994140625, 0.0193023681640625, 0.00386810302734375, -0.047027587890625, 0.0183868408203125, 0.021026611328125, -0.0006971359252929688, 0.047576904296875, 0.04864501953125, -0.00078582763671875, 0.015655517578125, -0.0152740478515625, 0.07806396484375, -0.0335693359375, -0.0374755859375, -0.0701904296875, 0.04248046875, -0.0061492919921875, -0.0396728515625, 0.06329345703125, 0.037811279296875, 0.056884765625, 0.004787445068359375, 0.04595947265625, -0.018463134765625, 0.01885986328125, -0.030426025390625, 0.052703857421875, -0.0367431640625, 0.022979736328125, -0.023681640625, -0.068603515625, -0.00934600830078125, 0.07440185546875, -0.02740478515625, 0.0091400146484375, 0.03741455078125, 0.06451416015625, -0.00827789306640625, -0.0021228790283203125, -0.00438690185546875, 0.0215911865234375, 0.037078857421875, 0.05078125, 0.04412841796875, -0.04669189453125, 0.054779052734375, -0.0428466796875, -0.0252532958984375, -0.01432037353515625, -0.056915283203125, -0.073974609375, -0.034393310546875, -0.0250091552734375, -0.043182373046875, -0.0125274658203125, 0.07025146484375, 0.056121826171875, -0.055877685546875, -0.0238800048828125, -0.00849151611328125, -0.01253509521484375, -0.02032470703125, -0.0156707763671875, 0.049468994140625, 0.0032672882080078125, -0.0633544921875, 0.006824493408203125, -0.005367279052734375, 0.02911376953125, -0.02685546875, -0.01470184326171875, -0.0216522216796875, 0.00043201446533203125, 0.0229339599609375, 
0.0241851806640625, -0.050689697265625, -0.00743865966796875, 0.006549835205078125, -0.01296234130859375, 0.00940704345703125, 0.032562255859375, -0.062469482421875, 0.02935791015625, 0.0191650390625, 0.01275634765625, 0.057891845703125, -0.01337432861328125, 0.0265960693359375, -0.03338623046875, 0.032745361328125, 0.0011615753173828125, 0.038665771484375, 0.0191497802734375, -0.0293121337890625, 0.033905029296875, 0.0245513916015625, -0.0295562744140625, -0.06884765625, -0.0004475116729736328, -0.0899658203125, 0.0011920928955078125, 0.0902099609375, -0.023651123046875, -0.02642822265625, 0.007633209228515625, -0.040374755859375, 0.048370361328125, -0.034454345703125, 0.06781005859375, 0.035797119140625, -0.0201873779296875, -0.00894927978515625, -0.03662109375, 0.032684326171875, 0.0234527587890625, -0.061553955078125, -0.0186920166015625, 0.01116943359375, 0.0347900390625, 0.01470184326171875, 0.053466796875, -0.01172637939453125, 0.01971435546875, 0.0033473968505859375, 0.005519866943359375, -0.0335693359375, -0.002674102783203125, -0.0145416259765625, -0.00643157958984375, -0.01309967041015625, -0.0291900634765625 ] ]
garage-bAInd/GPlatty-30B
2023-07-25T19:07:41.000Z
[ "transformers", "pytorch", "llama", "text-generation", "en", "license:other", "endpoints_compatible", "has_space", "text-generation-inference", "region:us" ]
text-generation
garage-bAInd
null
null
garage-bAInd/GPlatty-30B
18
5,846
transformers
2023-06-28T06:03:59
--- language: - en tags: - llama license: other metrics: - MMLU - ARC - HellaSwag - TruthfulQA --- # Information GPlatty-30B is a merge of [garage-bAInd/Platypus-30B](https://huggingface.co/lilloukas/Platypus-30B) and [chansung/gpt4-alpaca-lora-30b](https://huggingface.co/chansung/gpt4-alpaca-lora-30b) | Metric | Value | |-----------------------|-------| | MMLU (5-shot) | 63.6 | | ARC (25-shot) | 66.0 | | HellaSwag (10-shot) | 84.8 | | TruthfulQA (0-shot) | 53.8 | | Avg. | 67.0 | We use state-of-the-art [Language Model Evaluation Harness](https://github.com/EleutherAI/lm-evaluation-harness) to run the benchmark tests above. ## Model Details * **Trained by**: Platypus-30B trained by Cole Hunter & Ariel Lee; gpt4-alpaca-lora-30b by chansung. * **Model type:** **GPlatty-30B** is an auto-regressive language model based on the LLaMA transformer architecture. * **Language(s)**: English * **License for base weights**: License for the base LLaMA model's weights is Meta's [non-commercial bespoke license](https://github.com/facebookresearch/llama/blob/main/MODEL_CARD.md). | Hyperparameter | Value | |---------------------------|-------| | \\(n_\text{parameters}\\) | 33B | | \\(d_\text{model}\\) | 6656 | | \\(n_\text{layers}\\) | 60 | | \\(n_\text{heads}\\) | 52 | ## Reproducing Evaluation Results Install LM Evaluation Harness: ``` git clone https://github.com/EleutherAI/lm-evaluation-harness cd lm-evaluation-harness pip install -e . ``` Each task was evaluated on a single A100 80GB GPU. 
ARC: ``` python main.py --model hf-causal-experimental --model_args pretrained=garage-bAInd/GPlatty-30B --tasks arc_challenge --batch_size 1 --no_cache --write_out --output_path results/Platypus-30B/arc_challenge_25shot.json --device cuda --num_fewshot 25 ``` HellaSwag: ``` python main.py --model hf-causal-experimental --model_args pretrained=garage-bAInd/GPlatty-30B --tasks hellaswag --batch_size 1 --no_cache --write_out --output_path results/Platypus-30B/hellaswag_10shot.json --device cuda --num_fewshot 10 ``` MMLU: ``` python main.py --model hf-causal-experimental --model_args pretrained=garage-bAInd/GPlatty-30B --tasks hendrycksTest-* --batch_size 1 --no_cache --write_out --output_path results/Platypus-30B/mmlu_5shot.json --device cuda --num_fewshot 5 ``` TruthfulQA: ``` python main.py --model hf-causal-experimental --model_args pretrained=garage-bAInd/GPlatty-30B --tasks truthfulqa_mc --batch_size 1 --no_cache --write_out --output_path results/Platypus-30B/truthfulqa_0shot.json --device cuda ``` ## Limitations and bias The base LLaMA model is trained on various data, some of which may contain offensive, harmful, and biased content that can lead to toxic behavior. See Section 5.1 of the LLaMA paper. We have not performed any studies to determine how fine-tuning on the aforementioned datasets affect the model's behavior and toxicity. Do not treat chat responses from this model as a substitute for human judgment or as a source of truth. Please use responsibly. 
## Citations ```bibtex @article{touvron2023llama, title={LLaMA: Open and Efficient Foundation Language Models}, author={Touvron, Hugo and Lavril, Thibaut and Izacard, Gautier and Martinet, Xavier and Lachaux, Marie-Anne and Lacroix, Timoth{\'e}e and Rozi{\`e}re, Baptiste and Goyal, Naman and Hambro, Eric and Azhar, Faisal and Rodriguez, Aurelien and Joulin, Armand and Grave, Edouard and Lample, Guillaume}, journal={arXiv preprint arXiv:2302.13971}, year={2023} } @article{hu2021lora, title={LoRA: Low-Rank Adaptation of Large Language Models}, author={Hu, Edward J. and Shen, Yelong and Wallis, Phillip and Allen-Zhu, Zeyuan and Li, Yuanzhi and Wang, Shean and Chen, Weizhu}, journal={CoRR}, year={2021} } ```
3,827
[ [ -0.02496337890625, -0.065185546875, 0.0279083251953125, 0.03143310546875, -0.0195770263671875, -0.01284027099609375, -0.0248260498046875, -0.042572021484375, 0.00992584228515625, 0.0180511474609375, -0.03729248046875, -0.03729248046875, -0.05133056640625, 0.0005707740783691406, -0.0147247314453125, 0.08740234375, -0.01303863525390625, -0.0241546630859375, 0.00726318359375, -0.01654052734375, -0.03369140625, -0.04315185546875, -0.045074462890625, -0.0256805419921875, 0.035888671875, 0.014190673828125, 0.044281005859375, 0.044525146484375, 0.05078125, 0.0254058837890625, -0.016937255859375, 0.0034694671630859375, -0.0399169921875, -0.0170135498046875, 0.0117340087890625, -0.0252838134765625, -0.053985595703125, 0.006557464599609375, 0.03857421875, 0.030029296875, -0.0237274169921875, 0.035003662109375, 0.008880615234375, 0.02691650390625, -0.04693603515625, 0.0258026123046875, -0.049652099609375, -0.0121002197265625, -0.0252532958984375, -0.0056304931640625, -0.01605224609375, -0.01409912109375, -0.0113983154296875, -0.048614501953125, 0.005588531494140625, 0.0011310577392578125, 0.09808349609375, 0.03070068359375, -0.01210784912109375, -0.00539398193359375, -0.029510498046875, 0.058929443359375, -0.07952880859375, 0.017425537109375, 0.029632568359375, 0.004604339599609375, -0.01537322998046875, -0.04132080078125, -0.054473876953125, -0.0240478515625, -0.007015228271484375, 0.0108642578125, -0.02288818359375, -0.00505828857421875, 0.0246429443359375, 0.03936767578125, -0.028411865234375, 0.0281982421875, -0.030426025390625, -0.01458740234375, 0.054962158203125, 0.023284912109375, 0.0175933837890625, -0.01204681396484375, -0.0262451171875, -0.0264434814453125, -0.05517578125, 0.021270751953125, 0.035797119140625, 0.0241241455078125, -0.0307159423828125, 0.051605224609375, -0.022247314453125, 0.04656982421875, 0.005558013916015625, -0.040008544921875, 0.04425048828125, -0.040557861328125, -0.0279083251953125, -0.0169219970703125, 0.092529296875, 0.0282440185546875, 
0.00031447410583496094, 0.005191802978515625, -0.01221466064453125, 0.0156707763671875, -0.0115814208984375, -0.0672607421875, -0.0069732666015625, 0.01325225830078125, -0.0213623046875, -0.0187530517578125, -0.0213165283203125, -0.04766845703125, -0.01401519775390625, 0.00240325927734375, 0.031524658203125, -0.031524658203125, -0.019073486328125, 0.01611328125, 0.0012769699096679688, 0.0311279296875, 0.0167999267578125, -0.05560302734375, 0.0216064453125, 0.0234832763671875, 0.05828857421875, -0.0186614990234375, -0.049530029296875, -0.0202484130859375, 0.007808685302734375, -0.015899658203125, 0.052978515625, -0.020172119140625, -0.01690673828125, -0.0232086181640625, 0.0147857666015625, -0.022430419921875, -0.035400390625, 0.04656982421875, -0.024200439453125, 0.0148162841796875, -0.02508544921875, -0.01641845703125, -0.0196380615234375, 0.0102691650390625, -0.04290771484375, 0.096435546875, 0.00624847412109375, -0.060760498046875, 0.0172271728515625, -0.049652099609375, -0.0212249755859375, -0.0098419189453125, 0.01375579833984375, -0.042724609375, -0.01690673828125, 0.0246124267578125, 0.0361328125, -0.041229248046875, 0.0249786376953125, -0.0258636474609375, -0.0312347412109375, 0.01064300537109375, -0.024444580078125, 0.072509765625, 0.0217437744140625, -0.036407470703125, 0.0056915283203125, -0.055023193359375, -0.011505126953125, 0.037384033203125, -0.03857421875, -0.00997161865234375, -0.005893707275390625, -0.0104217529296875, 0.0194244384765625, 0.029052734375, -0.0306396484375, 0.01369476318359375, -0.017120361328125, 0.03924560546875, 0.05078125, -0.0024738311767578125, 0.00787353515625, -0.031951904296875, 0.0297088623046875, 0.01145172119140625, 0.0230560302734375, 0.0033702850341796875, -0.052459716796875, -0.07525634765625, -0.0178985595703125, 0.006267547607421875, 0.05072021484375, -0.033355712890625, 0.047210693359375, -0.020538330078125, -0.048614501953125, -0.04266357421875, 0.019775390625, 0.04315185546875, 0.042633056640625, 0.0400390625, 
-0.035736083984375, -0.02618408203125, -0.06866455078125, -0.00494384765625, -0.0293426513671875, 0.01273345947265625, 0.02789306640625, 0.049774169921875, -0.0288238525390625, 0.0433349609375, -0.04071044921875, -0.0286712646484375, -0.01485443115234375, 0.0052490234375, 0.039276123046875, 0.03741455078125, 0.047943115234375, -0.010223388671875, -0.034088134765625, -0.005321502685546875, -0.05535888671875, -0.022216796875, 0.0057525634765625, -0.01488494873046875, 0.028411865234375, 0.018157958984375, -0.060394287109375, 0.02203369140625, 0.045318603515625, -0.021575927734375, 0.049468994140625, -0.009063720703125, 0.007049560546875, -0.058990478515625, 0.0194854736328125, 0.005573272705078125, 0.0029010772705078125, -0.040435791015625, 0.00992584228515625, -0.00868988037109375, 0.00676727294921875, -0.0469970703125, 0.0496826171875, -0.035888671875, -0.00038814544677734375, -0.0033168792724609375, 0.01139068603515625, -0.00518035888671875, 0.0682373046875, -0.00750732421875, 0.06463623046875, 0.036956787109375, -0.043365478515625, 0.01172637939453125, 0.01910400390625, -0.02191162109375, 0.0208740234375, -0.0687255859375, 0.005401611328125, 0.01322174072265625, 0.0242462158203125, -0.06561279296875, -0.00786590576171875, 0.028411865234375, -0.0178375244140625, 0.0109405517578125, 0.000247955322265625, -0.047149658203125, -0.040008544921875, -0.027679443359375, 0.0274810791015625, 0.04278564453125, -0.031402587890625, 0.02435302734375, 0.026763916015625, -0.009552001953125, -0.05535888671875, -0.054931640625, -0.01293182373046875, -0.02655029296875, -0.043975830078125, 0.0164031982421875, -0.0184173583984375, -0.006107330322265625, -0.006275177001953125, 0.0018482208251953125, 0.0032215118408203125, 0.00077056884765625, 0.0254058837890625, 0.032867431640625, -0.01087188720703125, -0.00913238525390625, -0.0076751708984375, -0.000002562999725341797, -0.0028057098388671875, 0.0148773193359375, 0.057891845703125, -0.029083251953125, -0.0218505859375, -0.0452880859375, 
0.00302886962890625, 0.03546142578125, -0.0208282470703125, 0.07208251953125, 0.052520751953125, -0.01207733154296875, 0.0192108154296875, -0.053009033203125, 0.001827239990234375, -0.036041259765625, 0.032196044921875, -0.0265350341796875, -0.0528564453125, 0.0662841796875, 0.02276611328125, 0.005748748779296875, 0.06884765625, 0.0640869140625, 0.0086212158203125, 0.0655517578125, 0.03753662109375, -0.015869140625, 0.040435791015625, -0.04376220703125, 0.006519317626953125, -0.072509765625, -0.0125274658203125, -0.035858154296875, -0.01464080810546875, -0.04656982421875, -0.033477783203125, 0.01122283935546875, 0.01349639892578125, -0.05218505859375, 0.03729248046875, -0.037628173828125, 0.0211029052734375, 0.0384521484375, 0.0139007568359375, 0.0267333984375, -0.01041412353515625, -0.01395416259765625, 0.007755279541015625, -0.050384521484375, -0.032012939453125, 0.09197998046875, 0.038421630859375, 0.06378173828125, 0.003078460693359375, 0.044677734375, -0.00806427001953125, 0.032928466796875, -0.039794921875, 0.04840087890625, -0.0057220458984375, -0.04547119140625, -0.0163421630859375, -0.026611328125, -0.06610107421875, 0.0244140625, -0.0074005126953125, -0.062347412109375, 0.017578125, 0.0025463104248046875, -0.02886962890625, 0.0270538330078125, -0.062347412109375, 0.060150146484375, -0.020782470703125, -0.033294677734375, -0.005992889404296875, -0.038970947265625, 0.05267333984375, -0.01488494873046875, 0.014617919921875, -0.0255126953125, 0.0018444061279296875, 0.0802001953125, -0.04449462890625, 0.07025146484375, -0.0115509033203125, -0.0133514404296875, 0.034423828125, -0.0131683349609375, 0.0457763671875, -0.008056640625, -0.0013246536254882812, 0.0384521484375, -0.005199432373046875, -0.022308349609375, -0.006793975830078125, 0.056976318359375, -0.0906982421875, -0.050506591796875, -0.03753662109375, -0.05389404296875, -0.0006151199340820312, 0.016845703125, 0.021453857421875, -0.00007003545761108398, 0.00986480712890625, 0.00440216064453125, 
0.0291748046875, -0.03961181640625, 0.04840087890625, 0.041748046875, -0.011383056640625, -0.0254669189453125, 0.056610107421875, 0.002742767333984375, 0.026611328125, 0.005462646484375, 0.00916290283203125, -0.01477813720703125, -0.03472900390625, -0.018798828125, 0.0462646484375, -0.0452880859375, -0.031402587890625, -0.046417236328125, -0.0311279296875, -0.0244293212890625, 0.00896453857421875, -0.035003662109375, -0.021636962890625, -0.03106689453125, 0.0008616447448730469, 0.0382080078125, 0.035797119140625, -0.003482818603515625, 0.05010986328125, -0.03564453125, 0.022613525390625, 0.03033447265625, 0.029449462890625, 0.00577545166015625, -0.059539794921875, -0.0097503662109375, 0.0162200927734375, -0.042449951171875, -0.0614013671875, 0.043121337890625, 0.022613525390625, 0.05572509765625, 0.0171356201171875, -0.00931549072265625, 0.075439453125, -0.0253143310546875, 0.06719970703125, 0.020263671875, -0.06884765625, 0.0423583984375, -0.0264129638671875, 0.017242431640625, 0.0310211181640625, 0.0272369384765625, -0.024078369140625, -0.04071044921875, -0.04974365234375, -0.0654296875, 0.058990478515625, 0.021820068359375, -0.0086669921875, 0.01280975341796875, 0.0179290771484375, -0.0021266937255859375, 0.0013380050659179688, -0.0828857421875, -0.0274810791015625, -0.0169830322265625, 0.0001251697540283203, -0.01776123046875, -0.011932373046875, -0.02178955078125, -0.039581298828125, 0.057861328125, -0.005626678466796875, 0.041778564453125, 0.004291534423828125, -0.0254974365234375, -0.0140838623046875, 0.01325225830078125, 0.044189453125, 0.041259765625, -0.031951904296875, -0.0094757080078125, 0.0281982421875, -0.04962158203125, 0.01201629638671875, 0.0138092041015625, 0.001102447509765625, -0.0191650390625, 0.036041259765625, 0.0828857421875, -0.0006222724914550781, -0.03936767578125, 0.0367431640625, -0.005523681640625, -0.01467132568359375, -0.00595855712890625, 0.0124969482421875, 0.01593017578125, 0.0191192626953125, 0.024932861328125, 
-0.00913238525390625, -0.01322174072265625, -0.038848876953125, -0.015350341796875, 0.033416748046875, 0.00611114501953125, -0.0280609130859375, 0.060394287109375, -0.0002827644348144531, -0.004505157470703125, 0.0438232421875, -0.0165252685546875, -0.029754638671875, 0.04949951171875, 0.0467529296875, 0.04840087890625, -0.013580322265625, 0.0005192756652832031, 0.041534423828125, 0.035888671875, -0.01104736328125, 0.01678466796875, 0.01273345947265625, -0.051116943359375, -0.0374755859375, -0.06365966796875, -0.0167236328125, 0.03253173828125, -0.03350830078125, 0.0193939208984375, -0.02056884765625, -0.0268096923828125, -0.00357818603515625, 0.025390625, -0.058380126953125, 0.00919342041015625, 0.0007872581481933594, 0.05889892578125, -0.08514404296875, 0.0714111328125, 0.03753662109375, -0.05535888671875, -0.07818603515625, -0.022613525390625, -0.01708984375, -0.07977294921875, 0.049102783203125, 0.002346038818359375, 0.004913330078125, -0.01081085205078125, -0.050384521484375, -0.088134765625, 0.10284423828125, 0.045654296875, -0.042205810546875, 0.0096893310546875, 0.01088714599609375, 0.052276611328125, -0.01235198974609375, 0.032470703125, 0.052215576171875, 0.0345458984375, 0.00630950927734375, -0.09368896484375, 0.0176849365234375, -0.03363037109375, -0.0024089813232421875, -0.0006861686706542969, -0.0760498046875, 0.09100341796875, -0.0201416015625, -0.013702392578125, -0.0012178421020507812, 0.036224365234375, 0.05413818359375, -0.002094268798828125, 0.03192138671875, 0.07330322265625, 0.04486083984375, -0.01031494140625, 0.0859375, -0.0301971435546875, 0.039031982421875, 0.0726318359375, -0.0031890869140625, 0.0714111328125, 0.0299224853515625, -0.044342041015625, 0.051025390625, 0.062042236328125, -0.00044727325439453125, 0.039642333984375, 0.002376556396484375, 0.004825592041015625, -0.001819610595703125, -0.00043082237243652344, -0.04620361328125, 0.026824951171875, 0.0173797607421875, -0.0175018310546875, -0.0084228515625, -0.0162200927734375, 
0.020477294921875, -0.03857421875, -0.01375579833984375, 0.0426025390625, 0.00992584228515625, -0.055816650390625, 0.08740234375, -0.002597808837890625, 0.073486328125, -0.04302978515625, 0.020782470703125, -0.03472900390625, 0.0098419189453125, -0.01496124267578125, -0.049652099609375, 0.01018524169921875, -0.0012264251708984375, 0.0011205673217773438, 0.0023708343505859375, 0.0435791015625, -0.0185546875, -0.036529541015625, 0.03314208984375, 0.033355712890625, 0.0161590576171875, 0.00482177734375, -0.0614013671875, 0.019744873046875, -0.01197052001953125, -0.037994384765625, 0.0301971435546875, 0.01245880126953125, -0.008819580078125, 0.05633544921875, 0.056976318359375, 0.01186370849609375, 0.0240936279296875, -0.003986358642578125, 0.0772705078125, -0.041748046875, -0.0270233154296875, -0.06341552734375, 0.0300750732421875, 0.00756072998046875, -0.0399169921875, 0.05377197265625, 0.0309600830078125, 0.05047607421875, 0.006816864013671875, 0.0423583984375, -0.0164947509765625, 0.01556396484375, -0.0262603759765625, 0.05194091796875, -0.04254150390625, 0.02734375, -0.027801513671875, -0.077880859375, -0.007389068603515625, 0.0457763671875, -0.0296783447265625, 0.0062408447265625, 0.061279296875, 0.06536865234375, -0.004001617431640625, -0.026214599609375, -0.00406646728515625, 0.0338134765625, 0.022369384765625, 0.07147216796875, 0.0474853515625, -0.05267333984375, 0.043548583984375, -0.038726806640625, -0.020355224609375, -0.0050811767578125, -0.0677490234375, -0.06475830078125, -0.040679931640625, -0.029876708984375, -0.019775390625, -0.002979278564453125, 0.0433349609375, 0.04168701171875, -0.05902099609375, -0.038543701171875, 0.002933502197265625, -0.0002779960632324219, -0.021881103515625, -0.01483917236328125, 0.03924560546875, -0.01519012451171875, -0.0457763671875, 0.0094451904296875, -0.00023698806762695312, 0.0079345703125, -0.019927978515625, -0.019195556640625, -0.0338134765625, -0.0000508427619934082, 0.0419921875, 0.0256500244140625, 
-0.0584716796875, -0.00933074951171875, -0.007495880126953125, -0.026702880859375, 0.013214111328125, 0.0144195556640625, -0.049468994140625, -0.002262115478515625, 0.032989501953125, 0.0244293212890625, 0.05712890625, -0.0133056640625, 0.013427734375, -0.041717529296875, 0.030029296875, -0.0114288330078125, 0.0361328125, 0.0217132568359375, -0.01904296875, 0.0477294921875, 0.02685546875, -0.045745849609375, -0.06890869140625, -0.016632080078125, -0.087646484375, -0.00984954833984375, 0.10223388671875, -0.0162353515625, -0.032257080078125, 0.021087646484375, -0.00804901123046875, 0.028350830078125, -0.032623291015625, 0.054046630859375, 0.047943115234375, -0.012542724609375, -0.0135498046875, -0.051300048828125, 0.02264404296875, 0.0218505859375, -0.07684326171875, -0.007598876953125, 0.0243682861328125, 0.036895751953125, 0.017059326171875, 0.059356689453125, -0.00180816650390625, 0.007068634033203125, -0.00937652587890625, 0.019317626953125, -0.0027713775634765625, 0.00036597251892089844, -0.0165863037109375, -0.00919342041015625, 0.0017852783203125, -0.00978851318359375 ] ]
MayaPH/opt-flan-iml-6.7b
2023-08-16T13:13:47.000Z
[ "transformers", "pytorch", "safetensors", "opt", "text-generation", "cot", "instruction", "dataset:SirNeural/flan_v2", "arxiv:2212.12017", "license:cc-by-sa-4.0", "has_space", "text-generation-inference", "region:us" ]
text-generation
MayaPH
null
null
MayaPH/opt-flan-iml-6.7b
1
5,846
transformers
2023-08-15T17:00:13
--- datasets: - SirNeural/flan_v2 license: cc-by-sa-4.0 inference: false tags: - cot - instruction --- # OPT-FLAN-IML-6.7B Released August 16, 2023 This model is patterned after the methodology presented in the original [OPT-IML (OPT + Instruction Meta-Learning)](https://arxiv.org/abs/2212.12017) paper, made available for the 6.7 billion parameter size variant of OPT. However, this model is fine-tuned on FLAN v2. ### How to use According to Meta's model card for OPT-IML-30B, it is recommended to directly call the [`generate`](https://huggingface.co/docs/transformers/main/en/main_classes/text_generation#transformers.generation_utils.GenerationMixin.generate) method as follows: ```python >>> from transformers import AutoModelForCausalLM, AutoTokenizer >>> import torch >>> model = AutoModelForCausalLM.from_pretrained("MayaPH/opt-flan-iml-6.7b", torch_dtype=torch.float16).cuda() >>> # the fast tokenizer currently does not work correctly >>> tokenizer = AutoTokenizer.from_pretrained("MayaPH/opt-flan-iml-6.7b", use_fast=False) >>> prompt = "What is the color of the sea?\nA:" >>> input_ids = tokenizer(prompt, return_tensors="pt").input_ids.cuda() >>> generated_ids = model.generate(input_ids) >>> tokenizer.batch_decode(generated_ids, skip_special_tokens=True) ``` ## Further Information For additional information or inquiries about OPT-FLAN-IML-6.7B, please contact the Maya Philippines iOps Team via jasper.catapang@maya.ph. ## Disclaimer OPT-FLAN-IML-6.7B is an AI language model trained by Maya Philippines. It is provided "as is" without warranty of any kind, express or implied. The model developers and Maya Philippines shall not be liable for any direct or indirect damages arising from the use of this model. ## Acknowledgments The development of OPT-FLAN-IML-6.7B was made possible by Maya Philippines and the curation and creation of the instruction-following dataset, FLAN v2, was made possible by Google. 
### BibTeX entry and citation info ```bibtex @misc{iyer2022opt, title={OPT-IML: Scaling Language Model Instruction Meta Learning through the Lens of Generalization}, author={Iyer, Srinivasan and Lin, Xi Victoria and Pasunuru, Ramakanth and Mihaylov, Todor and Simig, D{\'a}niel and Yu, Ping and Shuster, Kurt and Wang, Tianlu and Liu, Qing and Koura, Punit Singh and others}, year={2022}, eprint={2212.12017}, archivePrefix={arXiv}, primaryClass={cs.CL} } ```
2,441
[ [ -0.0302886962890625, -0.057220458984375, -0.00933837890625, 0.01776123046875, -0.0108642578125, -0.02423095703125, -0.01500701904296875, -0.04046630859375, 0.0058746337890625, 0.036285400390625, -0.0467529296875, -0.018035888671875, -0.031158447265625, 0.013427734375, -0.01427459716796875, 0.07086181640625, 0.001049041748046875, 0.006603240966796875, 0.0213165283203125, -0.009063720703125, -0.0089569091796875, -0.01203155517578125, -0.059539794921875, -0.005809783935546875, 0.01395416259765625, 0.042388916015625, 0.053497314453125, 0.0552978515625, 0.0421142578125, 0.02459716796875, -0.005908966064453125, 0.0093231201171875, -0.0161895751953125, -0.0467529296875, -0.002353668212890625, -0.0330810546875, -0.051788330078125, 0.0111083984375, 0.051055908203125, 0.038604736328125, 0.00449371337890625, 0.025787353515625, -0.0015316009521484375, 0.037933349609375, -0.038482666015625, 0.03924560546875, -0.044830322265625, 0.01132965087890625, -0.0075531005859375, 0.0126190185546875, -0.040283203125, -0.017120361328125, 0.022247314453125, -0.03338623046875, 0.019622802734375, 0.0036945343017578125, 0.0723876953125, 0.016357421875, -0.034698486328125, -0.0287933349609375, -0.053314208984375, 0.059417724609375, -0.08221435546875, 0.0171661376953125, 0.00920867919921875, 0.02752685546875, 0.0191802978515625, -0.0537109375, -0.03338623046875, -0.02484130859375, -0.01520538330078125, 0.00977325439453125, 0.007526397705078125, 0.02447509765625, 0.0220794677734375, 0.03741455078125, -0.037139892578125, -0.007602691650390625, -0.06304931640625, -0.016510009765625, 0.056427001953125, 0.005985260009765625, 0.0262451171875, -0.01824951171875, -0.0411376953125, -0.00994873046875, -0.05145263671875, 0.003978729248046875, 0.0318603515625, 0.0258941650390625, -0.02593994140625, 0.052978515625, -0.0052642822265625, 0.06256103515625, 0.0050201416015625, -0.018341064453125, 0.033599853515625, -0.0384521484375, -0.0234832763671875, -0.00531005859375, 0.06610107421875, 0.0181427001953125, 
0.0249786376953125, 0.025421142578125, -0.0195159912109375, -0.0297698974609375, -0.003337860107421875, -0.055908203125, -0.0157623291015625, 0.007053375244140625, -0.032958984375, -0.022735595703125, 0.025970458984375, -0.0762939453125, 0.018035888671875, -0.023712158203125, 0.039520263671875, -0.0305938720703125, -0.047698974609375, 0.00604248046875, 0.02960205078125, 0.046234130859375, 0.00731658935546875, -0.07989501953125, 0.00653839111328125, 0.0239715576171875, 0.06512451171875, 0.003662109375, -0.0307159423828125, -0.00782012939453125, -0.005252838134765625, -0.0143890380859375, 0.0213775634765625, -0.035064697265625, -0.01116943359375, 0.00782012939453125, 0.007389068603515625, -0.038330078125, -0.042510986328125, 0.026153564453125, -0.0193023681640625, 0.04156494140625, 0.00156402587890625, -0.03240966796875, -0.025299072265625, -0.00815582275390625, -0.046661376953125, 0.068603515625, 0.031890869140625, -0.048828125, 0.0176239013671875, -0.0623779296875, -0.0229339599609375, -0.014678955078125, 0.007442474365234375, -0.047210693359375, 0.0010728836059570312, 0.0168914794921875, 0.018829345703125, -0.01261138916015625, 0.018798828125, -0.01263427734375, -0.0276031494140625, -0.00960540771484375, -0.04351806640625, 0.07965087890625, 0.03759765625, -0.05157470703125, 0.0172271728515625, -0.0648193359375, 0.00472259521484375, 0.0159759521484375, -0.03338623046875, 0.00821685791015625, -0.01047515869140625, 0.004596710205078125, 0.0262451171875, 0.03302001953125, -0.034576416015625, 0.0196990966796875, -0.0595703125, 0.0301513671875, 0.0523681640625, -0.0116424560546875, 0.03363037109375, -0.0161590576171875, 0.033477783203125, 0.018310546875, 0.0408935546875, -0.0201263427734375, -0.03662109375, -0.09893798828125, -0.008270263671875, 0.005496978759765625, 0.047882080078125, -0.047607421875, 0.033782958984375, -0.01812744140625, -0.041961669921875, -0.032135009765625, 0.01158905029296875, 0.038787841796875, 0.02459716796875, 0.044403076171875, 
0.01380157470703125, -0.039031982421875, -0.06378173828125, -0.004364013671875, -0.0144805908203125, 0.01209259033203125, 0.0275421142578125, 0.039703369140625, -0.0282135009765625, 0.06964111328125, -0.034637451171875, -0.026397705078125, -0.03594970703125, 0.00586700439453125, 0.0296630859375, 0.04937744140625, 0.061676025390625, -0.057098388671875, -0.06097412109375, -0.007572174072265625, -0.0380859375, -0.01251983642578125, -0.0094146728515625, -0.0157623291015625, 0.03131103515625, 0.0406494140625, -0.0325927734375, 0.03265380859375, 0.032867431640625, -0.03668212890625, 0.050750732421875, -0.01361846923828125, -0.0051116943359375, -0.09173583984375, 0.0026721954345703125, -0.00963592529296875, 0.00859832763671875, -0.042999267578125, 0.0012350082397460938, 0.017974853515625, -0.00835418701171875, -0.051116943359375, 0.058807373046875, -0.031280517578125, 0.004428863525390625, -0.013031005859375, -0.016632080078125, -0.017852783203125, 0.04833984375, 0.005184173583984375, 0.060821533203125, 0.0304107666015625, -0.05841064453125, 0.021881103515625, -0.00543212890625, -0.0093994140625, 0.0158843994140625, -0.05841064453125, -0.01119232177734375, -0.00133514404296875, -0.006988525390625, -0.06640625, -0.010162353515625, 0.03594970703125, -0.016571044921875, 0.0362548828125, 0.001682281494140625, -0.02960205078125, -0.044830322265625, -0.0113372802734375, 0.031707763671875, 0.036895751953125, -0.0465087890625, 0.049224853515625, -0.004276275634765625, 0.0143585205078125, -0.053192138671875, -0.055938720703125, -0.0275421142578125, -0.0243072509765625, -0.047821044921875, 0.0209503173828125, -0.0350341796875, -0.0080413818359375, -0.01003265380859375, 0.002788543701171875, -0.00518798828125, -0.00852203369140625, -0.01288604736328125, 0.0131988525390625, -0.039398193359375, -0.0018062591552734375, 0.0112457275390625, -0.01395416259765625, 0.01117706298828125, -0.031280517578125, 0.04400634765625, -0.031219482421875, -0.0215911865234375, -0.028045654296875, 
-0.0021038055419921875, 0.056610107421875, -0.03729248046875, 0.08319091796875, 0.061920166015625, -0.026031494140625, -0.006351470947265625, -0.044586181640625, -0.0195465087890625, -0.0406494140625, 0.03936767578125, -0.0215911865234375, -0.0233612060546875, 0.04449462890625, 0.007465362548828125, 0.0333251953125, 0.057708740234375, 0.0298614501953125, 0.0189208984375, 0.06939697265625, 0.050537109375, 0.0026073455810546875, 0.061431884765625, -0.054412841796875, 0.01012420654296875, -0.065185546875, -0.0191802978515625, -0.00727081298828125, -0.0090789794921875, -0.0243988037109375, -0.031280517578125, 0.0245208740234375, 0.0034503936767578125, -0.031829833984375, 0.016632080078125, -0.02593994140625, 0.025634765625, 0.048675537109375, 0.0082855224609375, -0.004314422607421875, 0.0005130767822265625, -0.035186767578125, -0.0037364959716796875, -0.0460205078125, -0.0278472900390625, 0.0887451171875, 0.0242156982421875, 0.05419921875, 0.0179443359375, 0.041046142578125, 0.005870819091796875, 0.02191162109375, -0.0355224609375, 0.0406494140625, -0.01105499267578125, -0.056854248046875, -0.01568603515625, -0.0269622802734375, -0.05572509765625, 0.031402587890625, 0.009185791015625, -0.041473388671875, 0.0087432861328125, 0.01500701904296875, -0.01372528076171875, 0.037353515625, -0.06817626953125, 0.07720947265625, -0.03875732421875, -0.02130126953125, 0.0045623779296875, -0.048980712890625, 0.04388427734375, 0.007778167724609375, 0.003597259521484375, 0.0015192031860351562, -0.011199951171875, 0.0491943359375, -0.052581787109375, 0.0709228515625, -0.0144195556640625, -0.007503509521484375, 0.04443359375, -0.0200347900390625, 0.021728515625, -0.00012385845184326172, -0.01343536376953125, 0.00847625732421875, -0.0140228271484375, -0.0338134765625, -0.0254669189453125, 0.03662109375, -0.0709228515625, -0.03582763671875, -0.0278472900390625, -0.0291595458984375, 0.01548004150390625, 0.0274810791015625, 0.04248046875, 0.037933349609375, 0.01395416259765625, 
0.012420654296875, 0.05316162109375, -0.02685546875, 0.050018310546875, 0.051025390625, -0.028350830078125, -0.047882080078125, 0.07843017578125, 0.026702880859375, 0.048828125, 0.041748046875, 0.029266357421875, -0.0211639404296875, -0.0161285400390625, -0.036376953125, 0.038421630859375, -0.05670166015625, -0.0150146484375, -0.03900146484375, -0.0400390625, -0.0343017578125, -0.01036834716796875, -0.045074462890625, -0.012298583984375, -0.04583740234375, -0.0091705322265625, 0.01158905029296875, 0.052886962890625, -0.00664520263671875, 0.030181884765625, -0.035552978515625, -0.002521514892578125, 0.0183868408203125, 0.017242431640625, 0.0077362060546875, -0.05340576171875, -0.031768798828125, 0.025421142578125, -0.01483154296875, -0.07147216796875, 0.031524658203125, 0.0408935546875, 0.047393798828125, 0.0592041015625, 0.00782012939453125, 0.03863525390625, -0.0224609375, 0.056427001953125, -0.0022125244140625, -0.0704345703125, 0.06231689453125, -0.0162811279296875, 0.03973388671875, 0.0176849365234375, 0.0272369384765625, -0.01406097412109375, -0.02764892578125, -0.052978515625, -0.0667724609375, 0.0709228515625, 0.0016002655029296875, 0.0182952880859375, -0.001556396484375, 0.0391845703125, -0.0005693435668945312, 0.004222869873046875, -0.09674072265625, -0.045257568359375, -0.0254974365234375, -0.03753662109375, -0.00659942626953125, -0.045318603515625, 0.01806640625, -0.0226287841796875, 0.057586669921875, -0.0207366943359375, 0.037841796875, 0.00716400146484375, -0.0254669189453125, -0.01490020751953125, -0.0013380050659179688, 0.06378173828125, 0.049713134765625, -0.023651123046875, 0.0010786056518554688, 0.018402099609375, -0.04571533203125, -0.00339508056640625, 0.005313873291015625, -0.03631591796875, -0.00913238525390625, 0.0352783203125, 0.06927490234375, 0.030853271484375, -0.04119873046875, 0.034271240234375, 0.0033473968505859375, -0.014984130859375, -0.0330810546875, 0.0243682861328125, -0.0158538818359375, 0.0084228515625, 0.024505615234375, 
0.00659942626953125, -0.006317138671875, -0.0159759521484375, 0.01047515869140625, 0.0207672119140625, -0.021881103515625, -0.025604248046875, 0.06793212890625, 0.0161895751953125, -0.01241302490234375, 0.05609130859375, -0.017974853515625, -0.041290283203125, 0.055084228515625, 0.053497314453125, 0.056488037109375, -0.03387451171875, 0.0192718505859375, 0.0743408203125, 0.0548095703125, 0.013092041015625, 0.017669677734375, 0.006961822509765625, -0.031585693359375, -0.002246856689453125, -0.05572509765625, -0.0090789794921875, 0.0126800537109375, -0.032135009765625, 0.044708251953125, -0.04388427734375, 0.004207611083984375, -0.039093017578125, 0.0238494873046875, -0.045318603515625, 0.019439697265625, 0.01161956787109375, 0.067626953125, -0.0709228515625, 0.053192138671875, 0.053314208984375, -0.0648193359375, -0.0650634765625, 0.000042319297790527344, 0.005321502685546875, -0.048980712890625, 0.044219970703125, 0.028900146484375, 0.01421356201171875, 0.01064300537109375, -0.03509521484375, -0.1011962890625, 0.09783935546875, 0.0237884521484375, -0.023223876953125, -0.0111236572265625, 0.03912353515625, 0.03717041015625, -0.03741455078125, 0.05169677734375, 0.018646240234375, 0.0321044921875, 0.0076904296875, -0.06427001953125, -0.006256103515625, -0.0185089111328125, 0.0002548694610595703, -0.0102996826171875, -0.085205078125, 0.09722900390625, -0.029510498046875, -0.01068115234375, 0.00469970703125, 0.07720947265625, 0.0021305084228515625, 0.00423431396484375, 0.0308990478515625, 0.0241851806640625, 0.0369873046875, -0.00708770751953125, 0.05419921875, -0.032257080078125, 0.043212890625, 0.0682373046875, 0.0157470703125, 0.054901123046875, 0.0094146728515625, -0.0279083251953125, 0.034210205078125, 0.051300048828125, -0.002490997314453125, 0.0285186767578125, -0.007053375244140625, -0.00803375244140625, 0.006195068359375, 0.0154266357421875, -0.038787841796875, 0.0289306640625, 0.03948974609375, -0.055908203125, -0.0083770751953125, 0.0096893310546875, 
0.01093292236328125, -0.031524658203125, -0.0223541259765625, 0.046112060546875, -0.0060577392578125, -0.05230712890625, 0.059844970703125, 0.0037384033203125, 0.06500244140625, -0.05535888671875, 0.002819061279296875, -0.028717041015625, 0.03497314453125, -0.0092926025390625, -0.0239410400390625, 0.0224456787109375, -0.018463134765625, -0.0169830322265625, 0.00466156005859375, 0.03326416015625, -0.057220458984375, -0.06329345703125, 0.0215301513671875, 0.01154327392578125, 0.0027484893798828125, 0.0214691162109375, -0.08380126953125, -0.0004992485046386719, 0.0202484130859375, -0.0269775390625, 0.00572967529296875, 0.004222869873046875, 0.01488494873046875, 0.05853271484375, 0.0279083251953125, -0.01287078857421875, 0.0535888671875, -0.00727081298828125, 0.038360595703125, -0.020843505859375, -0.0202178955078125, -0.060028076171875, 0.04754638671875, -0.00006979703903198242, -0.036041259765625, 0.05169677734375, 0.047607421875, 0.07806396484375, -0.0265350341796875, 0.03802490234375, -0.02374267578125, 0.0170440673828125, -0.0484619140625, 0.0523681640625, -0.05743408203125, 0.0237884521484375, -0.024749755859375, -0.09246826171875, -0.003662109375, 0.059600830078125, -0.0243072509765625, 0.03582763671875, 0.04901123046875, 0.06158447265625, -0.0208740234375, -0.01361846923828125, 0.01392364501953125, 0.0234222412109375, 0.043731689453125, 0.0406494140625, 0.026092529296875, -0.05157470703125, 0.047088623046875, -0.052978515625, -0.018310546875, -0.0406494140625, -0.03948974609375, -0.07061767578125, -0.050933837890625, -0.0295562744140625, -0.01495361328125, -0.0175628662109375, 0.07257080078125, 0.050201416015625, -0.0667724609375, -0.0243377685546875, -0.023040771484375, 0.0174407958984375, -0.0175323486328125, -0.02484130859375, 0.039581298828125, -0.0386962890625, -0.0849609375, 0.0038700103759765625, 0.012664794921875, 0.0264739990234375, -0.0408935546875, -0.0173797607421875, -0.04144287109375, 0.01219940185546875, 0.02764892578125, 0.027496337890625, 
-0.071044921875, -0.00791168212890625, 0.0018301010131835938, -0.0121002197265625, -0.0038318634033203125, 0.028778076171875, -0.032989501953125, 0.04296875, 0.031158447265625, 0.0158233642578125, 0.00972747802734375, -0.0196380615234375, 0.04290771484375, -0.041778564453125, 0.020751953125, -0.00302886962890625, 0.04083251953125, 0.01349639892578125, -0.0203857421875, 0.054351806640625, 0.0255126953125, -0.03021240234375, -0.04986572265625, 0.0107421875, -0.040679931640625, -0.00342559814453125, 0.09716796875, -0.0164642333984375, -0.01507568359375, 0.01345062255859375, -0.02752685546875, 0.019805908203125, 0.0029544830322265625, 0.0270233154296875, 0.02960205078125, 0.004550933837890625, -0.03558349609375, -0.0279083251953125, 0.031646728515625, 0.0316162109375, -0.052886962890625, -0.005519866943359375, 0.0140228271484375, 0.021148681640625, 0.0162200927734375, 0.035064697265625, -0.005702972412109375, 0.019195556640625, 0.0032062530517578125, -0.00710296630859375, -0.0093231201171875, -0.028961181640625, -0.003459930419921875, -0.01146697998046875, -0.01287078857421875, 0.0012311935424804688 ] ]
PocketDoc/Dans-CreepingSenseOfDoom-13b
2023-08-25T15:58:49.000Z
[ "transformers", "pytorch", "llama", "text-generation", "en", "endpoints_compatible", "text-generation-inference", "region:us" ]
text-generation
PocketDoc
null
null
PocketDoc/Dans-CreepingSenseOfDoom-13b
3
5,846
transformers
2023-08-25T14:31:28
--- language: - en --- ### What is the model for? This model is proficient in crafting text-based adventure games. It can both concise replies and more expansive, novel-like descriptions. The ability to alternate between these two response styles can be triggered by a distinct system message. ### What's in the sausage? This model was trained on [Holodeck-1](https://huggingface.co/KoboldAI/LLAMA2-13B-Holodeck-1) using a deduped version of the skein text adventure dataset augmented with system messages using the 'Metharme' prompting format. ### PROMPT FORMAT: Consistent with the Pygmalion Metharme format which is shown below. ``` <|system|>{system message here}<|user|>{user action here}<|model|>{model response} <|system|>{system message here}<|model|>{model response} <|system|>{system message here}<|user|>{user action here}<|model|>{model response}<|user|>{user action here}<|model|>{model response} ``` ### EXAMPLES: ##### For shorter responses: ``` <|system|>Mode: Adventure Theme: Science Fiction, cats, money, aliens, space, stars, siblings, future, trade Tense: Second person present Extra: Short response length<|user|>you look around<|model|>{CURSOR HERE} ``` ``` <|system|>You are a dungeon master of sorts, guiding the reader through a story based on the following themes: Lovecraftian, Horror, city, research. Do not be afraid to get creative with your responses or to tell them they can't do something when it doesnt make sense for the situation. Narrate their actions and observations as they occur and drive the story forward.<|user|>you look around<|model|>{CURSOR HERE} ``` ##### For longer novel like responses: ``` <|system|>You're tasked with creating an interactive story around the genres of historical, historical, RPG, serious. Guide the user through this tale, describing their actions and surroundings using second person present tense. 
Lengthy and descriptive responses will enhance the experience.<|user|>you look around<|model|>{CURSOR HERE} ``` ##### With a model message first: ``` <|system|>Mode: Story Theme: fantasy, female protagonist, grimdark Perspective and Tense: Second person present Directions: Write something to hook the user into the story then narrate their actions and observations as they occur while driving the story forward.<|model|>{CURSOR HERE} ``` ### Some quick and dirty training details: - [<img src="https://raw.githubusercontent.com/OpenAccess-AI-Collective/axolotl/main/image/axolotl-badge-web.png" alt="Built with Axolotl" width="150" height="24"/>](https://github.com/OpenAccess-AI-Collective/axolotl) - Sequence length: 4096 - \# of epochs: 3 - Training time: 8 hours - Hardware: 1x RTX 3090 - Training type: QLoRA - PEFT R/A: 32/32 ### Credits: #### Holodeck-1: Thank you to Mr. Seeker and the Kobold AI team for the wonderful model Holodeck-1 [Holodeck-1 Huggingface page](https://huggingface.co/KoboldAI/LLAMA2-13B-Holodeck-1) #### Skein Text Adventure Data: Thank you to the [Kobold AI](https://huggingface.co/KoboldAI) community for curating the Skein dataset, which is pivotal to this model's capabilities.
3,092
[ [ -0.028167724609375, -0.0555419921875, 0.0237274169921875, 0.023590087890625, -0.01490020751953125, -0.005413055419921875, 0.012847900390625, -0.04095458984375, 0.031463623046875, 0.0531005859375, -0.060699462890625, -0.03509521484375, -0.016754150390625, 0.00218963623046875, -0.0090484619140625, 0.09381103515625, -0.00334930419921875, -0.01873779296875, -0.021209716796875, -0.010101318359375, -0.04595947265625, -0.0557861328125, -0.07275390625, -0.043060302734375, 0.060882568359375, 0.007366180419921875, 0.049835205078125, 0.046356201171875, 0.0186920166015625, 0.0173797607421875, -0.00621795654296875, 0.0211334228515625, -0.03887939453125, 0.0184173583984375, -0.0106964111328125, -0.035675048828125, -0.059783935546875, 0.0015048980712890625, 0.011474609375, 0.00890350341796875, -0.007312774658203125, 0.021484375, 0.0036334991455078125, 0.0164031982421875, -0.0208740234375, 0.017791748046875, 0.0038433074951171875, 0.0235137939453125, -0.0025653839111328125, -0.0102386474609375, -0.0290985107421875, -0.0218353271484375, 0.021759033203125, -0.06854248046875, 0.00295257568359375, 0.03399658203125, 0.042236328125, -0.00931549072265625, -0.030120849609375, -0.03631591796875, -0.05853271484375, 0.03765869140625, -0.07232666015625, 0.000675201416015625, 0.0347900390625, 0.0184326171875, -0.0011768341064453125, -0.06329345703125, -0.056640625, -0.01800537109375, -0.0175933837890625, 0.02056884765625, -0.0006270408630371094, 0.0023288726806640625, 0.034942626953125, 0.0219268798828125, -0.058807373046875, 0.002162933349609375, -0.0180206298828125, -0.01512908935546875, 0.0611572265625, 0.058135986328125, 0.0267333984375, -0.006275177001953125, -0.031890869140625, -0.0439453125, -0.024322509765625, 0.0289306640625, 0.03765869140625, 0.0030841827392578125, -0.0278167724609375, 0.061065673828125, 0.01076507568359375, 0.0498046875, 0.01302337646484375, -0.02545166015625, 0.01009368896484375, -0.01433563232421875, -0.00807952880859375, 0.00315093994140625, 0.06121826171875, 
0.054779052734375, -0.0085296630859375, -0.00576019287109375, -0.019744873046875, 0.00931549072265625, 0.029327392578125, -0.05072021484375, -0.0256805419921875, 0.02996826171875, -0.047637939453125, -0.042205810546875, -0.0211029052734375, -0.07733154296875, -0.052093505859375, 0.0050811767578125, 0.039794921875, -0.038482666015625, -0.01806640625, 0.017059326171875, -0.04595947265625, 0.0252227783203125, 0.033966064453125, -0.08251953125, 0.0290679931640625, 0.030426025390625, 0.06292724609375, -0.0057373046875, -0.0237579345703125, -0.0279388427734375, 0.011077880859375, 0.0001838207244873047, 0.05487060546875, -0.0257720947265625, -0.036651611328125, -0.013275146484375, -0.0004744529724121094, -0.0027217864990234375, -0.01934814453125, 0.032440185546875, -0.03289794921875, 0.03338623046875, 0.0011205673217773438, -0.03607177734375, -0.0300140380859375, 0.037841796875, -0.054443359375, 0.0693359375, 0.00844573974609375, -0.062469482421875, -0.00159454345703125, -0.06390380859375, -0.00865936279296875, 0.004184722900390625, 0.004146575927734375, -0.0009918212890625, -0.00042438507080078125, 0.0061798095703125, 0.00966644287109375, -0.00870513916015625, 0.0003132820129394531, -0.0236663818359375, -0.0301055908203125, 0.01509857177734375, -0.01422882080078125, 0.056121826171875, 0.0059051513671875, -0.02789306640625, -0.0006437301635742188, -0.05303955078125, 0.009918212890625, 0.0286407470703125, -0.041046142578125, -0.0188140869140625, 0.007503509521484375, 0.006622314453125, 0.00696563720703125, 0.023284912109375, -0.02655029296875, 0.057861328125, -0.0396728515625, 0.01302337646484375, 0.05096435546875, 0.00249481201171875, 0.0211334228515625, -0.04803466796875, 0.042999267578125, -0.00530242919921875, 0.015350341796875, -0.02215576171875, -0.040252685546875, -0.049835205078125, 0.0023441314697265625, 0.02264404296875, 0.06719970703125, -0.06439208984375, 0.0279388427734375, 0.005340576171875, -0.05487060546875, -0.037109375, -0.0265960693359375, 
0.033660888671875, 0.0311431884765625, 0.0223846435546875, -0.0281219482421875, -0.041351318359375, -0.041778564453125, -0.005859375, -0.03997802734375, -0.022003173828125, 0.04931640625, 0.031524658203125, -0.0225677490234375, 0.062225341796875, -0.04443359375, 0.007663726806640625, -0.038238525390625, 0.0187530517578125, 0.034332275390625, 0.032958984375, 0.044586181640625, -0.057403564453125, -0.0292510986328125, 0.006214141845703125, -0.07098388671875, 0.00382232666015625, -0.0033740997314453125, -0.00757598876953125, 0.007228851318359375, -0.0016717910766601562, -0.060791015625, 0.053924560546875, 0.029541015625, -0.0576171875, 0.046661376953125, -0.0200347900390625, 0.017333984375, -0.082275390625, 0.016357421875, -0.0244140625, -0.0020465850830078125, -0.056915283203125, 0.015899658203125, -0.031005859375, 0.00011157989501953125, -0.0200042724609375, 0.0849609375, -0.0208282470703125, 0.004180908203125, -0.0106201171875, 0.0131988525390625, 0.0019216537475585938, 0.059112548828125, 0.00981903076171875, 0.0284271240234375, 0.027191162109375, -0.046600341796875, 0.06793212890625, 0.041778564453125, -0.006683349609375, 0.039337158203125, -0.07135009765625, 0.0279998779296875, 0.00014483928680419922, 0.0195159912109375, -0.06927490234375, -0.030670166015625, 0.050445556640625, -0.046051025390625, 0.0166015625, -0.01021575927734375, -0.055450439453125, -0.048583984375, -0.017730712890625, 0.0158843994140625, 0.046295166015625, -0.03662109375, 0.062225341796875, 0.01136016845703125, -0.00951385498046875, -0.0304412841796875, -0.042816162109375, -0.0024166107177734375, -0.034088134765625, -0.047515869140625, 0.01361083984375, -0.049713134765625, -0.0225372314453125, -0.02545166015625, 0.013946533203125, -0.006267547607421875, -0.0011568069458007812, 0.0262298583984375, 0.02923583984375, -0.0115509033203125, 0.0034656524658203125, 0.00809478759765625, 0.005382537841796875, 0.0007176399230957031, -0.0218505859375, 0.06732177734375, 0.00028967857360839844, 
-0.0218048095703125, -0.0289764404296875, 0.0285797119140625, 0.043975830078125, -0.0019855499267578125, 0.04156494140625, 0.040069580078125, -0.019256591796875, 0.02471923828125, -0.01605224609375, -0.0245819091796875, -0.0333251953125, 0.005886077880859375, -0.00901031494140625, -0.0498046875, 0.051544189453125, 0.01012420654296875, 0.03668212890625, 0.0322265625, 0.03424072265625, -0.016693115234375, 0.093505859375, 0.03973388671875, 0.0024967193603515625, 0.0123138427734375, -0.0171966552734375, 0.01107025146484375, -0.053466796875, -0.031890869140625, -0.024658203125, -0.049896240234375, -0.0273284912109375, -0.003787994384765625, 0.03155517578125, 0.01424407958984375, -0.021240234375, 0.03558349609375, -0.037078857421875, 0.01381683349609375, 0.04656982421875, 0.01354217529296875, 0.0121612548828125, 0.0037136077880859375, -0.0325927734375, -0.006496429443359375, -0.056732177734375, -0.049072265625, 0.0699462890625, 0.03497314453125, 0.08380126953125, 0.0136260986328125, 0.06866455078125, 0.03546142578125, 0.002780914306640625, -0.0408935546875, 0.039520263671875, 0.02252197265625, -0.056396484375, -0.037689208984375, -0.004047393798828125, -0.056060791015625, 0.003711700439453125, -0.030364990234375, -0.0777587890625, 0.0170745849609375, 0.0218505859375, -0.06646728515625, 0.0277557373046875, -0.047698974609375, 0.06915283203125, -0.0251922607421875, -0.02923583984375, 0.01450347900390625, -0.06597900390625, 0.00644683837890625, 0.0203704833984375, -0.0041656494140625, 0.016754150390625, -0.0032825469970703125, 0.049163818359375, -0.0244140625, 0.08697509765625, -0.0110626220703125, -0.00879669189453125, 0.0238494873046875, 0.023223876953125, 0.047393798828125, 0.0217437744140625, 0.004627227783203125, -0.00450897216796875, 0.018310546875, -0.01806640625, -0.034088134765625, 0.04156494140625, -0.06256103515625, -0.0343017578125, -0.0389404296875, -0.04901123046875, 0.0051116943359375, 0.01284027099609375, 0.039093017578125, 0.05035400390625, 
-0.0244598388671875, -0.00989532470703125, 0.0660400390625, -0.02215576171875, 0.040130615234375, 0.0288848876953125, -0.04248046875, -0.03936767578125, 0.05218505859375, -0.003520965576171875, 0.0017871856689453125, 0.0172576904296875, -0.002292633056640625, -0.032257080078125, -0.0177459716796875, -0.0540771484375, 0.019195556640625, -0.026519775390625, -0.006866455078125, -0.0765380859375, -0.0301971435546875, -0.026123046875, -0.008270263671875, 0.007724761962890625, -0.033416748046875, -0.0379638671875, -0.007232666015625, 0.0361328125, 0.057647705078125, 0.0060272216796875, 0.04058837890625, -0.06268310546875, 0.006664276123046875, 0.0361328125, 0.00527191162109375, 0.0105438232421875, -0.0399169921875, -0.01441192626953125, 0.0219268798828125, -0.036346435546875, -0.0694580078125, 0.052734375, -0.0003142356872558594, 0.041534423828125, 0.0316162109375, 0.0099945068359375, 0.0374755859375, -0.035980224609375, 0.08258056640625, 0.0114898681640625, -0.035430908203125, 0.044952392578125, -0.035919189453125, 0.0280609130859375, 0.01873779296875, 0.03643798828125, -0.06951904296875, -0.02880859375, -0.058013916015625, -0.053802490234375, 0.07305908203125, 0.0171051025390625, 0.0294036865234375, -0.0009164810180664062, 0.03948974609375, 0.01456451416015625, 0.031280517578125, -0.07470703125, -0.02435302734375, -0.0244140625, -0.00007146596908569336, -0.01175689697265625, -0.0201263427734375, -0.0171051025390625, -0.01446533203125, 0.044952392578125, -0.01031494140625, 0.048431396484375, 0.00986480712890625, 0.0162811279296875, -0.00007146596908569336, -0.0057373046875, 0.046295166015625, 0.0288543701171875, -0.0263671875, 0.000015497207641601562, 0.01259613037109375, -0.04510498046875, -0.0026416778564453125, 0.008575439453125, -0.01336669921875, -0.002902984619140625, 0.04718017578125, 0.07598876953125, 0.01050567626953125, -0.05108642578125, 0.0284881591796875, -0.0234527587890625, -0.01148223876953125, -0.0261077880859375, 0.031829833984375, 0.01123809814453125, 
0.054229736328125, 0.0185089111328125, 0.004077911376953125, 0.01300048828125, -0.038055419921875, -0.011199951171875, 0.006328582763671875, -0.0300445556640625, -0.034912109375, 0.056854248046875, 0.005859375, -0.055145263671875, 0.057525634765625, -0.00977325439453125, -0.0272216796875, 0.057464599609375, 0.062286376953125, 0.06304931640625, -0.0224761962890625, 0.0284881591796875, 0.04376220703125, 0.01175689697265625, 0.0007214546203613281, 0.0234375, 0.00024771690368652344, -0.03533935546875, 0.00908660888671875, -0.04803466796875, -0.016021728515625, 0.01491546630859375, -0.046051025390625, 0.0223236083984375, -0.041900634765625, -0.0167388916015625, -0.016021728515625, 0.00020456314086914062, -0.034271240234375, -0.0004382133483886719, 0.003803253173828125, 0.0914306640625, -0.051727294921875, 0.042816162109375, 0.06549072265625, -0.037322998046875, -0.045562744140625, -0.0038928985595703125, 0.00888824462890625, -0.043975830078125, 0.0294036865234375, 0.0088653564453125, 0.02642822265625, 0.00722503662109375, -0.07574462890625, -0.043121337890625, 0.10784912109375, -0.016021728515625, -0.0276947021484375, -0.041290283203125, -0.0145111083984375, 0.06451416015625, -0.057891845703125, 0.04022216796875, 0.02996826171875, 0.0303497314453125, 0.0203094482421875, -0.06622314453125, -0.01263427734375, -0.03253173828125, 0.00274658203125, -0.008087158203125, -0.051055908203125, 0.07769775390625, -0.00960540771484375, -0.004566192626953125, 0.0352783203125, 0.047088623046875, 0.035797119140625, 0.045684814453125, 0.03863525390625, 0.051666259765625, 0.04058837890625, -0.0030517578125, 0.08770751953125, -0.010589599609375, 0.0256195068359375, 0.083740234375, -0.0161590576171875, 0.051055908203125, 0.01593017578125, -0.0049896240234375, 0.044158935546875, 0.07037353515625, -0.0026454925537109375, 0.052001953125, 0.001323699951171875, -0.02325439453125, -0.00998687744140625, -0.00014901161193847656, -0.0309295654296875, 0.0159912109375, 0.0215301513671875, 
-0.0220794677734375, -0.0110321044921875, 0.01324462890625, 0.0010395050048828125, -0.01995849609375, -0.014617919921875, 0.0726318359375, -0.00897216796875, -0.052886962890625, 0.03082275390625, -0.00600433349609375, 0.04443359375, -0.0626220703125, -0.0247955322265625, -0.042816162109375, 0.0005216598510742188, 0.0012111663818359375, -0.056488037109375, -0.00811004638671875, -0.0060272216796875, -0.032684326171875, -0.007213592529296875, 0.048309326171875, -0.03131103515625, -0.01800537109375, 0.01541900634765625, 0.0262298583984375, 0.023681640625, 0.0172576904296875, -0.054901123046875, 0.00508880615234375, 0.015716552734375, -0.01177215576171875, 0.017669677734375, 0.034942626953125, -0.00862884521484375, 0.045867919921875, 0.029388427734375, -0.01898193359375, -0.01134490966796875, -0.03021240234375, 0.0802001953125, -0.039459228515625, -0.04876708984375, -0.050445556640625, 0.046539306640625, -0.01425933837890625, -0.0287628173828125, 0.05633544921875, 0.04608154296875, 0.044464111328125, -0.0284423828125, 0.056884765625, -0.033050537109375, 0.0478515625, -0.027435302734375, 0.058197021484375, -0.044769287109375, 0.005584716796875, -0.020477294921875, -0.06280517578125, 0.00916290283203125, 0.04443359375, -0.00618743896484375, 0.0017957687377929688, 0.02923583984375, 0.05206298828125, -0.00855255126953125, 0.0007233619689941406, 0.0193939208984375, 0.0187530517578125, 0.01727294921875, 0.049560546875, 0.07293701171875, -0.0345458984375, 0.0303497314453125, -0.00862884521484375, -0.0321044921875, -0.032012939453125, -0.050628662109375, -0.09442138671875, -0.039093017578125, 0.0018520355224609375, -0.0443115234375, 0.001461029052734375, 0.06573486328125, 0.040283203125, -0.053802490234375, -0.037872314453125, 0.025054931640625, 0.002193450927734375, -0.02398681640625, -0.0191497802734375, -0.0018815994262695312, 0.00616455078125, -0.04718017578125, 0.01247406005859375, 0.016754150390625, 0.037078857421875, -0.02362060546875, -0.0271148681640625, 
-0.025054931640625, 0.01332855224609375, 0.041107177734375, 0.039764404296875, -0.042877197265625, -0.0215911865234375, 0.025360107421875, -0.01453399658203125, -0.00537109375, 0.05230712890625, -0.056396484375, 0.041046142578125, 0.0270538330078125, 0.01568603515625, 0.0557861328125, -0.0160675048828125, 0.03741455078125, -0.040008544921875, 0.0019073486328125, 0.02679443359375, 0.0039043426513671875, 0.0020122528076171875, -0.05328369140625, 0.04864501953125, 0.025909423828125, -0.046173095703125, -0.0465087890625, 0.00151824951171875, -0.072021484375, -0.0054779052734375, 0.080810546875, -0.0158233642578125, -0.03228759765625, -0.0030517578125, -0.06610107421875, 0.001873016357421875, -0.026397705078125, 0.04705810546875, 0.0673828125, -0.029388427734375, -0.01175689697265625, -0.03875732421875, 0.035308837890625, -0.014892578125, -0.057708740234375, -0.01114654541015625, 0.055877685546875, 0.00894927978515625, 0.032989501953125, 0.04913330078125, 0.00017178058624267578, 0.0214996337890625, 0.01062774658203125, 0.017425537109375, -0.007293701171875, -0.02337646484375, -0.01537322998046875, -0.0022487640380859375, -0.01548004150390625, -0.005401611328125 ] ]
Undi95/ReMM-SLERP-L2-13B
2023-09-09T21:12:46.000Z
[ "transformers", "pytorch", "llama", "text-generation", "license:cc-by-nc-4.0", "endpoints_compatible", "has_space", "text-generation-inference", "region:us" ]
text-generation
Undi95
null
null
Undi95/ReMM-SLERP-L2-13B
9
5,846
transformers
2023-09-04T17:37:26
--- license: cc-by-nc-4.0 --- Re:MythoMax (ReMM) is a recreation trial of the original [MythoMax-L2-B13](https://huggingface.co/Gryphe/MythoMax-L2-13b) with updated models. This merge use SLERP [TESTING] to merge ReML and Huginn v1.2. Command useds and explaination : ```shell Due to hardware limitation, some merge was done in 2 part. - Recreate ReML : Mythologic (v2) (Chronos/Hermes/Airboros) => Replacing Chronos by The-Face-Of-Goonery/Chronos-Beluga-v2-13bfp16 (0.30) => Replacing Airoboros by jondurbin/airoboros-l2-13b-2.1 (last version) (0.40) => Keeping NousResearch/Nous-Hermes-Llama2-13b (0.30) Part 1: python ties_merge.py TheBloke/Llama-2-13B-fp16 ./ReML-L2-13B-part1 --merge The-Face-Of-Goonery/Chronos-Beluga-v2-13bfp16 --density 0.42 --merge jondurbin/airoboros-l2-13b-2.1 --density 0.56 --cuda Part 2: python ties_merge.py TheBloke/Llama-2-13B-fp16 ./ReML-L2-13B --merge NousResearch/Nous-Hermes-Llama2-13b --density 0.30 --merge Undi95/ReML-L2-13B-part1 --density 0.70 --cuda With that : - Recreate ReMM : MythoMax (v2) (Mythologic/Huginn v1) => Replacing Mythologic by the one above (0.5) => Replacing Huginn by The-Face-Of-Goonery/Huginn-13b-v1.2 (hottest) (0.5) Part 3: python slerpmergelm.py "The-Face-Of-Goonery_Huginn-13b-v1.2" "Undi95_ReML-L2-13B" "result" ``` Version of SLERP used is different to accept usage on notebook : https://github.com/Undi95/LLM-SLERP-MergeTest/tree/main (Thanks @Vali) <!-- description start --> ## Description This repo contains fp16 files of ReMM-SLERP, a recreation of the original MythoMax, but updated and merged with SLERP. 
<!-- description end --> <!-- description start --> ## Models used - TheBloke/Llama-2-13B-fp16 (base) - The-Face-Of-Goonery/Chronos-Beluga-v2-13bfp16 - jondurbin/airoboros-l2-13b-2.1 - NousResearch/Nous-Hermes-Llama2-13b - The-Face-Of-Goonery/Huginn-13b-v1.2 - ReML-L2-13B (Private recreation trial of an updated Mythologic-L2-13B) <!-- description end --> <!-- prompt-template start --> ## Prompt template: Alpaca ``` Below is an instruction that describes a task. Write a response that appropriately completes the request. ### Instruction: {prompt} ### Response: ``` Special thanks to Sushi kek
2,197
[ [ -0.03289794921875, -0.03887939453125, 0.036163330078125, -0.0045166015625, -0.0202178955078125, 0.0012426376342773438, 0.007152557373046875, -0.035675048828125, 0.055328369140625, 0.05401611328125, -0.06011962890625, -0.0214996337890625, -0.032623291015625, 0.01393890380859375, -0.016754150390625, 0.08172607421875, -0.0298614501953125, -0.027099609375, -0.01517486572265625, -0.0150146484375, -0.005035400390625, -0.01715087890625, -0.045684814453125, -0.0311279296875, 0.04693603515625, 0.018402099609375, 0.053985595703125, 0.05401611328125, 0.052001953125, 0.0244140625, -0.0157928466796875, 0.0305938720703125, -0.03131103515625, 0.00911712646484375, -0.0034389495849609375, -0.04339599609375, -0.0474853515625, 0.026824951171875, 0.0648193359375, 0.03143310546875, -0.0245361328125, 0.0222320556640625, -0.006984710693359375, 0.0277862548828125, -0.0355224609375, -0.00362396240234375, -0.01165008544921875, 0.02496337890625, -0.01068115234375, -0.0156707763671875, -0.006580352783203125, -0.037445068359375, 0.0121002197265625, -0.07061767578125, 0.0309600830078125, 0.0290985107421875, 0.07733154296875, 0.031158447265625, -0.01959228515625, -0.0017528533935546875, -0.017333984375, 0.07733154296875, -0.076416015625, 0.01155853271484375, 0.006427764892578125, 0.01666259765625, -0.03448486328125, -0.074462890625, -0.042633056640625, -0.005218505859375, -0.021697998046875, 0.0064849853515625, -0.043548583984375, -0.0252685546875, 0.034759521484375, 0.037841796875, -0.044708251953125, 0.004825592041015625, -0.06341552734375, -0.03021240234375, 0.039459228515625, 0.0249481201171875, 0.0133056640625, -0.017242431640625, -0.046234130859375, -0.021514892578125, -0.04693603515625, -0.019805908203125, 0.004383087158203125, -0.002864837646484375, -0.0210418701171875, 0.052947998046875, -0.0035419464111328125, 0.050506591796875, 0.0127410888671875, -0.042205810546875, 0.047210693359375, -0.0208740234375, -0.026153564453125, -0.004749298095703125, 0.06292724609375, 
0.046173095703125, -0.012603759765625, 0.0087127685546875, -0.01309967041015625, -0.0014085769653320312, 0.0244293212890625, -0.055084228515625, 0.0135040283203125, 0.031707763671875, -0.0457763671875, -0.043304443359375, -0.022186279296875, -0.0484619140625, -0.01035308837890625, -0.017486572265625, 0.0237884521484375, -0.036041259765625, -0.0276641845703125, 0.00193023681640625, -0.0247650146484375, 0.0282745361328125, 0.0391845703125, -0.026702880859375, 0.0228118896484375, 0.058837890625, 0.06427001953125, -0.00363922119140625, -0.047760009765625, -0.0253143310546875, -0.0002276897430419922, -0.035614013671875, 0.051544189453125, -0.016204833984375, -0.0306243896484375, -0.020477294921875, 0.0208282470703125, -0.0029621124267578125, -0.042816162109375, 0.049163818359375, -0.023223876953125, 0.053619384765625, -0.013153076171875, -0.052032470703125, -0.0386962890625, 0.01519775390625, -0.052093505859375, 0.052459716796875, 0.03216552734375, -0.062469482421875, -0.0008044242858886719, -0.03155517578125, -0.00701141357421875, -0.01074981689453125, 0.01165008544921875, -0.0269317626953125, 0.011383056640625, -0.00980377197265625, 0.01462554931640625, -0.043548583984375, -0.01235198974609375, -0.0249481201171875, -0.02325439453125, 0.01195526123046875, 0.0156097412109375, 0.059478759765625, 0.041595458984375, -0.0234375, -4.76837158203125e-7, -0.038330078125, 0.00742340087890625, 0.01617431640625, -0.016021728515625, -0.017730712890625, -0.03240966796875, 0.0166015625, -0.002323150634765625, 0.042022705078125, -0.027862548828125, 0.03778076171875, -0.0172119140625, 0.0308837890625, 0.03521728515625, -0.00421142578125, 0.033721923828125, -0.052642822265625, 0.039154052734375, -0.0020465850830078125, 0.017669677734375, -0.0167999267578125, -0.062744140625, -0.06610107421875, -0.0249176025390625, 0.005237579345703125, 0.048858642578125, -0.03485107421875, 0.01080322265625, 0.005218505859375, -0.07275390625, -0.049896240234375, -0.0072174072265625, 0.05230712890625, 
0.046905517578125, 0.0243072509765625, -0.025787353515625, -0.057403564453125, -0.0872802734375, -0.0092010498046875, -0.0211029052734375, 0.00304412841796875, 0.0302276611328125, 0.0322265625, -0.05657958984375, 0.052215576171875, -0.06451416015625, -0.017913818359375, -0.0023365020751953125, 0.01983642578125, 0.048858642578125, 0.03594970703125, 0.05462646484375, -0.04095458984375, -0.0027675628662109375, 0.002105712890625, -0.06884765625, -0.0191802978515625, -0.002445220947265625, 0.01226043701171875, 0.0223388671875, 0.0055999755859375, -0.07135009765625, 0.036041259765625, 0.03131103515625, -0.027191162109375, 0.06219482421875, -0.0292816162109375, 0.05096435546875, -0.0997314453125, 0.01474761962890625, -0.0002008676528930664, -0.01389312744140625, -0.0465087890625, 0.045440673828125, -0.0002751350402832031, -0.011688232421875, -0.04119873046875, 0.04388427734375, -0.037811279296875, 0.00007957220077514648, -0.025482177734375, -0.0003654956817626953, -0.006381988525390625, 0.0162200927734375, -0.00974273681640625, 0.0447998046875, 0.03912353515625, -0.0279541015625, 0.06256103515625, 0.012969970703125, 0.003398895263671875, 0.03448486328125, -0.048370361328125, 0.01189422607421875, 0.0034198760986328125, 0.0289154052734375, -0.055694580078125, -0.0196075439453125, 0.036224365234375, -0.03106689453125, 0.01329803466796875, -0.0075836181640625, -0.0138397216796875, -0.03240966796875, -0.03399658203125, 0.0323486328125, 0.06109619140625, -0.02923583984375, 0.06402587890625, 0.019622802734375, 0.0010557174682617188, -0.0499267578125, -0.04730224609375, 0.0175933837890625, -0.035736083984375, -0.055572509765625, 0.02935791015625, -0.0009584426879882812, -0.0258026123046875, -0.000736236572265625, -0.0017499923706054688, -0.0109710693359375, -0.024993896484375, 0.025604248046875, 0.04107666015625, -0.02374267578125, -0.0499267578125, 0.0008220672607421875, -0.02459716796875, -0.00754547119140625, -0.00623321533203125, 0.0300445556640625, -0.009368896484375, 
-0.0008802413940429688, -0.050506591796875, 0.029541015625, 0.059234619140625, 0.00823974609375, 0.0242767333984375, 0.0518798828125, -0.04095458984375, 0.01326751708984375, -0.071044921875, -0.021240234375, -0.030364990234375, -0.0008411407470703125, -0.0176239013671875, -0.0728759765625, 0.049591064453125, 0.0007729530334472656, 0.03564453125, 0.048583984375, 0.025360107421875, -0.01117706298828125, 0.0278472900390625, 0.034027099609375, -0.005634307861328125, 0.007442474365234375, -0.0230865478515625, 0.0031585693359375, -0.06549072265625, -0.0250701904296875, -0.02972412109375, -0.027130126953125, -0.037994384765625, -0.06243896484375, 0.01200103759765625, -0.01007080078125, 0.00328826904296875, 0.0687255859375, -0.031768798828125, 0.01140594482421875, 0.0477294921875, -0.0009279251098632812, 0.00970458984375, 0.02276611328125, 0.00485992431640625, -0.02813720703125, -0.0345458984375, -0.018280029296875, 0.08355712890625, 0.01873779296875, 0.0699462890625, 0.0035991668701171875, 0.07220458984375, 0.0176239013671875, 0.01279449462890625, -0.03485107421875, 0.0509033203125, 0.0013360977172851562, -0.03643798828125, -0.002788543701171875, -0.04296875, -0.07012939453125, 0.0247039794921875, -0.0167236328125, -0.046051025390625, 0.02325439453125, 0.01158905029296875, -0.0302276611328125, 0.0268707275390625, -0.05133056640625, 0.046966552734375, 0.0114593505859375, -0.0166015625, -0.052001953125, -0.044525146484375, 0.05426025390625, -0.01515960693359375, 0.01007080078125, 0.01922607421875, -0.0150146484375, 0.0733642578125, -0.034942626953125, 0.03594970703125, 0.01401519775390625, -0.00852203369140625, 0.02197265625, 0.014373779296875, 0.033416748046875, 0.0020122528076171875, 0.01515960693359375, 0.0204925537109375, 0.015716552734375, -0.00653839111328125, -0.043426513671875, 0.07830810546875, -0.049591064453125, -0.05804443359375, -0.05419921875, -0.048004150390625, 0.039642333984375, 0.01229095458984375, 0.01319122314453125, 0.03460693359375, 
-0.0005612373352050781, 0.002105712890625, 0.05584716796875, -0.0165252685546875, 0.008880615234375, 0.054718017578125, -0.043975830078125, -0.049407958984375, 0.05181884765625, -0.006938934326171875, 0.007274627685546875, 0.02056884765625, 0.006702423095703125, -0.01450347900390625, 0.0006690025329589844, -0.0183868408203125, 0.05419921875, -0.03253173828125, -0.0113067626953125, -0.04168701171875, -0.0216522216796875, -0.043548583984375, -0.0125885009765625, -0.029083251953125, -0.045440673828125, -0.021453857421875, 0.01544189453125, 0.03729248046875, 0.047149658203125, -0.035614013671875, 0.0140838623046875, -0.04638671875, 0.033050537109375, 0.019195556640625, -0.012054443359375, -0.02325439453125, -0.061370849609375, 0.005199432373046875, -0.004436492919921875, -0.0161895751953125, -0.07891845703125, 0.03948974609375, -0.00728607177734375, 0.03839111328125, 0.031402587890625, -0.016571044921875, 0.057647705078125, -0.03399658203125, 0.047943115234375, 0.03192138671875, -0.041290283203125, 0.0455322265625, -0.019287109375, 0.004756927490234375, -0.0005960464477539062, 0.0237884521484375, -0.05120849609375, -0.024169921875, -0.07305908203125, -0.0457763671875, 0.064697265625, 0.0291290283203125, -0.029205322265625, 0.0259857177734375, 0.04119873046875, -0.00543212890625, 0.0185394287109375, -0.0186614990234375, -0.0509033203125, -0.01390838623046875, 0.017059326171875, -0.0213623046875, -0.029083251953125, -0.044952392578125, -0.0231475830078125, 0.04937744140625, 0.01456451416015625, 0.024200439453125, 0.0230255126953125, 0.007061004638671875, -0.01548004150390625, 0.0041351318359375, 0.050201416015625, 0.04339599609375, -0.0467529296875, 0.01409149169921875, 0.02142333984375, -0.046905517578125, 0.002246856689453125, 0.0203704833984375, -0.00605010986328125, 0.01995849609375, 0.037628173828125, 0.0546875, 0.02886962890625, -0.03485107421875, 0.004299163818359375, -0.0107269287109375, -0.01849365234375, -0.0196075439453125, 0.017364501953125, 
0.00913238525390625, 0.030120849609375, 0.034393310546875, 0.0170440673828125, 0.014617919921875, -0.04229736328125, 0.01959228515625, 0.0196380615234375, -0.00292205810546875, -0.00774383544921875, 0.044281005859375, -0.0015249252319335938, -0.0350341796875, 0.040679931640625, -0.00980377197265625, -0.033599853515625, 0.05517578125, 0.05535888671875, 0.057159423828125, -0.03515625, 0.0178680419921875, 0.044189453125, 0.040985107421875, -0.000278472900390625, 0.02349853515625, 0.009033203125, -0.03436279296875, -0.00939178466796875, -0.03546142578125, -0.033050537109375, 0.006443023681640625, -0.042999267578125, 0.017486572265625, -0.04241943359375, -0.0206298828125, 0.002193450927734375, 0.01093292236328125, -0.034088134765625, 0.0203094482421875, -0.0023975372314453125, 0.0843505859375, -0.05792236328125, 0.044097900390625, 0.07568359375, -0.0035991668701171875, -0.06396484375, -0.0343017578125, 0.031005859375, -0.0648193359375, 0.02496337890625, -0.0023479461669921875, 0.009613037109375, -0.00879669189453125, -0.03485107421875, -0.076416015625, 0.10400390625, 0.0276336669921875, -0.043792724609375, 0.0135650634765625, -0.0180206298828125, 0.02734375, -0.041900634765625, 0.023895263671875, 0.048370361328125, 0.042816162109375, 0.0242462158203125, -0.0869140625, 0.029144287109375, -0.024200439453125, -0.0011243820190429688, 0.0080108642578125, -0.041778564453125, 0.07843017578125, -0.03936767578125, -0.00984954833984375, 0.0277862548828125, 0.055877685546875, 0.0635986328125, 0.027923583984375, 0.044464111328125, 0.08685302734375, 0.05303955078125, -0.01861572265625, 0.07171630859375, -0.00927734375, 0.048126220703125, 0.04510498046875, -0.01311492919921875, 0.042816162109375, 0.0533447265625, -0.0293426513671875, 0.0306854248046875, 0.06402587890625, -0.00844573974609375, 0.0257568359375, -0.0049591064453125, -0.0164947509765625, -0.01788330078125, 0.0014781951904296875, -0.07012939453125, 0.00930023193359375, 0.01342010498046875, -0.0207672119140625, 
-0.02923583984375, -0.03057861328125, 0.034393310546875, -0.0330810546875, -0.004352569580078125, 0.020660400390625, 0.01064300537109375, -0.05694580078125, 0.05279541015625, 0.00920867919921875, 0.062408447265625, -0.0645751953125, -0.00734710693359375, -0.03668212890625, 0.0174102783203125, -0.0257568359375, -0.06658935546875, 0.01258087158203125, -0.01727294921875, -0.02349853515625, 0.009033203125, 0.056976318359375, -0.0227508544921875, -0.04339599609375, 0.0145111083984375, 0.0224761962890625, 0.04046630859375, 0.0256500244140625, -0.06390380859375, 0.0203094482421875, 0.019317626953125, -0.02349853515625, 0.0230560302734375, 0.00609588623046875, 0.0095367431640625, 0.06658935546875, 0.05523681640625, 0.00016760826110839844, -0.01261138916015625, -0.019622802734375, 0.06903076171875, -0.040313720703125, -0.024322509765625, -0.03814697265625, 0.00905609130859375, -0.0035228729248046875, -0.03997802734375, 0.061859130859375, 0.05120849609375, 0.0285491943359375, -0.027618408203125, 0.040435791015625, -0.02325439453125, 0.025909423828125, -0.025115966796875, 0.059814453125, -0.07080078125, -0.0060882568359375, -0.0234375, -0.08148193359375, 0.002582550048828125, 0.0479736328125, 0.00981903076171875, -0.018341064453125, 0.057342529296875, 0.0745849609375, -0.01548004150390625, -0.00882720947265625, 0.0036106109619140625, 0.026641845703125, 0.007137298583984375, 0.060546875, 0.047760009765625, -0.0452880859375, 0.01708984375, -0.0240325927734375, -0.0457763671875, -0.0146636962890625, -0.059051513671875, -0.051055908203125, -0.0465087890625, -0.033050537109375, -0.045928955078125, 0.004467010498046875, 0.05609130859375, 0.03369140625, -0.04437255859375, -0.031646728515625, 0.039154052734375, 0.0172576904296875, -0.025054931640625, -0.0169525146484375, 0.0191497802734375, -0.006683349609375, -0.05438232421875, 0.037078857421875, 0.0247955322265625, 0.0056304931640625, -0.019317626953125, -0.019317626953125, -0.0005121231079101562, -0.0101776123046875, 
0.01200103759765625, 0.043609619140625, -0.0653076171875, -0.0004756450653076172, -0.007312774658203125, 0.004146575927734375, -0.019195556640625, 0.06024169921875, -0.0645751953125, 0.006877899169921875, 0.037017822265625, 0.0115814208984375, 0.03887939453125, -0.024993896484375, 0.046875, -0.0235595703125, 0.02252197265625, -0.01617431640625, 0.057647705078125, 0.01152801513671875, -0.0196380615234375, 0.045928955078125, 0.055572509765625, -0.040130615234375, -0.0784912109375, 0.00559234619140625, -0.10308837890625, -0.0092926025390625, 0.08538818359375, -0.00887298583984375, -0.03363037109375, 0.0179595947265625, -0.033721923828125, 0.0183563232421875, -0.02239990234375, 0.04986572265625, 0.026641845703125, 0.00499725341796875, -0.025482177734375, -0.02276611328125, 0.029937744140625, 0.0219879150390625, -0.045074462890625, -0.0137939453125, 0.0360107421875, 0.039886474609375, 0.049652099609375, 0.036285400390625, -0.00923919677734375, 0.039764404296875, -0.008575439453125, 0.01555633544921875, -0.0164337158203125, -0.000988006591796875, -0.0038509368896484375, -0.00586700439453125, -0.03729248046875, -0.00508880615234375 ] ]
Fredithefish/RedPajama-INCITE-Chat-3B-ShareGPT-11K
2023-06-06T16:36:59.000Z
[ "transformers", "pytorch", "gpt_neox", "text-generation", "en", "dataset:Fredithefish/ShareGPT-Unfiltered-RedPajama-Chat-format", "license:apache-2.0", "has_space", "text-generation-inference", "region:us" ]
text-generation
Fredithefish
null
null
Fredithefish/RedPajama-INCITE-Chat-3B-ShareGPT-11K
2
5,842
transformers
2023-06-06T16:24:52
--- license: apache-2.0 datasets: - Fredithefish/ShareGPT-Unfiltered-RedPajama-Chat-format language: - en inference: false --- ### Model Description <a href="https://huggingface.co/togethercomputer/RedPajama-INCITE-Chat-3B-v1">RedPajama-INCITE-Chat-3B-v1</a> finetuned on 11K pairs from unfiltered ShareGPT dataset - **Model type:** language model - **Language(s) (NLP):** Engish - **License:** apache-2.0 - **Finetuned from model:** RedPajama-INCITE-Chat-3B
464
[ [ -0.039154052734375, -0.06195068359375, 0.0011138916015625, 0.0650634765625, -0.031890869140625, -0.017547607421875, -0.0231475830078125, -0.0430908203125, 0.032623291015625, 0.0516357421875, -0.06597900390625, -0.036407470703125, -0.06048583984375, -0.0016527175903320312, -0.0484619140625, 0.08819580078125, 0.006946563720703125, 0.00391387939453125, 0.0007162094116210938, -0.021453857421875, -0.037689208984375, -0.027374267578125, -0.0570068359375, -0.02508544921875, 0.00833892822265625, 0.039703369140625, 0.057464599609375, 0.0131683349609375, 0.0443115234375, 0.014892578125, -0.005893707275390625, -0.008819580078125, -0.0599365234375, 0.0036468505859375, 0.005229949951171875, -0.00878143310546875, -0.040069580078125, -0.002445220947265625, 0.036407470703125, 0.0426025390625, -0.0203857421875, 0.030731201171875, 0.00579833984375, 0.039459228515625, -0.0228729248046875, 0.0235748291015625, -0.0482177734375, -0.0119476318359375, -0.036407470703125, 0.006862640380859375, -0.021270751953125, -0.022125244140625, 0.00707244873046875, -0.043121337890625, 0.00820159912109375, 0.01123809814453125, 0.0721435546875, 0.0197296142578125, -0.018280029296875, -0.00006192922592163086, -0.033538818359375, 0.041900634765625, -0.0560302734375, 0.033966064453125, 0.053192138671875, 0.0197906494140625, -0.01044464111328125, -0.06475830078125, -0.03857421875, 0.00756072998046875, -0.0281982421875, -0.0150604248046875, -0.023345947265625, -0.01250457763671875, -0.0024776458740234375, 0.04144287109375, -0.046142578125, -0.0074462890625, -0.053985595703125, -0.0024967193603515625, 0.050689697265625, 0.0021038055419921875, 0.03521728515625, -0.00685882568359375, -0.023651123046875, 0.0020771026611328125, -0.050537109375, -0.035186767578125, 0.029296875, 0.04327392578125, -0.0687255859375, 0.042938232421875, -0.033111572265625, 0.0587158203125, -0.009521484375, 0.0211029052734375, 0.0296783447265625, -0.034637451171875, -0.031646728515625, -0.0289459228515625, 0.0689697265625, 
0.0152587890625, 0.007152557373046875, -0.00994873046875, 0.0221405029296875, 0.0092010498046875, 0.00440216064453125, -0.063720703125, -0.038116455078125, 0.0126495361328125, -0.047332763671875, -0.0169677734375, -0.00647735595703125, -0.04833984375, -0.0017786026000976562, -0.0222320556640625, 0.035430908203125, -0.0321044921875, -0.05120849609375, 0.031005859375, 0.012542724609375, 0.0106964111328125, 0.0007786750793457031, -0.06500244140625, 0.062042236328125, 0.0416259765625, 0.0430908203125, -0.01016998291015625, -0.023895263671875, 0.01274871826171875, -0.01334381103515625, 0.00873565673828125, 0.0290679931640625, -0.0174560546875, -0.0228271484375, -0.0203399658203125, 0.01062774658203125, 0.006500244140625, -0.0162200927734375, 0.06005859375, -0.036041259765625, 0.032012939453125, 0.01494598388671875, -0.033966064453125, -0.03985595703125, 0.0183258056640625, -0.051116943359375, 0.07147216796875, 0.0028324127197265625, -0.06512451171875, 0.0253448486328125, -0.06463623046875, -0.01454925537109375, 0.0235137939453125, 0.00214385986328125, -0.025634765625, -0.01727294921875, 0.0009174346923828125, 0.0384521484375, -0.0205841064453125, 0.024505615234375, -0.0304718017578125, -0.0178680419921875, -0.00514984130859375, -0.0277252197265625, 0.08013916015625, 0.0220794677734375, -0.001628875732421875, 0.017059326171875, -0.06329345703125, -0.0035247802734375, 0.0273284912109375, -0.005550384521484375, -0.027618408203125, -0.010955810546875, 0.031951904296875, 0.0166168212890625, 0.040374755859375, -0.045654296875, 0.017547607421875, -0.032806396484375, 0.007049560546875, 0.0247344970703125, 0.0028533935546875, 0.005035400390625, -0.0161590576171875, 0.031890869140625, -0.014190673828125, 0.026641845703125, 0.032806396484375, -0.043212890625, -0.06634521484375, -0.02825927734375, 0.01184844970703125, 0.0343017578125, -0.0248870849609375, 0.0211334228515625, -0.0143280029296875, -0.05926513671875, -0.04290771484375, 0.003833770751953125, 0.035430908203125, 
-0.0007486343383789062, 0.0299835205078125, 0.0021209716796875, -0.062286376953125, -0.070556640625, -0.00600433349609375, -0.00261688232421875, 0.00669097900390625, 0.032867431640625, 0.037841796875, -0.0325927734375, 0.03436279296875, -0.041534423828125, -0.0141143798828125, -0.01439666748046875, -0.00934600830078125, 0.032867431640625, 0.042144775390625, 0.0439453125, -0.054779052734375, -0.060638427734375, -0.0107421875, -0.034271240234375, -0.0184783935546875, 0.0267486572265625, -0.02728271484375, 0.01003265380859375, 0.01739501953125, -0.0316162109375, 0.0248260498046875, 0.087646484375, -0.06219482421875, 0.0190277099609375, 0.0018568038940429688, 0.013427734375, -0.0946044921875, 0.0028476715087890625, -0.0026035308837890625, -0.0260772705078125, -0.0290374755859375, 0.0248565673828125, -0.00901031494140625, 0.0005536079406738281, -0.048583984375, 0.06048583984375, -0.028594970703125, 0.003292083740234375, -0.01335906982421875, 0.00937652587890625, -0.016815185546875, 0.0433349609375, 0.004001617431640625, 0.04473876953125, 0.044952392578125, -0.037567138671875, 0.034942626953125, 0.0648193359375, -0.024444580078125, 0.019012451171875, -0.06500244140625, 0.0019092559814453125, 0.04095458984375, 0.0101470947265625, -0.04736328125, -0.03253173828125, 0.02093505859375, -0.04754638671875, 0.006984710693359375, -0.024261474609375, -0.0546875, -0.002246856689453125, -0.0170745849609375, 0.04638671875, 0.0240631103515625, -0.036834716796875, 0.00879669189453125, 0.058380126953125, -0.021697998046875, -0.03106689453125, -0.0443115234375, 0.00022780895233154297, -0.016510009765625, -0.06304931640625, 0.0173492431640625, -0.01375579833984375, -0.0022106170654296875, -0.0087738037109375, 0.022796630859375, -0.023681640625, 0.011383056640625, 0.040618896484375, 0.0203704833984375, 0.01324462890625, -0.007480621337890625, -0.017364501953125, 0.0022869110107421875, -0.00846099853515625, 0.018035888671875, 0.0748291015625, -0.01666259765625, -0.016082763671875, 
-0.03497314453125, 0.0333251953125, 0.0107574462890625, 0.01531219482421875, 0.08721923828125, 0.03436279296875, -0.032928466796875, -0.0037937164306640625, -0.0205535888671875, -0.01270294189453125, -0.0321044921875, 0.0164642333984375, -0.01812744140625, -0.08221435546875, 0.04388427734375, 0.0242767333984375, -0.0097808837890625, 0.05657958984375, 0.043121337890625, 0.023773193359375, 0.056732177734375, 0.03955078125, -0.002887725830078125, 0.031768798828125, -0.003200531005859375, 0.0037326812744140625, -0.042236328125, -0.0244598388671875, -0.047149658203125, -0.00839996337890625, -0.07098388671875, -0.00986480712890625, -0.0042724609375, 0.0034008026123046875, -0.05584716796875, 0.057525634765625, -0.05242919921875, 0.039703369140625, 0.06329345703125, 0.0165252685546875, 0.0107574462890625, -0.017242431640625, 0.01033782958984375, -0.0095977783203125, -0.033477783203125, -0.0190887451171875, 0.0740966796875, 0.036285400390625, 0.05780029296875, 0.042144775390625, 0.0248870849609375, 0.0087738037109375, 0.0037097930908203125, -0.01397705078125, 0.0391845703125, -0.028045654296875, -0.0789794921875, -0.0047607421875, -0.0189208984375, -0.05731201171875, -0.0015897750854492188, -0.011566162109375, -0.05352783203125, -0.01324462890625, 0.005725860595703125, 0.00891876220703125, 0.009033203125, -0.050872802734375, 0.09112548828125, 0.01092529296875, 0.00530242919921875, -0.01213836669921875, -0.048797607421875, 0.044464111328125, 0.0053863525390625, 0.003627777099609375, -0.004100799560546875, 0.016845703125, 0.0648193359375, -0.03399658203125, 0.0655517578125, -0.006946563720703125, 0.0035190582275390625, 0.006000518798828125, 0.0194244384765625, 0.0469970703125, 0.0072784423828125, 0.0198822021484375, 0.053741455078125, 0.011138916015625, -0.02130126953125, 0.0118408203125, 0.07574462890625, -0.07159423828125, -0.0177154541015625, -0.0194854736328125, -0.02630615234375, -0.005748748779296875, 0.0198516845703125, 0.021820068359375, 0.007732391357421875, 
-0.0186614990234375, 0.04638671875, 0.0309295654296875, -0.01849365234375, 0.0209197998046875, 0.044769287109375, -0.040130615234375, -0.03326416015625, 0.041168212890625, -0.0167694091796875, 0.0169525146484375, 0.00011366605758666992, -0.002902984619140625, -0.031341552734375, -0.051513671875, -0.0034942626953125, 0.02789306640625, -0.0355224609375, -0.0256195068359375, -0.0626220703125, -0.0355224609375, -0.026123046875, 0.01271820068359375, -0.038330078125, -0.05084228515625, -0.03497314453125, -0.01305389404296875, 0.06097412109375, 0.0304718017578125, -0.0034313201904296875, 0.052001953125, -0.050689697265625, 0.0225677490234375, 0.01447296142578125, 0.076171875, -0.0303955078125, -0.06427001953125, -0.017486572265625, 0.0021457672119140625, -0.01270294189453125, -0.048736572265625, 0.050933837890625, 0.00800323486328125, 0.032684326171875, -0.00027942657470703125, 0.00920867919921875, 0.042633056640625, -0.01263427734375, 0.06982421875, 0.0003299713134765625, -0.059478759765625, 0.058929443359375, -0.0399169921875, 0.0026569366455078125, 0.04510498046875, 0.01514434814453125, -0.037506103515625, -0.025421142578125, -0.07427978515625, -0.07330322265625, 0.07598876953125, 0.047332763671875, 0.0249481201171875, -0.01139068603515625, 0.00585174560546875, 0.01470184326171875, 0.00592041015625, -0.096435546875, -0.045684814453125, -0.0131072998046875, -0.028289794921875, 0.0167388916015625, -0.040679931640625, -0.0260772705078125, -0.005382537841796875, 0.0865478515625, 0.0179901123046875, 0.022674560546875, -0.0256805419921875, 0.00211334228515625, -0.0007147789001464844, 0.0055694580078125, 0.056304931640625, 0.01580810546875, -0.040618896484375, -0.014129638671875, 0.0030460357666015625, -0.039581298828125, -0.0165863037109375, 0.019195556640625, -0.0084381103515625, -0.002063751220703125, 0.01513671875, 0.08770751953125, 0.00444793701171875, -0.0186614990234375, 0.0450439453125, -0.03253173828125, -0.0021991729736328125, -0.034210205078125, 
-0.0010890960693359375, 0.01995849609375, 0.0171051025390625, -0.0015916824340820312, 0.001636505126953125, 0.01412200927734375, 0.00867462158203125, 0.023101806640625, 0.0399169921875, -0.029937744140625, -0.0285186767578125, 0.043609619140625, 0.026763916015625, -0.024810791015625, 0.0288238525390625, -0.01468658447265625, -0.006275177001953125, 0.0200653076171875, 0.056427001953125, 0.0599365234375, 0.0005207061767578125, -0.0104827880859375, 0.042205810546875, 0.010223388671875, -0.00704193115234375, 0.038177490234375, 0.01219940185546875, -0.0780029296875, -0.0357666015625, -0.055572509765625, -0.028472900390625, 0.0290374755859375, -0.0379638671875, 0.036102294921875, -0.0190582275390625, 0.006427764892578125, -0.00223541259765625, 0.0212554931640625, -0.0489501953125, 0.01221466064453125, -0.0033245086669921875, 0.06732177734375, -0.0694580078125, 0.06842041015625, 0.04302978515625, -0.034912109375, -0.0626220703125, -0.039764404296875, 0.0024127960205078125, -0.06756591796875, 0.05255126953125, -0.00484466552734375, 0.01062774658203125, 0.0096435546875, -0.05670166015625, -0.06671142578125, 0.062103271484375, 0.0288848876953125, -0.0313720703125, 0.0323486328125, -0.0247650146484375, 0.0264434814453125, -0.0216064453125, 0.014190673828125, 0.034576416015625, 0.0251922607421875, 0.00940704345703125, -0.09954833984375, -0.007762908935546875, -0.03656005859375, -0.013214111328125, 0.02630615234375, -0.061676025390625, 0.058990478515625, 0.020965576171875, -0.017059326171875, -0.0120086669921875, 0.03924560546875, 0.031768798828125, -0.004337310791015625, 0.022003173828125, 0.0584716796875, 0.00318145751953125, -0.0203399658203125, 0.068359375, -0.0264892578125, 0.055694580078125, 0.11883544921875, 0.00899505615234375, 0.06390380859375, 0.033660888671875, -0.0343017578125, 0.0309295654296875, 0.0465087890625, 0.0078887939453125, 0.04388427734375, -0.0106658935546875, -0.028289794921875, 0.006866455078125, -0.0003752708435058594, -0.036285400390625, 
0.036590576171875, 0.036102294921875, -0.0237274169921875, -0.003238677978515625, -0.036956787109375, 0.030487060546875, 0.00534820556640625, -0.01346588134765625, 0.0645751953125, -0.009124755859375, -0.0298309326171875, 0.038177490234375, 0.0018072128295898438, 0.0760498046875, -0.03924560546875, 0.003704071044921875, -0.0243682861328125, 0.00823211669921875, -0.005992889404296875, -0.06427001953125, 0.05804443359375, 0.0010862350463867188, -0.0163421630859375, -0.006717681884765625, 0.051239013671875, -0.042236328125, -0.03411865234375, 0.03717041015625, 0.0174560546875, 0.0198822021484375, -0.007236480712890625, -0.059967041015625, 0.02105712890625, 0.021209716796875, -0.0255889892578125, 0.0027332305908203125, 0.022491455078125, 0.0056915283203125, 0.06146240234375, 0.038787841796875, 0.023284912109375, 0.0086669921875, 0.004711151123046875, 0.049102783203125, -0.0447998046875, -0.0328369140625, -0.061798095703125, 0.0240631103515625, -0.019500732421875, -0.0268402099609375, 0.06072998046875, 0.037506103515625, 0.0789794921875, -0.01373291015625, 0.04290771484375, -0.0252838134765625, 0.05474853515625, -0.0201568603515625, 0.057891845703125, -0.0233001708984375, 0.00765228271484375, -0.01491546630859375, -0.05352783203125, -0.0447998046875, 0.04754638671875, 0.0161590576171875, -0.0033779144287109375, 0.0230560302734375, 0.059661865234375, -0.0171051025390625, 0.01221466064453125, 0.0243377685546875, 0.0218353271484375, 0.01131439208984375, 0.029296875, 0.0236053466796875, -0.06329345703125, 0.034149169921875, -0.0263519287109375, -0.00860595703125, -0.0257568359375, -0.07427978515625, -0.053070068359375, -0.041351318359375, -0.033477783203125, -0.0257720947265625, -0.017425537109375, 0.0772705078125, 0.06390380859375, -0.07489013671875, -0.0182647705078125, -0.0192413330078125, 0.0004863739013671875, 0.01010894775390625, -0.0131072998046875, 0.025848388671875, 0.0069732666015625, -0.058746337890625, 0.0181427001953125, 0.007717132568359375, 0.026275634765625, 
-0.01739501953125, -0.0258941650390625, -0.034332275390625, -0.0017795562744140625, 0.0280303955078125, 0.0197296142578125, -0.05816650390625, -0.016998291015625, -0.03436279296875, -0.013641357421875, -0.018035888671875, 0.004302978515625, -0.032867431640625, 0.0142364501953125, 0.043792724609375, 0.0230255126953125, 0.046844482421875, -0.017669677734375, 0.0340576171875, -0.08563232421875, 0.045379638671875, -0.00624847412109375, 0.029388427734375, 0.0252838134765625, -0.0193939208984375, 0.05523681640625, 0.0079345703125, -0.057159423828125, -0.04925537109375, 0.00339508056640625, -0.09210205078125, -0.003917694091796875, 0.08892822265625, -0.009185791015625, 0.010009765625, 0.0038299560546875, -0.0242156982421875, 0.026763916015625, -0.0271759033203125, 0.041595458984375, 0.06103515625, -0.0004763603210449219, -0.02398681640625, -0.032073974609375, 0.051849365234375, 0.0021457672119140625, -0.064208984375, 0.00896453857421875, 0.042205810546875, 0.0287628173828125, -0.00592803955078125, 0.06634521484375, -0.0226898193359375, 0.0034313201904296875, -0.005947113037109375, 0.03094482421875, -0.0033206939697265625, -0.0151824951171875, -0.00464630126953125, 0.00812530517578125, 0.008514404296875, -0.040924072265625 ] ]
KnutJaegersberg/black_goo_recipe_a
2023-08-31T04:46:17.000Z
[ "transformers", "pytorch", "llama", "text-generation", "custom_code", "license:cc-by-nc-4.0", "endpoints_compatible", "has_space", "text-generation-inference", "region:us" ]
text-generation
KnutJaegersberg
null
null
KnutJaegersberg/black_goo_recipe_a
0
5,842
transformers
2023-08-30T21:50:21
--- license: cc-by-nc-4.0 --- Prompt template ``` ### Instruction: What is the difference between operant and classical conditioning? ### Response: Classical conditioning is a learning process that occurs when an organism is exposed to a stimulus that elicits a response. This response is referred to as a conditioned response (CR). The stimulus that elicits the response is called the conditioned stimulus (CS). The organism's response to the CS is called the unconditioned response (UR). Operant conditioning is a learning process that occurs when an organism is exposed to a stimulus that elicits a response. This response is referred to as an operant response (OR). The stimulus that elicits the response is called the operant stimulus (OS). The organism's response to the OS is called the operant response (OR). Classical conditioning and operant conditioning are two different types of learning. Classical conditioning involves the association of a CS with a UR, while operant conditioning involves the association of an OS with an OR. In classical conditioning, the CS is paired with the UR over time, resulting in the UR becoming conditioned to the CS. This process is referred to as Pavlovian conditioning. In operant conditioning, the OS is paired with the OR over time, resulting in the OR becoming conditioned to the OS. This process is referred to as instrumental conditioning. Both classical conditioning and operant conditioning are important in learning and behavior modification. However, they differ in the way they are used and the types of responses they elicit. In conclusion, classical conditioning and operant conditioning are two different types of learning that occur when an organism is exposed to a stimulus that elicits a response. Classical conditioning involves the association of a CS with a UR, while operant conditioning involves the association of an OS with an OR. ```
1,917
[ [ -0.0233612060546875, -0.07623291015625, 0.0201873779296875, 0.0020084381103515625, -0.0157470703125, 0.01126861572265625, -0.0302581787109375, -0.0145416259765625, -0.002048492431640625, 0.0244598388671875, -0.0294647216796875, 0.0211944580078125, -0.00772857666015625, 0.0198974609375, -0.00916290283203125, 0.08831787109375, -0.0026721954345703125, 0.05218505859375, -0.036285400390625, -0.009857177734375, -0.0229034423828125, -0.046295166015625, -0.08697509765625, -0.048553466796875, 0.01107025146484375, 0.019012451171875, 0.006092071533203125, 0.0255584716796875, 0.0537109375, 0.040374755859375, 0.00432586669921875, -0.041900634765625, -0.01800537109375, 0.004871368408203125, -0.01534271240234375, -0.039825439453125, 0.0029754638671875, 0.001312255859375, 0.0209808349609375, 0.04962158203125, -0.025726318359375, 0.005340576171875, -0.01457977294921875, 0.06695556640625, -0.0628662109375, 0.0108642578125, 0.0004248619079589844, 0.034881591796875, 0.004398345947265625, -0.050750732421875, -0.0240631103515625, -0.0384521484375, 0.01512908935546875, -0.06396484375, -0.0022640228271484375, -0.0089111328125, 0.032379150390625, 0.0132904052734375, -0.020904541015625, -0.0144500732421875, -0.0655517578125, 0.0294647216796875, -0.038543701171875, 0.0193328857421875, 0.010101318359375, 0.0059661865234375, -0.007289886474609375, -0.053009033203125, -0.0377197265625, -0.048187255859375, -0.00933074951171875, 0.023101806640625, 0.0214691162109375, 0.0164642333984375, 0.040313720703125, 0.043731689453125, -0.04046630859375, -0.014373779296875, -0.04461669921875, -0.003376007080078125, 0.015625, -0.005519866943359375, 0.0132598876953125, -0.0008983612060546875, -0.07537841796875, -0.0293121337890625, -0.01373291015625, 0.040740966796875, 0.01461029052734375, 0.01593017578125, -0.0032634735107421875, 0.0228271484375, -0.00042724609375, 0.0300750732421875, 0.050628662109375, -0.04144287109375, 0.003635406494140625, 0.01385498046875, -0.0457763671875, 0.0240631103515625, 
0.0335693359375, 0.0191802978515625, 0.023345947265625, -0.006679534912109375, -0.0234222412109375, 0.0010328292846679688, 0.014984130859375, -0.033935546875, -0.017303466796875, -0.0200958251953125, -0.0479736328125, -0.01413726806640625, 0.0127410888671875, -0.04510498046875, -0.01959228515625, -0.04736328125, 0.050872802734375, -0.04510498046875, -0.016571044921875, -0.01419830322265625, -0.028076171875, 0.0350341796875, 0.014984130859375, -0.065673828125, 0.060821533203125, 0.0172882080078125, 0.0250091552734375, 0.010986328125, -0.0130462646484375, -0.0192413330078125, -0.0116119384765625, -0.0300750732421875, 0.06781005859375, -0.053436279296875, -0.034820556640625, -0.0034923553466796875, 0.00396728515625, -0.03839111328125, -0.047821044921875, 0.04345703125, -0.07257080078125, 0.0182952880859375, -0.020538330078125, -0.035430908203125, -0.05682373046875, -0.0254364013671875, -0.023193359375, 0.04876708984375, 0.01971435546875, -0.06329345703125, 0.025421142578125, -0.045806884765625, -0.06280517578125, -0.011199951171875, 0.01410675048828125, -0.019378662109375, 0.0276641845703125, -0.045257568359375, -0.00791168212890625, -0.0013027191162109375, -0.033294677734375, -0.054931640625, -0.01335906982421875, 0.06280517578125, 0.001373291015625, 0.016693115234375, 0.03271484375, -0.0181121826171875, 0.0038013458251953125, -0.058990478515625, 0.02362060546875, 0.0019283294677734375, -0.057647705078125, -0.033660888671875, 0.03485107421875, -0.00467681884765625, 0.00860595703125, 0.00939178466796875, -0.030548095703125, 0.0174102783203125, -0.0089263916015625, 0.050567626953125, 0.045440673828125, 0.049285888671875, 0.050018310546875, -0.0278778076171875, 0.0210723876953125, -0.010009765625, -0.0081787109375, 0.000055849552154541016, 0.0236968994140625, -0.078857421875, 0.0180511474609375, -0.0013666152954101562, 0.058319091796875, -0.049713134765625, -0.005855560302734375, 0.0254364013671875, -0.004154205322265625, 0.0014066696166992188, -0.0280609130859375, 
0.0537109375, 0.038818359375, 0.028411865234375, -0.040863037109375, -0.047698974609375, -0.037017822265625, -0.01203155517578125, -0.006061553955078125, 0.0161895751953125, -0.004638671875, 0.04791259765625, 0.048919677734375, 0.00807952880859375, -0.0304412841796875, -0.010009765625, 0.001453399658203125, 0.029266357421875, -0.006168365478515625, 0.05218505859375, 0.037445068359375, -0.049102783203125, -0.0291595458984375, 0.016082763671875, -0.041473388671875, 0.009674072265625, -0.0163116455078125, 0.00836944580078125, 0.030548095703125, 0.05596923828125, 0.0003814697265625, 0.04693603515625, -0.01465606689453125, -0.0164947509765625, 0.03533935546875, -0.033477783203125, 0.045074462890625, -0.037109375, 0.0027561187744140625, -0.018798828125, -0.0079803466796875, -0.052459716796875, 0.01045989990234375, -0.01326751708984375, -0.0173492431640625, -0.047637939453125, -0.022857666015625, -0.0458984375, -0.033935546875, -0.01885986328125, -0.01169586181640625, -0.004764556884765625, 0.05426025390625, 0.0352783203125, 0.05126953125, 0.05560302734375, -0.09039306640625, 0.060516357421875, -0.00614166259765625, -0.005840301513671875, 0.0587158203125, -0.05133056640625, 0.01201629638671875, 0.0024814605712890625, 0.0220947265625, -0.04437255859375, -0.0206298828125, 0.0214996337890625, -0.039520263671875, 0.0477294921875, 0.004528045654296875, -0.01131439208984375, -0.03143310546875, -0.01593017578125, 0.015960693359375, 0.03424072265625, -0.00980377197265625, 0.01230621337890625, -0.027313232421875, 0.0089569091796875, -0.02703857421875, -0.055877685546875, -0.057159423828125, -0.0305633544921875, -0.013458251953125, 0.012359619140625, -0.0168914794921875, -0.029693603515625, 0.0255584716796875, -0.038543701171875, -0.0360107421875, 0.0167236328125, 0.0164794921875, -0.0018138885498046875, -0.0244293212890625, 0.0178070068359375, -0.012908935546875, -0.01549530029296875, 0.01065826416015625, -0.021453857421875, 0.0186767578125, -0.004566192626953125, 
-0.03509521484375, -0.052734375, 0.040802001953125, 0.05322265625, -0.037353515625, 0.055023193359375, 0.04315185546875, -0.01275634765625, 0.0246734619140625, -0.0163116455078125, -0.0297088623046875, -0.0281829833984375, -0.01190948486328125, -0.0239715576171875, -0.0252838134765625, 0.01434326171875, 0.01233673095703125, -0.0161285400390625, 0.0236968994140625, 0.04638671875, -0.027374267578125, 0.053131103515625, 0.03564453125, 0.01861572265625, 0.0472412109375, -0.0306854248046875, -0.004573822021484375, -0.0928955078125, -0.0172882080078125, -0.05474853515625, -0.0272369384765625, -0.042999267578125, -0.0114593505859375, 0.0384521484375, 0.00246429443359375, -0.048858642578125, 0.003459930419921875, -0.0168304443359375, 0.0201263427734375, 0.037261962890625, 0.018402099609375, 0.01204681396484375, 0.0027256011962890625, -0.01025390625, 0.014984130859375, -0.031524658203125, -0.0177154541015625, 0.06707763671875, -0.00298309326171875, 0.071533203125, 0.0225982666015625, 0.0401611328125, 0.0198516845703125, 0.00811767578125, -0.061614990234375, 0.0276641845703125, 0.00577545166015625, -0.06396484375, -0.0020503997802734375, -0.008087158203125, -0.06817626953125, 0.02099609375, -0.031982421875, -0.059112548828125, 0.04278564453125, -0.0027561187744140625, -0.047271728515625, 0.02288818359375, -0.039886474609375, 0.043365478515625, -0.033660888671875, -0.0447998046875, -0.026031494140625, -0.04632568359375, 0.025115966796875, 0.011016845703125, 0.02130126953125, 0.019317626953125, 0.01177215576171875, 0.047454833984375, -0.048614501953125, 0.047576904296875, -0.01296234130859375, 0.043914794921875, 0.0352783203125, 0.018157958984375, 0.014312744140625, -0.00016570091247558594, 0.0222930908203125, -0.08038330078125, 0.0213165283203125, -0.0195770263671875, -0.044647216796875, 0.041290283203125, -0.0765380859375, -0.03662109375, -0.06878662109375, -0.0165557861328125, -0.012786865234375, -0.005962371826171875, 0.036590576171875, 0.05682373046875, 
-0.0227813720703125, 0.0374755859375, 0.09246826171875, -0.0172576904296875, 0.0498046875, 0.07598876953125, 0.0009603500366210938, -0.0243072509765625, 0.039825439453125, 0.001064300537109375, 0.048736572265625, 0.06854248046875, -0.0073394775390625, -0.0110626220703125, -0.0174713134765625, 0.0206298828125, 0.0031719207763671875, -0.0276031494140625, -0.0054931640625, -0.0161285400390625, -0.00843048095703125, -0.045562744140625, -0.01934814453125, 0.00836181640625, -0.02740478515625, -0.0204315185546875, -0.003253936767578125, 0.039825439453125, 0.039642333984375, -0.06597900390625, 0.0027408599853515625, -0.0457763671875, 0.015625, 0.04547119140625, 0.0031604766845703125, 0.0172882080078125, -0.05474853515625, -0.003704071044921875, 0.00066375732421875, -0.03936767578125, -0.08831787109375, 0.0112457275390625, 0.038238525390625, 0.0455322265625, 0.0914306640625, 0.0146331787109375, 0.05804443359375, -0.0249786376953125, 0.04669189453125, 0.0242919921875, -0.053070068359375, 0.040313720703125, -0.017852783203125, -0.019012451171875, 0.03955078125, 0.0300750732421875, -0.061981201171875, -0.0008244514465332031, -0.0615234375, -0.05126953125, 0.06488037109375, -0.0028781890869140625, -0.003330230712890625, 0.00817108154296875, 0.0177154541015625, -0.01203155517578125, 0.02886962890625, -0.06573486328125, -0.019012451171875, -0.029083251953125, 0.058807373046875, -0.0174407958984375, 0.0059356689453125, -0.035614013671875, -0.0257568359375, 0.05322265625, 0.03521728515625, 0.023834228515625, 0.035919189453125, -0.0180206298828125, -0.0462646484375, 0.044677734375, 0.05133056640625, 0.0087432861328125, -0.0227508544921875, -0.00524139404296875, 0.0084381103515625, -0.00897216796875, 0.0189666748046875, -0.0049591064453125, 0.01071929931640625, -0.04254150390625, 0.053802490234375, 0.06317138671875, 0.0299530029296875, -0.0251922607421875, 0.0152130126953125, 0.01007843017578125, -0.0024967193603515625, -0.042938232421875, 0.0299072265625, -0.00884246826171875, 
0.01233673095703125, 0.01438140869140625, 0.0157470703125, 0.01311492919921875, -0.08526611328125, -0.003997802734375, 0.01122283935546875, -0.0401611328125, -0.0034847259521484375, 0.059051513671875, 0.025115966796875, -0.0278472900390625, 0.054931640625, -0.00799560546875, -0.0350341796875, 0.07611083984375, 0.048736572265625, 0.058929443359375, -0.0347900390625, 0.0035343170166015625, 0.041717529296875, -0.00946807861328125, 0.0218658447265625, 0.021392822265625, -0.047210693359375, -0.02099609375, -0.00743865966796875, -0.05670166015625, -0.01360321044921875, 0.0158538818359375, -0.10528564453125, -0.0232696533203125, -0.0523681640625, -0.00959014892578125, 0.007152557373046875, 0.0009641647338867188, -0.033050537109375, 0.0288238525390625, -0.005191802978515625, 0.0814208984375, -0.08050537109375, 0.0328369140625, 0.09490966796875, -0.06353759765625, -0.07818603515625, -0.007305145263671875, 0.00968170166015625, -0.03765869140625, 0.0433349609375, -0.0257568359375, 0.0174102783203125, -0.00872039794921875, -0.00916290283203125, -0.062347412109375, 0.0782470703125, -0.034149169921875, -0.0113067626953125, 0.0311737060546875, -0.004055023193359375, 0.061248779296875, -0.0238037109375, 0.04010009765625, 0.04193115234375, 0.056549072265625, 0.0138702392578125, -0.058197021484375, -0.01983642578125, -0.008514404296875, 0.0288848876953125, 0.0092926025390625, -0.04876708984375, 0.07232666015625, -0.034149169921875, -0.010650634765625, 0.0280303955078125, 0.03778076171875, 0.002628326416015625, 0.0330810546875, 0.0201263427734375, 0.06640625, 0.024383544921875, -0.04132080078125, 0.07122802734375, -0.0182037353515625, 0.0205078125, 0.034149169921875, 0.000026285648345947266, 0.0654296875, 0.030548095703125, -0.044952392578125, 0.06854248046875, 0.109619140625, 0.0129852294921875, 0.062744140625, 0.0419921875, -0.028350830078125, -0.0207366943359375, 0.014984130859375, -0.0701904296875, 0.004367828369140625, 0.04254150390625, -0.0192718505859375, -0.01505279541015625, 
0.040557861328125, 0.032257080078125, 0.0185546875, -0.046600341796875, 0.0626220703125, 0.0091400146484375, -0.0743408203125, 0.05047607421875, -0.006298065185546875, 0.002544403076171875, -0.046783447265625, 0.0244903564453125, -0.0282135009765625, 0.025848388671875, -0.037750244140625, -0.0251007080078125, 0.0034160614013671875, -0.0261383056640625, -0.049774169921875, -0.0002238750457763672, 0.041778564453125, -0.027740478515625, -0.006679534912109375, 0.006061553955078125, 0.0177154541015625, 0.0181884765625, 0.03533935546875, -0.053741455078125, -0.0145416259765625, -0.0038604736328125, -0.033294677734375, 0.03277587890625, 0.035919189453125, 0.0206298828125, 0.054473876953125, 0.043121337890625, -0.003551483154296875, 0.01044464111328125, -0.01861572265625, 0.04180908203125, -0.0186309814453125, -0.0216522216796875, -0.0194244384765625, 0.025482177734375, 0.016876220703125, -0.00563812255859375, 0.0257110595703125, 0.0214996337890625, 0.03887939453125, -0.061004638671875, 0.042724609375, -0.025665283203125, 0.0411376953125, -0.055328369140625, 0.0198516845703125, -0.035675048828125, 0.0063934326171875, -0.007572174072265625, -0.055908203125, 0.0246734619140625, 0.056915283203125, -0.036468505859375, 0.01250457763671875, 0.12030029296875, 0.032989501953125, 0.011932373046875, -0.0269012451171875, -0.012542724609375, 0.0211334228515625, -0.01444244384765625, 0.06829833984375, 0.06243896484375, 0.00106048583984375, 0.02130126953125, -0.01419830322265625, -0.0217132568359375, -0.01800537109375, -0.043304443359375, -0.04034423828125, -0.054473876953125, -0.0301055908203125, -0.0168609619140625, 0.0292205810546875, 0.0537109375, 0.04254150390625, -0.058563232421875, -0.0296173095703125, -0.0257568359375, 0.0213623046875, -0.046722412109375, -0.024688720703125, 0.01467132568359375, -0.04119873046875, -0.04644775390625, 0.0252532958984375, 0.00946807861328125, 0.03057861328125, -0.009033203125, 0.0203704833984375, -0.0191802978515625, 0.0258331298828125, 
0.021026611328125, 0.0814208984375, -0.028076171875, -0.01031494140625, -0.0308837890625, -0.002811431884765625, -0.0024814605712890625, 0.05596923828125, -0.061370849609375, 0.031402587890625, 0.043365478515625, -0.0229644775390625, 0.026092529296875, -0.006061553955078125, 0.0221405029296875, -0.0263671875, 0.01502227783203125, 0.007114410400390625, 0.033111572265625, 0.00875091552734375, -0.01120758056640625, 0.026763916015625, 0.050872802734375, -0.05548095703125, -0.0357666015625, 0.0445556640625, -0.0689697265625, -0.028900146484375, 0.08160400390625, -0.007720947265625, -0.014984130859375, 0.0121002197265625, -0.009002685546875, 0.0208587646484375, -0.033721923828125, 0.051116943359375, 0.041351318359375, -0.0394287109375, 0.0113067626953125, -0.03387451171875, 0.056121826171875, -0.0012750625610351562, -0.05328369140625, -0.03759765625, 0.033233642578125, 0.03863525390625, 0.01216888427734375, 0.0210418701171875, -0.005878448486328125, 0.05438232421875, -0.01552581787109375, 0.0277862548828125, 0.01148223876953125, 0.0036830902099609375, -0.01056671142578125, 0.0232696533203125, -0.003948211669921875, -0.032806396484375 ] ]
Corianas/Quokka_2.7b
2023-06-02T08:03:29.000Z
[ "transformers", "pytorch", "tensorboard", "safetensors", "gpt2", "text-generation", "en", "dataset:the_pile", "dataset:guanaco/guanaco", "arxiv:1910.09700", "license:apache-2.0", "endpoints_compatible", "has_space", "text-generation-inference", "region:us" ]
text-generation
Corianas
null
null
Corianas/Quokka_2.7b
0
5,841
transformers
2023-03-30T13:12:28
--- license: apache-2.0 datasets: - the_pile - guanaco/guanaco language: - en --- # Model Card for Cerebras 2.7b Dollyfied. This is a finetuned model of Cerebras 2.7b model. using DataBricksLabs Dolly Framework ## Model Details ### Model Description This is a finetuned version of cerebras' 2.7Billion paramater model that has been trained to follow instructions. It was accomplished using DataBricks Dolly training tools, and was trained for 2 epochs. - **Developed by:** Finetuned by Corianas (me) using open source tools - **Shared by [optional]:** [More Information Needed] - **Model type:** [More Information Needed] - **Language(s) (NLP):** EN - **License:** cc-by-nc-4.0 - **Finetuned from model:** https://huggingface.co/cerebras/Cerebras-GPT-111m - **Finetuned using:** https://www.databricks.com/blog/2023/03/24/hello-dolly-democratizing-magic-chatgpt-open-models.html ## Uses This is a simple GPT chatbot that has been finetuned to understand instructions. Its knowledge about facts about the world is should be considered suspect at best. ### Direct Use If you have a use you put it to, Please let me know. [More Information Needed] ### Downstream Use [optional] <!-- This section is for the model use when fine-tuned for a task, or when plugged into a larger ecosystem/app --> [More Information Needed] ### Out-of-Scope Use Any form of use where any form of accuracy is needed. FOR THE LOVE OF GOD DO NOT FOLLOW MEDICAL ADVICE FROM THIS. or financial advice. [More Information Needed] ## Bias, Risks, and Limitations Limitations... Yes, I am sure there are so so many. [More Information Needed] ## How to Get Started with the Model Use the code below to get started with the model. [More Information Needed] ## Training Details ### Training Data <!-- This should link to a Data Card, perhaps with a short stub of information on what the training data is all about as well as documentation related to data pre-processing or additional filtering. 
--> [More Information Needed] ### Training Procedure <!-- This relates heavily to the Technical Specifications. Content here should link to that section when it is relevant to the training procedure. --> #### Preprocessing [optional] [More Information Needed] #### Training Hyperparameters - **Training regime:** [More Information Needed] <!--fp32, fp16 mixed precision, bf16 mixed precision, bf16 non-mixed precision, fp16 non-mixed precision, fp8 mixed precision --> #### Speeds, Sizes, Times [optional] <!-- This section provides information about throughput, start/end time, checkpoint size if relevant, etc. --> [More Information Needed] ## Evaluation <!-- This section describes the evaluation protocols and provides the results. --> ### Testing Data, Factors & Metrics #### Testing Data <!-- This should link to a Data Card if possible. --> [More Information Needed] #### Factors <!-- These are the things the evaluation is disaggregating by, e.g., subpopulations or domains. --> [More Information Needed] #### Metrics <!-- These are the evaluation metrics being used, ideally with a description of why. --> [More Information Needed] ### Results [More Information Needed] #### Summary ## Model Examination [optional] <!-- Relevant interpretability work for the model goes here --> [More Information Needed] ## Environmental Impact <!-- Total emissions (in grams of CO2eq) and additional considerations, such as electricity usage, go here. Edit the suggested text below accordingly --> Carbon emissions can be estimated using the [Machine Learning Impact calculator](https://mlco2.github.io/impact#compute) presented in [Lacoste et al. (2019)](https://arxiv.org/abs/1910.09700). - **Hardware Type:** 8xA100s (accomplished while I was downloading the model I was actually training.) 
- **Minutes used:** 25 - **Cloud Provider:** LambdaGPU - **Compute Region:** USA - **Carbon Emitted:** [More Information Needed] ## Technical Specifications [optional] ### Model Architecture and Objective [More Information Needed] ### Compute Infrastructure [More Information Needed] #### Hardware [More Information Needed] #### Software [More Information Needed] ## Citation [optional] <!-- If there is a paper or blog post introducing the model, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the model or model card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Model Card Authors [optional] [More Information Needed] ## Model Card Contact [More Information Needed]
4,764
[ [ -0.036773681640625, -0.0633544921875, 0.01812744140625, 0.006744384765625, -0.0169525146484375, -0.0268402099609375, 0.0051422119140625, -0.033050537109375, 0.00624847412109375, 0.0498046875, -0.04437255859375, -0.036224365234375, -0.047149658203125, -0.0207977294921875, -0.02252197265625, 0.09405517578125, 0.01519775390625, 0.018585205078125, -0.0251312255859375, 0.0160064697265625, -0.040374755859375, -0.05047607421875, -0.058135986328125, -0.0300140380859375, 0.025115966796875, 0.029754638671875, 0.0584716796875, 0.050628662109375, 0.0496826171875, 0.02203369140625, -0.032470703125, -0.0082244873046875, -0.04144287109375, -0.03643798828125, -0.016265869140625, -0.0194854736328125, -0.061248779296875, 0.004974365234375, 0.040008544921875, 0.042083740234375, -0.01299285888671875, 0.045196533203125, 0.00662994384765625, 0.0364990234375, -0.044647216796875, 0.0279388427734375, -0.043426513671875, 0.00714874267578125, -0.0033168792724609375, -0.003177642822265625, -0.022247314453125, -0.02166748046875, -0.0103912353515625, -0.041412353515625, 0.020751953125, 0.004425048828125, 0.0811767578125, 0.01407623291015625, -0.031890869140625, -0.021484375, -0.06640625, 0.0484619140625, -0.03802490234375, 0.02105712890625, 0.03546142578125, 0.04071044921875, -0.014617919921875, -0.06903076171875, -0.034149169921875, -0.02557373046875, -0.00826263427734375, 0.0199737548828125, -0.00955963134765625, 0.006984710693359375, 0.042449951171875, 0.04071044921875, -0.04754638671875, -0.003574371337890625, -0.05157470703125, -0.0195159912109375, 0.0535888671875, 0.035308837890625, -0.0006003379821777344, -0.0230255126953125, -0.0291900634765625, -0.026824951171875, -0.0293426513671875, 0.01457977294921875, 0.037689208984375, 0.0184783935546875, -0.03863525390625, 0.045623779296875, -0.009674072265625, 0.052703857421875, 0.0003368854522705078, -0.0049285888671875, 0.014739990234375, -0.031463623046875, -0.032073974609375, -0.0133056640625, 0.05084228515625, 0.0269317626953125, 
-0.0120391845703125, 0.0091094970703125, -0.00720977783203125, -0.00421142578125, 0.0254974365234375, -0.0687255859375, -0.037689208984375, 0.0245208740234375, -0.04693603515625, -0.0265960693359375, -0.00884246826171875, -0.0743408203125, -0.00555419921875, -0.0416259765625, 0.036956787109375, -0.0242919921875, -0.0181884765625, -0.00848388671875, -0.0138092041015625, 0.02020263671875, 0.02496337890625, -0.06976318359375, 0.040374755859375, 0.038360595703125, 0.04791259765625, 0.01415252685546875, -0.005031585693359375, -0.006999969482421875, -0.006832122802734375, -0.0060577392578125, 0.04083251953125, -0.0264434814453125, -0.048309326171875, -0.0146942138671875, 0.020721435546875, -0.003353118896484375, -0.02362060546875, 0.055877685546875, -0.034912109375, 0.02178955078125, -0.0160675048828125, -0.048095703125, -0.0289459228515625, 0.0197296142578125, -0.0537109375, 0.086669921875, 0.00862884521484375, -0.0577392578125, 0.01157379150390625, -0.061920166015625, -0.00855255126953125, 0.00553131103515625, 0.01175689697265625, -0.042724609375, -0.015350341796875, -0.002197265625, 0.039642333984375, -0.0303955078125, 0.0193023681640625, -0.0239105224609375, -0.01360321044921875, -0.01107025146484375, -0.021270751953125, 0.085693359375, 0.02667236328125, -0.01031494140625, 0.0010433197021484375, -0.07366943359375, 0.0109405517578125, 0.01904296875, -0.0228729248046875, 0.01482391357421875, -0.0227203369140625, 0.04144287109375, 0.0177001953125, 0.03131103515625, -0.02606201171875, 0.00905609130859375, 0.00013005733489990234, 0.0196990966796875, 0.032135009765625, 0.01326751708984375, 0.00992584228515625, -0.027435302734375, 0.043975830078125, -0.00914764404296875, 0.05401611328125, 0.0158843994140625, -0.045623779296875, -0.059234619140625, -0.0013303756713867188, 0.0262908935546875, 0.04522705078125, -0.0159912109375, 0.050567626953125, -0.0013246536254882812, -0.07647705078125, -0.016021728515625, 0.0015211105346679688, 0.038421630859375, 0.049163818359375, 
0.041656494140625, -0.01202392578125, -0.04522705078125, -0.07000732421875, 0.016998291015625, -0.0007162094116210938, 0.00527191162109375, 0.029266357421875, 0.06890869140625, -0.01274871826171875, 0.05401611328125, -0.0472412109375, -0.007022857666015625, -0.0135345458984375, 0.00911712646484375, 0.00862884521484375, 0.054473876953125, 0.04315185546875, -0.06939697265625, -0.01477813720703125, -0.01446533203125, -0.04888916015625, 0.0225067138671875, -0.00518798828125, -0.008880615234375, -0.005603790283203125, 0.0252685546875, -0.04290771484375, 0.0472412109375, 0.042449951171875, -0.0200958251953125, 0.04730224609375, -0.0262298583984375, -0.00017595291137695312, -0.083740234375, 0.0280914306640625, -0.00043010711669921875, -0.007671356201171875, -0.0325927734375, 0.002288818359375, 0.002445220947265625, -0.0257110595703125, -0.052703857421875, 0.05169677734375, -0.021942138671875, -0.004093170166015625, -0.0247344970703125, -0.0054473876953125, 0.00420379638671875, 0.0484619140625, 0.01519775390625, 0.0404052734375, 0.037689208984375, -0.06427001953125, 0.0262451171875, 0.034454345703125, -0.014739990234375, 0.0316162109375, -0.061431884765625, 0.017852783203125, -0.004772186279296875, 0.02886962890625, -0.048004150390625, -0.02752685546875, 0.024993896484375, -0.0316162109375, 0.0298309326171875, -0.01535797119140625, -0.032806396484375, -0.042724609375, 0.00925445556640625, 0.0164947509765625, 0.050445556640625, -0.02838134765625, 0.02886962890625, 0.038848876953125, 0.01520538330078125, -0.022216796875, -0.040374755859375, 0.0000578761100769043, -0.018463134765625, -0.03802490234375, 0.03253173828125, -0.00623321533203125, 0.000911712646484375, -0.01155853271484375, 0.016265869140625, -0.0231475830078125, 0.00511932373046875, 0.031280517578125, 0.0235748291015625, 0.01275634765625, 0.0093841552734375, -0.01293182373046875, -0.0130767822265625, 0.010986328125, -0.0010538101196289062, 0.03411865234375, -0.01267242431640625, -0.0059814453125, 
-0.054473876953125, 0.0178070068359375, 0.032684326171875, -0.0071563720703125, 0.059539794921875, 0.056793212890625, -0.05487060546875, 0.01319122314453125, -0.033355712890625, -0.0146331787109375, -0.033172607421875, 0.0306243896484375, -0.018646240234375, -0.0280914306640625, 0.041717529296875, 0.005344390869140625, -0.0023708343505859375, 0.061370849609375, 0.049835205078125, 0.00457000732421875, 0.06982421875, 0.051055908203125, 0.018035888671875, 0.035919189453125, -0.049224853515625, 0.000934600830078125, -0.0703125, -0.0433349609375, -0.059814453125, -0.005748748779296875, -0.04962158203125, -0.02398681640625, 0.0084228515625, 0.019561767578125, -0.047637939453125, 0.048858642578125, -0.045379638671875, 0.01454925537109375, 0.0386962890625, 0.0256500244140625, -0.005306243896484375, -0.0240478515625, 0.005458831787109375, 0.00821685791015625, -0.0498046875, -0.054168701171875, 0.08758544921875, 0.042022705078125, 0.043609619140625, -0.006977081298828125, 0.040435791015625, 0.01873779296875, 0.0167388916015625, -0.038360595703125, 0.0428466796875, -0.0020542144775390625, -0.07269287109375, -0.007465362548828125, -0.016937255859375, -0.0667724609375, 0.0009260177612304688, -0.0245513916015625, -0.0701904296875, 0.022674560546875, 0.0270233154296875, -0.027679443359375, 0.0195770263671875, -0.048736572265625, 0.09716796875, -0.0279388427734375, -0.027557373046875, -0.020172119140625, -0.05133056640625, 0.0301513671875, 0.0012121200561523438, 0.01004791259765625, -0.0024929046630859375, 0.0165557861328125, 0.06719970703125, -0.06109619140625, 0.063232421875, -0.031768798828125, 0.0290679931640625, 0.03961181640625, -0.020721435546875, 0.04986572265625, 0.0031528472900390625, -0.00675201416015625, 0.0251617431640625, 0.017364501953125, -0.041412353515625, -0.0290679931640625, 0.047088623046875, -0.06549072265625, -0.0217437744140625, -0.033355712890625, -0.040435791015625, -0.00969696044921875, 0.0021495819091796875, 0.028533935546875, 0.0234222412109375, 
-0.026153564453125, 0.0189666748046875, 0.04840087890625, -0.01419830322265625, 0.0193939208984375, 0.017791748046875, -0.0142669677734375, -0.033172607421875, 0.054351806640625, 0.0018939971923828125, 0.0174407958984375, 0.0272979736328125, 0.0160064697265625, -0.044281005859375, -0.039703369140625, -0.025421142578125, -0.0013952255249023438, -0.0556640625, -0.00801849365234375, -0.06231689453125, -0.026580810546875, -0.034820556640625, 0.0006647109985351562, -0.03204345703125, -0.028594970703125, -0.042572021484375, -0.00927734375, 0.043914794921875, 0.04071044921875, -0.011016845703125, 0.05059814453125, -0.0521240234375, 0.0080108642578125, 0.0306243896484375, 0.0078582763671875, 0.0006084442138671875, -0.050628662109375, -0.028167724609375, 0.0207061767578125, -0.05401611328125, -0.06085205078125, 0.0201873779296875, -0.00189971923828125, 0.03521728515625, 0.025146484375, -0.00890350341796875, 0.056884765625, -0.01873779296875, 0.07818603515625, 0.018463134765625, -0.057647705078125, 0.048736572265625, -0.0323486328125, 0.0007586479187011719, 0.04840087890625, 0.0357666015625, -0.015838623046875, 0.007595062255859375, -0.07513427734375, -0.0457763671875, 0.04840087890625, 0.027679443359375, 0.0162200927734375, 0.005466461181640625, 0.035400390625, -0.01145172119140625, 0.01226043701171875, -0.06707763671875, -0.01971435546875, -0.0258636474609375, 0.007709503173828125, -0.005954742431640625, -0.01123046875, -0.0125274658203125, -0.04217529296875, 0.06903076171875, 0.01434326171875, 0.03521728515625, 0.0013189315795898438, 0.005283355712890625, 0.01071929931640625, 0.0009765625, 0.036163330078125, 0.044830322265625, -0.04559326171875, -0.028167724609375, 0.0217437744140625, -0.054168701171875, 0.00027942657470703125, 0.0189666748046875, -0.00934600830078125, -0.00588226318359375, 0.0037689208984375, 0.07366943359375, 0.01142120361328125, -0.0238189697265625, 0.026580810546875, 0.002780914306640625, -0.0196533203125, -0.041290283203125, 0.00955963134765625, 
0.010711669921875, 0.006343841552734375, -0.00894927978515625, 0.00882720947265625, 0.0186614990234375, -0.03900146484375, 0.0059051513671875, 0.0280914306640625, -0.038726806640625, -0.00495147705078125, 0.07647705078125, 0.0207977294921875, -0.036529541015625, 0.049102783203125, -0.022918701171875, -0.0184783935546875, 0.0816650390625, 0.040557861328125, 0.060028076171875, -0.01047515869140625, 0.01424407958984375, 0.057098388671875, 0.01538848876953125, -0.00023365020751953125, 0.02313232421875, 0.0006184577941894531, -0.0242919921875, -0.0062103271484375, -0.056427001953125, -0.04443359375, 0.0295257568359375, -0.0677490234375, 0.05523681640625, -0.046875, -0.01580810546875, 0.019927978515625, 0.0175323486328125, -0.0853271484375, 0.051483154296875, 0.00994873046875, 0.0804443359375, -0.07012939453125, 0.06768798828125, 0.050933837890625, -0.049346923828125, -0.06793212890625, -0.0264434814453125, -0.00078582763671875, -0.0518798828125, 0.031951904296875, 0.004161834716796875, 0.0190582275390625, -0.002796173095703125, -0.0386962890625, -0.06317138671875, 0.10052490234375, 0.00994110107421875, -0.056793212890625, 0.0123748779296875, 0.0022830963134765625, 0.045806884765625, -0.031005859375, 0.05548095703125, 0.032806396484375, 0.03558349609375, 0.0261383056640625, -0.06298828125, 0.0032749176025390625, -0.014495849609375, 0.00882720947265625, 0.0012998580932617188, -0.06304931640625, 0.06573486328125, -0.01230621337890625, 0.00917816162109375, 0.01125335693359375, 0.03857421875, 0.00859832763671875, 0.033477783203125, 0.0267333984375, 0.051055908203125, 0.06695556640625, 0.0038967132568359375, 0.09002685546875, -0.042388916015625, 0.051849365234375, 0.09881591796875, 0.003841400146484375, 0.0601806640625, 0.018768310546875, -0.03924560546875, 0.0238189697265625, 0.08441162109375, -0.0268402099609375, 0.0289154052734375, 0.02325439453125, -0.0028972625732421875, -0.0198516845703125, -0.01104736328125, -0.040557861328125, 0.0298004150390625, 0.024383544921875, 
-0.044677734375, -0.0159759521484375, -0.0021762847900390625, 0.00433349609375, -0.0220489501953125, -0.033721923828125, 0.042755126953125, -0.0048370361328125, -0.0386962890625, 0.042236328125, 0.00797271728515625, 0.012420654296875, -0.0550537109375, -0.01555633544921875, -0.0033855438232421875, 0.011138916015625, -0.0299835205078125, -0.03955078125, 0.03045654296875, -0.00514984130859375, -0.0225830078125, -0.00039267539978027344, 0.040252685546875, -0.01153564453125, -0.06011962890625, 0.022064208984375, 0.01433563232421875, 0.036651611328125, -0.0066070556640625, -0.0777587890625, -0.004486083984375, 0.0022125244140625, -0.012542724609375, 0.0182342529296875, -0.004413604736328125, -0.00234222412109375, 0.037353515625, 0.04266357421875, -0.01322174072265625, -0.0063934326171875, -0.0020389556884765625, 0.0665283203125, -0.049957275390625, -0.031494140625, -0.034820556640625, 0.05303955078125, -0.0146484375, -0.04425048828125, 0.047637939453125, 0.062744140625, 0.05517578125, -0.00867462158203125, 0.053985595703125, -0.020599365234375, 0.0248870849609375, -0.032989501953125, 0.035247802734375, -0.043914794921875, 0.0018892288208007812, -0.0201263427734375, -0.0831298828125, 0.001522064208984375, 0.03521728515625, -0.029144287109375, 0.0121002197265625, 0.045074462890625, 0.07281494140625, -0.01099395751953125, 0.0269927978515625, 0.0200347900390625, 0.01227569580078125, 0.00994110107421875, 0.03192138671875, 0.039154052734375, -0.06036376953125, 0.045989990234375, -0.042694091796875, -0.0279541015625, -0.01490020751953125, -0.0677490234375, -0.060211181640625, -0.038726806640625, -0.04815673828125, -0.03338623046875, 0.01224517822265625, 0.058837890625, 0.06396484375, -0.059417724609375, -0.017730712890625, -0.0251312255859375, -0.004058837890625, -0.0204315185546875, -0.0165252685546875, 0.0462646484375, -0.0108642578125, -0.061676025390625, 0.000560760498046875, -0.0179443359375, 0.02203369140625, -0.019744873046875, -0.01116180419921875, -0.0191497802734375, 
-0.002132415771484375, 0.027130126953125, 0.031097412109375, -0.03778076171875, -0.0187835693359375, -0.0166473388671875, -0.006519317626953125, -0.00836181640625, 0.038543701171875, -0.039093017578125, 0.030426025390625, 0.0272979736328125, 0.01551055908203125, 0.06707763671875, 0.001194000244140625, 0.0164642333984375, -0.0244140625, 0.0167694091796875, 0.023651123046875, 0.0343017578125, 0.01401519775390625, -0.047027587890625, 0.040191650390625, 0.0217437744140625, -0.06549072265625, -0.041961669921875, -0.0029277801513671875, -0.0877685546875, -0.00255584716796875, 0.09271240234375, -0.0022411346435546875, -0.0215606689453125, -0.0093231201171875, -0.026153564453125, 0.016937255859375, -0.01275634765625, 0.0491943359375, 0.05364990234375, -0.0188446044921875, 0.007476806640625, -0.059906005859375, 0.04150390625, 0.00677490234375, -0.071044921875, -0.0092926025390625, 0.042694091796875, 0.0263519287109375, 0.009674072265625, 0.033477783203125, -0.0166473388671875, 0.0239105224609375, 0.0306854248046875, 0.03173828125, -0.00510406494140625, -0.0273284912109375, -0.0247650146484375, -0.00015723705291748047, -0.014892578125, -0.040740966796875 ] ]
Locutusque/gpt2-conversational-or-qa
2023-09-26T02:22:34.000Z
[ "transformers", "pytorch", "safetensors", "gpt2", "text-generation", "conversational", "en", "dataset:Locutusque/ColumnedChatCombined", "doi:10.57967/hf/1214", "license:openrail", "endpoints_compatible", "has_space", "text-generation-inference", "region:us" ]
conversational
Locutusque
null
null
Locutusque/gpt2-conversational-or-qa
1
5,841
transformers
2023-04-28T00:56:44
--- license: openrail datasets: - Locutusque/ColumnedChatCombined language: - en metrics: - bleu - perplexity - loss - reward - penalty widget: - text: '<|USER|> Hello! <|ASSISTANT|> ' pipeline_tag: conversational --- # Model Card * this model is deprecated please see https://huggingface.co/Locutusque/gpt2-conversational-retrain for a better performing model. * ## Model Details - Model Name: gpt2-conversational-or-qa (prototype) - Model Type: Language Modeling - Task: Generating Conversational Responses - Hardware: 1x RTX 3060 - Description: This model is trained on a dataset of conversations between a user and an AI assistant, with the goal of generating a coherent and relevant response to the user's input. It uses the GPT-2 architecture, a state-of-the-art transformer-based language model that is capable of generating high-quality text with a wide range of styles and tones. The model is fine-tuned on the conversational data using maximum likelihood estimation, and is evaluated based on its ability to generate responses that are both grammatically correct and semantically relevant to the user's input. I've also trained larger models such as https://huggingface.co/Locutusque/gpt2-medium-conversational and https://huggingface.co/Locutusque/gpt2-large-conversational ## Intended Use This model is intended to be used for generating conversational responses in a variety of contexts, such as chatbots, virtual assistants, and customer service applications. It is designed to provide natural and engaging responses to user input, with a focus on maintaining a consistent tone and style throughout the conversation. The model is suitable for use in both text-based and voice-based interfaces, and can be easily integrated into existing applications using the PyTorch and Transformers frameworks. ## Training Data The model is trained on a large dataset of conversational data, consisting of interactions between users and an AI assistant. 
The data is preprocessed to remove any sensitive information and is formatted in a way that is suitable for training a language model. The training data is split into a training set and a validation set, with the training set used to update the model parameters and the validation set used to evaluate the model performance. The model was trained on 245,000 examples over 1,225,000 steps, it achieved decent metrics. This model outperformed the base GPT-2 model significantly on a new conversational dataset during a fine-tuning session. Here is a side-by-side comparison of the two models during the first steps of training ```python # Base GPT-2 """ Epoch 1/5, Batch 1/10000: Loss - 64.9255, Reward - 260.0000, Penalty - 624.0000, BLEU - 0.0000 Epoch 1/5, Batch 2/10000: Loss - 57.4635, Reward - 303.0000, Penalty - 870.0000, BLEU - 0.0000 Epoch 1/5, Batch 3/10000: Loss - 67.8061, Reward - 295.0000, Penalty - 908.0000, BLEU - 0.0000 Epoch 1/5, Batch 4/10000: Loss - 59.6118, Reward - 800.0000, Penalty - 740.0000, BLEU - 0.0000 Epoch 1/5, Batch 5/10000: Loss - 67.4855, Reward - 402.0000, Penalty - 806.0000, BLEU - 0.0000 Epoch 1/5, Batch 6/10000: Loss - 29.3718, Reward - 937.0000, Penalty - 760.0000, BLEU - 0.0000 Epoch 1/5, Batch 7/10000: Loss - 79.0709, Reward - 390.0000, Penalty - 1114.0000, BLEU - 0.0000 Epoch 1/5, Batch 8/10000: Loss - 61.4583, Reward - 385.0000, Penalty - 760.0000, BLEU - 0.0000 Epoch 1/5, Batch 9/10000: Loss - 56.3084, Reward - 741.0000, Penalty - 560.0000, BLEU - 3.5500 Epoch 1/5, Batch 10/10000: Loss - 80.0192, Reward - 838.0000, Penalty - 1424.0000, BLEU - 0.0000 Epoch 1/5, Batch 11/10000: Loss - 51.8236, Reward - 228.0000, Penalty - 812.0000, BLEU - 0.0001 Epoch 1/5, Batch 12/10000: Loss - 71.4071, Reward - 541.0000, Penalty - 982.0000, BLEU - 0.0000 Epoch 1/5, Batch 13/10000: Loss - 33.3624, Reward - 910.0000, Penalty - 1002.0000, BLEU - 0.0027 Epoch 1/5, Batch 14/10000: Loss - 55.9721, Reward - 808.0000, Penalty - 798.0000, BLEU - 0.0005 Epoch 
1/5, Batch 15/10000: Loss - 67.0336, Reward - 517.0000, Penalty - 764.0000, BLEU - 0.0000 """ # Conversational GPT-2 """ Epoch 1/5, Batch 1/10000: Loss - 6.1980, Reward - 887.0000, Penalty - 1500.0000, BLEU - 0.0648 Epoch 1/5, Batch 2/10000: Loss - 4.5750, Reward - 245.0000, Penalty - 1618.0000, BLEU - 0.0008 Epoch 1/5, Batch 3/10000: Loss - 5.1264, Reward - 600.0000, Penalty - 642.0000, BLEU - 5.7981 Epoch 1/5, Batch 4/10000: Loss - 0.2995, Reward - 1020.0000, Penalty - 74.0000, BLEU - 13.8469 Epoch 1/5, Batch 5/10000: Loss - 7.9377, Reward - 203.0000, Penalty - 1700.0000, BLEU - 0.3218 Epoch 1/5, Batch 6/10000: Loss - 5.0522, Reward - 1020.0000, Penalty - 2034.0000, BLEU - 0.1946 Epoch 1/5, Batch 7/10000: Loss - 2.0585, Reward - 925.0000, Penalty - 526.0000, BLEU - 16.1298 Epoch 1/5, Batch 8/10000: Loss - 5.9736, Reward - 1009.0000, Penalty - 1844.0000, BLEU - 0.0085 Epoch 1/5, Batch 9/10000: Loss - 6.0867, Reward - 245.0000, Penalty - 1690.0000, BLEU - 1.9342 Epoch 1/5, Batch 10/10000: Loss - 7.8497, Reward - 155.0000, Penalty - 1780.0000, BLEU - 0.0115 Epoch 1/5, Batch 11/10000: Loss - 3.8887, Reward - 1012.0000, Penalty - 2010.0000, BLEU - 0.6957 Epoch 1/5, Batch 12/10000: Loss - 6.6133, Reward - 216.0000, Penalty - 1638.0000, BLEU - 1.7853 Epoch 1/5, Batch 13/10000: Loss - 1.3319, Reward - 945.0000, Penalty - 374.0000, BLEU - 0.0075 Epoch 1/5, Batch 14/10000: Loss - 2.6296, Reward - 956.0000, Penalty - 414.0000, BLEU - 3.2207 Epoch 1/5, Batch 15/10000: Loss - 6.8827, Reward - 1013.0000, Penalty - 1970.0000, BLEU - 3.7418 """ ``` ## Model Architecture The model architecture used in this model is GPT-2, a transformer-based language model that is capable of generating high-quality text with a wide range of styles and tones. The GPT-2 architecture consists of a multi-layered decoder-only transformer, with self-attention mechanisms that allow the model to capture long-term dependencies and generate coherent text. 
## Evaluation Metrics The model is evaluated based on several metrics, including loss, reward, penalty, BLEU score, and perplexity. The loss metric is calculated during training and reflects the difference between the predicted output and the actual output. The reward metric is based on the number of correct words generated by the model, while the penalty metric penalizes the model for repeating words consecutively. The BLEU score measures the similarity between the generated text and the ground truth text, while the perplexity metric measures how well the model is able to predict the next word in a sequence. During validation, the model achieved the following metrics: - BLEU Score: 9 - Perplexity: 19 - Loss: 1.7 Although these metrics seem mediocre, it's actually better because that way the model is able to make open-ended responses, but is still coherent to the user's input. This model was also evaluated on Hugging Face's Open LLM Leaderboard Model | Average | ARC (25-shot) | HellaSwag (10-shot) | MMLU (5-shot) | TruthfulQA (0-shot) --- | --- | --- | --- | --- | --- Locutusque/gpt2-conversational-or-qa | 30.9 | 21.3 | 27.6 | 27.5 | 47.3 gpt2 | 30.4 | 21.9 | 31.6 | 27.5 | 40.7 |||||| *This model performed excellently in TruthfulQA, outperforming gpt2 by nearly 7 points* ## Limitations and Bias This model is not suitable for all use cases due to its limited training time on a weak computer. As a result, it may produce irrelevant or nonsensical responses. Additionally, it has not been fine-tuned to remember the chat history, is unable to provide follow-up responses, and it does not know the answer to many questions (it was only fine-tuned to respond in a conversational way). For optimal performance, we recommend using a GPU with at least 4GB of VRAM and downloading the model manually instead of using the Transformers library or deploying it on the Interface API. 
Here's how you should deploy the model: ```python import torch from transformers import GPT2Tokenizer, GPT2LMHeadModel tokenizer = GPT2Tokenizer.from_pretrained('gpt2') model = GPT2LMHeadModel.from_pretrained('gpt2') tokenizer.add_special_tokens({'pad_token': '[PAD]'}) tokenizer.add_special_tokens({'eos_token': '<|End|>'}) special_tokens = { "additional_special_tokens": ["<|USER|>", "<|SYSTEM|>", "<|ASSISTANT|>"] } tokenizer.add_special_tokens(special_tokens) model.resize_token_embeddings(len(tokenizer)) model.load_state_dict(torch.load("path/to/model")) device = torch.device("cuda" if torch.cuda.is_available() else "cpu") model.to(device) def generate_text(model, tokenizer, prompt, max_length=1024): prompt = f'<|USER|> {prompt} <|ASSISTANT|> ' input_ids = tokenizer.encode(prompt, add_special_tokens=True, return_tensors="pt").to(device) attention_mask = torch.ones_like(input_ids).to(device) output = model.generate(input_ids, max_length=max_length, do_sample=True, top_k=35, top_p=0.80, pad_token_id=tokenizer.pad_token_id, eos_token_id=tokenizer.eos_token_id, attention_mask=attention_mask) output_ids = tokenizer.decode(output[0], skip_special_tokens=False) assistant_token_index = output_ids.index('<|ASSISTANT|>') + len('<|ASSISTANT|>') next_token_index = output_ids.find('<|', assistant_token_index) output_ids = output_ids[assistant_token_index:next_token_index] return output_ids # Loop to interact with the model while True: prompt = input("Enter a prompt (or 'q' to quit): ") if prompt == "q": break output_text = generate_text(model, tokenizer, prompt) print(output_text) ``` ## Deploying and training the model The model has been fine-tuned on a specific input format that goes like this ```"<|USER|> {user prompt} <|ASSISTANT|> {model prediction} <|End|>".``` For the best performance from the model the input text should be as follows ```<|USER|> {user prompt} <|ASSISTANT|> ``` and the target/label should be as follows ```<|USER|> {user prompt} <|ASSISTANT|> {dataset 
output} <|End|>```
10,249
[ [ -0.0178985595703125, -0.07452392578125, 0.0207672119140625, 0.00814056396484375, -0.0108642578125, -0.004741668701171875, -0.0166778564453125, -0.038970947265625, -0.0013980865478515625, 0.027587890625, -0.02252197265625, -0.032440185546875, -0.0595703125, -0.0120086669921875, -0.006328582763671875, 0.08245849609375, 0.0123138427734375, -0.005420684814453125, 0.00830078125, 0.0012655258178710938, -0.0421142578125, -0.035369873046875, -0.0816650390625, -0.04193115234375, 0.0210723876953125, 0.02142333984375, 0.054901123046875, 0.02996826171875, 0.0259552001953125, 0.020111083984375, -0.0151519775390625, 0.006351470947265625, -0.053741455078125, -0.0249786376953125, 0.002384185791015625, -0.03997802734375, -0.04791259765625, 0.0161590576171875, 0.026947021484375, 0.0207061767578125, -0.01428985595703125, 0.00914764404296875, 0.018218994140625, 0.043731689453125, -0.0037403106689453125, 0.033782958984375, -0.037567138671875, -0.0108795166015625, -0.0034885406494140625, -0.00853729248046875, -0.02911376953125, -0.02886962890625, 0.0204010009765625, -0.035125732421875, 0.0170135498046875, -0.00937652587890625, 0.09857177734375, 0.00830078125, -0.0185089111328125, -0.02349853515625, -0.053863525390625, 0.055450439453125, -0.06591796875, 0.0229034423828125, 0.0435791015625, 0.01068878173828125, -0.00624847412109375, -0.053070068359375, -0.048675537109375, -0.0212249755859375, -0.0160980224609375, 0.0293731689453125, -0.0254974365234375, -0.0189666748046875, 0.0254364013671875, 0.0265350341796875, -0.05926513671875, 0.0087738037109375, -0.0229949951171875, -0.01061248779296875, 0.052337646484375, 0.01107025146484375, 0.015167236328125, -0.0115814208984375, -0.034027099609375, -0.019927978515625, -0.037841796875, 0.0195770263671875, 0.03680419921875, 0.015167236328125, -0.024566650390625, 0.037017822265625, -0.0150299072265625, 0.0225067138671875, 0.00811004638671875, -0.0184478759765625, 0.034912109375, -0.0254364013671875, -0.0280609130859375, -0.0203704833984375, 
0.07952880859375, 0.036224365234375, 0.0193634033203125, 0.0038776397705078125, -0.0134735107421875, 0.00635528564453125, -0.00467681884765625, -0.0765380859375, -0.030548095703125, 0.0243072509765625, -0.021881103515625, -0.023956298828125, 0.004810333251953125, -0.04608154296875, 0.007061004638671875, -0.005321502685546875, 0.03814697265625, -0.0438232421875, -0.034881591796875, -0.0026149749755859375, -0.0163726806640625, 0.043914794921875, 0.023468017578125, -0.0772705078125, 0.0133514404296875, 0.035888671875, 0.0701904296875, 0.0007653236389160156, -0.0120391845703125, -0.0029277801513671875, -0.0023326873779296875, -0.0209808349609375, 0.05206298828125, -0.01207733154296875, -0.044158935546875, -0.0218963623046875, 0.0192413330078125, -0.0137481689453125, -0.03302001953125, 0.030120849609375, -0.02642822265625, 0.035919189453125, -0.01910400390625, -0.04351806640625, -0.017242431640625, 0.0264129638671875, -0.03619384765625, 0.10565185546875, 0.0264739990234375, -0.046783447265625, 0.026458740234375, -0.04541015625, -0.0124664306640625, 0.01019287109375, -0.0010356903076171875, -0.046875, -0.02099609375, 0.0141143798828125, 0.0235137939453125, -0.0386962890625, 0.030120849609375, 0.004940032958984375, -0.0260009765625, 0.01224517822265625, -0.0390625, 0.0625, 0.0033855438232421875, -0.04388427734375, -0.00719451904296875, -0.04534912109375, 0.0216064453125, 0.03668212890625, -0.032684326171875, 0.01059722900390625, -0.01514434814453125, 0.01245880126953125, 0.0213775634765625, 0.0158843994140625, -0.0243072509765625, 0.0171051025390625, -0.03668212890625, 0.06292724609375, 0.043701171875, 0.003902435302734375, 0.00794219970703125, -0.031768798828125, 0.020416259765625, 0.0053558349609375, 0.018218994140625, 0.0012884140014648438, -0.064208984375, -0.059906005859375, -0.01300048828125, 0.0227813720703125, 0.06329345703125, -0.03717041015625, 0.04736328125, -0.0229339599609375, -0.052398681640625, -0.0270233154296875, -0.00225830078125, 0.040496826171875, 
0.045013427734375, 0.0257110595703125, -0.02862548828125, -0.0273895263671875, -0.075927734375, -0.004924774169921875, -0.02325439453125, -0.0043182373046875, 0.037353515625, 0.0574951171875, -0.0209808349609375, 0.06353759765625, -0.039215087890625, -0.02691650390625, -0.0277252197265625, 0.009674072265625, 0.025604248046875, 0.044097900390625, 0.040924072265625, -0.042816162109375, -0.0439453125, 0.01104736328125, -0.06781005859375, 0.0207977294921875, 0.00606536865234375, 0.00048279762268066406, 0.0300140380859375, 0.0147552490234375, -0.0567626953125, 0.041046142578125, 0.037139892578125, -0.0256805419921875, 0.041351318359375, -0.0178985595703125, 0.016510009765625, -0.08966064453125, 0.0232086181640625, 0.006011962890625, -0.0083465576171875, -0.050445556640625, -0.0097198486328125, -0.00753021240234375, -0.001827239990234375, -0.0307769775390625, 0.04840087890625, -0.026336669921875, 0.015777587890625, -0.010772705078125, 0.00997161865234375, -0.0102081298828125, 0.062255859375, 0.0139007568359375, 0.0762939453125, 0.04351806640625, -0.01934814453125, 0.0299530029296875, 0.0264739990234375, -0.02813720703125, 0.028289794921875, -0.07379150390625, 0.0275726318359375, 0.00543212890625, 0.014617919921875, -0.0975341796875, -0.0197601318359375, 0.037261962890625, -0.0653076171875, 0.0275726318359375, -0.0268707275390625, -0.0261383056640625, -0.0418701171875, -0.013153076171875, 0.017333984375, 0.054351806640625, -0.01212310791015625, 0.0266265869140625, 0.00962066650390625, -0.0244140625, -0.0303802490234375, -0.06494140625, 0.0102081298828125, -0.01499176025390625, -0.05426025390625, 0.0172576904296875, 0.0035648345947265625, 0.0039825439453125, -0.00852203369140625, 0.0095062255859375, 0.00894927978515625, -0.00969696044921875, 0.0152130126953125, 0.0243377685546875, -0.01123046875, -0.00475311279296875, -0.0197601318359375, -0.0167388916015625, 0.0026531219482421875, -0.015472412109375, 0.05908203125, -0.0157318115234375, -0.002666473388671875, 
-0.05511474609375, 0.006099700927734375, 0.0264434814453125, -0.0086517333984375, 0.058563232421875, 0.06005859375, -0.027099609375, 0.017913818359375, -0.0430908203125, -0.0175323486328125, -0.034027099609375, 0.05120849609375, -0.034271240234375, -0.0665283203125, 0.05389404296875, 0.01165771484375, -0.0029850006103515625, 0.052947998046875, 0.066162109375, 0.00536346435546875, 0.07916259765625, 0.0211944580078125, -0.0170440673828125, 0.026458740234375, -0.051055908203125, 0.0132598876953125, -0.0609130859375, -0.041839599609375, -0.0236968994140625, -0.0176544189453125, -0.0574951171875, -0.026123046875, 0.0245208740234375, 0.01287078857421875, -0.034149169921875, 0.021331787109375, -0.040496826171875, 0.02703857421875, 0.055816650390625, 0.024139404296875, 0.00634765625, 0.0001628398895263672, 0.00887298583984375, -0.00632476806640625, -0.04193115234375, -0.042327880859375, 0.101318359375, 0.045379638671875, 0.046722412109375, 0.01265716552734375, 0.0284881591796875, 0.02069091796875, 0.0203704833984375, -0.058441162109375, 0.023345947265625, 0.00408935546875, -0.04827880859375, -0.037567138671875, -0.0380859375, -0.06329345703125, 0.0151214599609375, -0.00628662109375, -0.0762939453125, -0.0031490325927734375, 0.01276397705078125, -0.027801513671875, 0.01543426513671875, -0.0701904296875, 0.0762939453125, -0.019805908203125, -0.0390625, -0.00020420551300048828, -0.062042236328125, 0.028228759765625, 0.01519012451171875, 0.0004432201385498047, -0.0093994140625, 0.022186279296875, 0.0675048828125, -0.0433349609375, 0.05517578125, -0.019500732421875, -0.0052642822265625, 0.038238525390625, -0.00150299072265625, 0.0631103515625, 0.02630615234375, 0.015838623046875, 0.01593017578125, -0.003993988037109375, -0.035675048828125, -0.034698486328125, 0.04534912109375, -0.08502197265625, -0.046173095703125, -0.0372314453125, -0.035491943359375, -0.00847625732421875, 0.011627197265625, 0.04815673828125, 0.039337158203125, -0.0224609375, -0.0002689361572265625, 
0.045745849609375, -0.01361846923828125, 0.030242919921875, 0.0196075439453125, -0.0028133392333984375, -0.050811767578125, 0.06982421875, -0.006107330322265625, 0.0284576416015625, 0.0152435302734375, 0.01190185546875, -0.044036865234375, -0.033111572265625, -0.03155517578125, 0.0258636474609375, -0.0264129638671875, -0.009521484375, -0.049163818359375, -0.025604248046875, -0.045379638671875, 0.0105133056640625, -0.035400390625, -0.0197906494140625, -0.0212554931640625, -0.003955841064453125, 0.03338623046875, 0.0526123046875, 0.0104522705078125, 0.03179931640625, -0.0394287109375, 0.007251739501953125, 0.03466796875, 0.022796630859375, -0.00922393798828125, -0.07171630859375, -0.00978851318359375, 0.0209197998046875, -0.031646728515625, -0.0625, 0.033477783203125, 0.009063720703125, 0.02545166015625, 0.02349853515625, -0.037841796875, 0.04229736328125, -0.0206146240234375, 0.077392578125, 0.0214080810546875, -0.053955078125, 0.05120849609375, -0.040496826171875, 0.01464080810546875, 0.0311279296875, 0.01568603515625, -0.050994873046875, -0.020111083984375, -0.06439208984375, -0.06353759765625, 0.0732421875, 0.042388916015625, 0.00806427001953125, 0.004825592041015625, 0.0292816162109375, -0.00939178466796875, 0.0206756591796875, -0.06109619140625, -0.035369873046875, -0.0222320556640625, 0.003173828125, -0.01543426513671875, -0.0309906005859375, -0.004390716552734375, -0.03692626953125, 0.06829833984375, 0.0006346702575683594, 0.053558349609375, 0.010467529296875, -0.0013799667358398438, 0.001209259033203125, 0.00525665283203125, 0.05401611328125, 0.05206298828125, -0.03350830078125, -0.020843505859375, 0.01197052001953125, -0.051025390625, 0.00634765625, 0.0150604248046875, -0.0139007568359375, -0.0105438232421875, 0.0218963623046875, 0.06719970703125, -0.000843048095703125, -0.0297698974609375, 0.041229248046875, 0.0011920928955078125, -0.03814697265625, -0.0233154296875, 0.0008730888366699219, 0.0119781494140625, 0.0280914306640625, 0.022125244140625, 
-0.007244110107421875, -0.006267547607421875, -0.04486083984375, 0.007965087890625, 0.036346435546875, -0.02862548828125, -0.0288543701171875, 0.0694580078125, 0.0182952880859375, -0.0222015380859375, 0.06292724609375, -0.0296478271484375, -0.03924560546875, 0.04638671875, 0.041595458984375, 0.057952880859375, -0.034942626953125, 0.0197906494140625, 0.050384521484375, 0.02703857421875, -0.017822265625, 0.0197906494140625, 0.0241851806640625, -0.051116943359375, -0.0117340087890625, -0.033660888671875, -0.0166168212890625, 0.044677734375, -0.03851318359375, 0.013458251953125, -0.0246124267578125, -0.030548095703125, 0.00009274482727050781, 0.01824951171875, -0.059600830078125, 0.0259552001953125, 0.00518035888671875, 0.053741455078125, -0.0653076171875, 0.06646728515625, 0.04461669921875, -0.043365478515625, -0.07147216796875, -0.0270843505859375, -0.0006785392761230469, -0.08074951171875, 0.038665771484375, 0.006061553955078125, 0.0212249755859375, 0.020599365234375, -0.0396728515625, -0.0615234375, 0.10260009765625, 0.0100555419921875, -0.0413818359375, -0.01103973388671875, 0.0281982421875, 0.042266845703125, -0.0006327629089355469, 0.049713134765625, 0.060150146484375, 0.0282440185546875, 0.0257720947265625, -0.08502197265625, 0.00855255126953125, -0.02520751953125, 0.0125732421875, 0.0226593017578125, -0.081787109375, 0.06988525390625, -0.0102691650390625, -0.00545501708984375, 0.008453369140625, 0.0343017578125, 0.01419830322265625, 0.0038738250732421875, 0.01490020751953125, 0.07000732421875, 0.047393798828125, -0.037689208984375, 0.0667724609375, -0.0179290771484375, 0.0621337890625, 0.06634521484375, 0.007526397705078125, 0.051544189453125, 0.01303863525390625, -0.0394287109375, 0.0222320556640625, 0.05462646484375, -0.01216888427734375, 0.036041259765625, -0.0029296875, -0.00004953145980834961, -0.00762939453125, 0.01428985595703125, -0.040802001953125, 0.027313232421875, 0.0255126953125, -0.0302886962890625, -0.00691986083984375, -0.016693115234375, 
0.0250396728515625, -0.02459716796875, -0.0016298294067382812, 0.06475830078125, -0.004772186279296875, -0.044830322265625, 0.05426025390625, 0.007518768310546875, 0.04327392578125, -0.051483154296875, 0.003314971923828125, -0.02801513671875, 0.0135498046875, -0.00786590576171875, -0.06231689453125, 0.0054473876953125, -0.0009412765502929688, -0.0034503936767578125, -0.00269317626953125, 0.038909912109375, -0.03448486328125, -0.0316162109375, 0.004695892333984375, 0.042572021484375, 0.0179595947265625, -0.01715087890625, -0.0692138671875, -0.002384185791015625, 0.00919342041015625, -0.04351806640625, 0.01451873779296875, 0.029052734375, 0.00555419921875, 0.04290771484375, 0.039093017578125, -0.005931854248046875, -0.01383209228515625, 0.0012292861938476562, 0.06378173828125, -0.051025390625, -0.040802001953125, -0.06793212890625, 0.0380859375, -0.0009670257568359375, -0.042266845703125, 0.046844482421875, 0.06353759765625, 0.053802490234375, 0.0006184577941894531, 0.07281494140625, -0.0227813720703125, 0.029754638671875, -0.039215087890625, 0.044586181640625, -0.03033447265625, -0.000579833984375, -0.01369476318359375, -0.07000732421875, -0.0001823902130126953, 0.039520263671875, -0.034088134765625, 0.0242462158203125, 0.041595458984375, 0.06573486328125, 0.00955963134765625, 0.00408172607421875, 0.01316070556640625, 0.0196075439453125, 0.033905029296875, 0.06817626953125, 0.048095703125, -0.058837890625, 0.046478271484375, -0.0083465576171875, -0.0234375, -0.0199127197265625, -0.037933349609375, -0.07391357421875, -0.03662109375, -0.0278472900390625, -0.044921875, 0.0129547119140625, 0.058197021484375, 0.040740966796875, -0.046844482421875, -0.021270751953125, -0.00415802001953125, -0.0178985595703125, -0.014984130859375, -0.0230560302734375, 0.040374755859375, -0.01024627685546875, -0.0665283203125, 0.0007653236389160156, 0.004009246826171875, 0.020416259765625, 0.0026874542236328125, -0.01446533203125, -0.0128021240234375, -0.01068878173828125, 
0.045806884765625, 0.0167999267578125, -0.057952880859375, -0.0308685302734375, -0.0026531219482421875, -0.0169830322265625, 0.0253448486328125, 0.026641845703125, -0.04345703125, 0.01861572265625, 0.01398468017578125, 0.02587890625, 0.0645751953125, 0.0161895751953125, 0.0197601318359375, -0.05218505859375, 0.012115478515625, 0.00921630859375, 0.00740814208984375, 0.0223236083984375, -0.03338623046875, 0.049407958984375, 0.044677734375, -0.057525634765625, -0.0408935546875, -0.000644683837890625, -0.0902099609375, -0.0005645751953125, 0.09930419921875, -0.00647735595703125, -0.0176849365234375, 0.0151214599609375, -0.024383544921875, 0.0241241455078125, -0.042999267578125, 0.04791259765625, 0.051025390625, -0.0160369873046875, -0.01084136962890625, -0.05889892578125, 0.0491943359375, 0.017364501953125, -0.05609130859375, 0.007076263427734375, 0.0206298828125, 0.044158935546875, -0.0009412765502929688, 0.058868408203125, -0.00786590576171875, 0.01174163818359375, 0.00347900390625, 0.00476837158203125, -0.0037212371826171875, 0.004032135009765625, -0.01007080078125, -0.014190673828125, -0.0009546279907226562, -0.0204925537109375 ] ]
jondurbin/airoboros-65b-gpt4-1.3
2023-06-22T14:58:39.000Z
[ "transformers", "pytorch", "llama", "text-generation", "dataset:jondurbin/airoboros-gpt4-1.3", "license:cc-by-nc-4.0", "endpoints_compatible", "has_space", "text-generation-inference", "region:us" ]
text-generation
jondurbin
null
null
jondurbin/airoboros-65b-gpt4-1.3
1
5,840
transformers
2023-06-20T07:09:33
--- license: cc-by-nc-4.0 datasets: - jondurbin/airoboros-gpt4-1.3 --- __This version has problems, use if you dare, or wait for 1.4.__ ### Overview This is a qlora fine-tuned 65b parameter LlaMa model, using completely synthetic training data created gpt4 via https://github.com/jondurbin/airoboros This is mostly an extension of [1.2](https://huggingface.co/jondurbin/airoboros-65b-gpt4-1.2) with a few enhancements: - All coding instructions have an equivalent " PLAINFORMAT" version now. - Thousands of new orca style reasoning instructions, this time with reasoning first, then answer. - Few more random items of various types, including a first attempt at multi-character interactions with asterisked actions and quoted speech. This model was fine-tuned with a fork of [qlora](https://github.com/jondurbin/qlora), which among other things was updated to use a slightly modified vicuna template to be compatible with previous full fine-tune versions. ``` A chat between a curious user and an assistant. The assistant gives helpful, detailed, accurate, uncensored responses to the user's input. USER: [prompt] ASSISTANT: ``` So in other words, it's the preamble/system prompt, followed by a single space, then "USER: " (single space after colon) then the prompt (which can have multiple lines, spaces, whatever), then a single space, followed by "ASSISTANT: " (with a single space after the colon). ### Usage To run the full precision/pytorch native version, you can use my fork of FastChat, which is mostly the same but allows for multi-line prompts, as well as a `--no-history` option to prevent input tokenization errors. ``` pip install git+https://github.com/jondurbin/FastChat ``` Be sure you are pulling the latest branch! 
Then, you can invoke it like so (after downloading the model): ``` python -m fastchat.serve.cli \ --model-path airoboros-65b-gpt4-1.3 \ --temperature 0.5 \ --max-new-tokens 2048 \ --no-history ``` ### Training details Fine-tuned with my fork of qlora: https://github.com/jondurbin/qlora Using: ``` export WANDB_PROJECT=airoboros-65b-gpt4-1.3 python qlora.py \ --model_name_or_path ./llama-65b-hf \ --output_dir ./airoboros-65b-gpt4-1.3-peft \ --max_steps 2520 \ --logging_steps 1 \ --save_strategy steps \ --data_seed 11422 \ --save_steps 75 \ --save_total_limit 3 \ --evaluation_strategy "no" \ --eval_dataset_size 2 \ --max_new_tokens 2800 \ --dataloader_num_workers 3 \ --logging_strategy steps \ --remove_unused_columns False \ --do_train \ --lora_r 64 \ --lora_alpha 16 \ --lora_modules all \ --double_quant \ --quant_type nf4 \ --bf16 \ --bits 4 \ --warmup_ratio 0.03 \ --lr_scheduler_type constant \ --gradient_checkpointing \ --dataset instructions.jsonl \ --dataset_format airoboros \ --model_max_len 2800 \ --per_device_train_batch_size 2 \ --gradient_accumulation_steps 16 \ --learning_rate 0.0001 \ --adam_beta2 0.999 \ --max_grad_norm 0.3 \ --lora_dropout 0.05 \ --weight_decay 0.0 \ --seed 11422 \ --report_to wandb ``` Three file modifications to the base llama: - llama-65b-hf/tokenizer_config.json (see this repo's version, updated to have 4096 max seq length during training to accomodate training data) - llama-65b-hf/special_tokens_map.json (see this repo's version) - llama-65b-hf/config.json (updated to temporarily have max model size 4096 to accomodate training data) Afterwards, the changes to max model length and sequence length are reduced back to 2048 to avoid ... issues ... ### Usage and License Notices All airoboros models and datasets are intended and licensed for research use only. 
I've used the 'cc-nc-4.0' license, but really it is subject to a custom/special license because: - the base model is LLaMa, which has it's own special research license - the dataset(s) were generated with OpenAI (gpt-4 and/or gpt-3.5-turbo), which has a clausing saying the data can't be used to create models to compete with openai So, to reiterate: this model (and datasets) cannot be used commercially.
4,139
[ [ -0.01763916015625, -0.0537109375, 0.006351470947265625, 0.018157958984375, -0.0308837890625, -0.01727294921875, -0.007415771484375, -0.038482666015625, 0.0221405029296875, 0.02130126953125, -0.0408935546875, -0.038299560546875, -0.0303955078125, -0.0005946159362792969, -0.0201568603515625, 0.08953857421875, -0.01099395751953125, -0.00737762451171875, 0.0212860107421875, -0.0111846923828125, -0.0501708984375, -0.054962158203125, -0.063720703125, -0.01316070556640625, 0.03436279296875, 0.03533935546875, 0.05828857421875, 0.057769775390625, 0.0295562744140625, 0.0203094482421875, -0.0182952880859375, 0.01519775390625, -0.0443115234375, -0.01531982421875, 0.021148681640625, -0.032684326171875, -0.055328369140625, -0.00289154052734375, 0.044952392578125, 0.004772186279296875, -0.0291748046875, 0.01532745361328125, -0.0230865478515625, 0.0229644775390625, -0.030609130859375, 0.01184844970703125, -0.040802001953125, -0.004901885986328125, -0.0174102783203125, -0.0120697021484375, -0.0012969970703125, -0.019317626953125, 0.005825042724609375, -0.06048583984375, 0.004817962646484375, -0.00466156005859375, 0.093505859375, 0.0233001708984375, -0.043914794921875, -0.0180206298828125, -0.0362548828125, 0.044891357421875, -0.09100341796875, 0.01102447509765625, 0.01904296875, 0.03973388671875, -0.01363372802734375, -0.059844970703125, -0.040618896484375, -0.02142333984375, 0.01132965087890625, -0.0007390975952148438, -0.0205230712890625, 0.003047943115234375, 0.0367431640625, 0.0295562744140625, -0.049957275390625, 0.01274871826171875, -0.049835205078125, -0.0158538818359375, 0.0556640625, 0.0246734619140625, -0.000023126602172851562, -0.006198883056640625, -0.030670166015625, -0.0263519287109375, -0.05047607421875, 0.01328277587890625, 0.035980224609375, 0.02142333984375, -0.031646728515625, 0.038055419921875, -0.03253173828125, 0.040985107421875, -0.009521484375, -0.0159759521484375, 0.033355712890625, -0.0132293701171875, -0.0311737060546875, 0.0103302001953125, 
0.057525634765625, 0.0289154052734375, 0.00565338134765625, 0.023284912109375, -0.01027679443359375, -0.0035762786865234375, -0.00835418701171875, -0.07373046875, -0.0237884521484375, 0.032470703125, -0.020050048828125, -0.041290283203125, -0.012908935546875, -0.03814697265625, -0.00598907470703125, -0.0126495361328125, 0.0321044921875, -0.037384033203125, -0.0179901123046875, 0.0210723876953125, -0.0032501220703125, 0.02081298828125, 0.032867431640625, -0.056427001953125, 0.0168609619140625, 0.0242156982421875, 0.06671142578125, 0.0156402587890625, -0.035888671875, -0.01224517822265625, -0.00821685791015625, -0.0256195068359375, 0.04644775390625, -0.01364898681640625, -0.03546142578125, -0.0251617431640625, -0.003017425537109375, 0.0007538795471191406, -0.035858154296875, 0.0307464599609375, -0.037872314453125, 0.0143585205078125, -0.017364501953125, -0.0380859375, -0.0237579345703125, 0.019775390625, -0.03729248046875, 0.0792236328125, 0.013031005859375, -0.055023193359375, 0.004901885986328125, -0.05377197265625, -0.007663726806640625, -0.01395416259765625, 0.01122283935546875, -0.032135009765625, -0.02777099609375, 0.0205535888671875, 0.0187835693359375, -0.01788330078125, 0.0282745361328125, -0.021026611328125, -0.0330810546875, 0.025177001953125, -0.04095458984375, 0.0928955078125, 0.02655029296875, -0.0275115966796875, 0.0189971923828125, -0.05865478515625, 0.00984954833984375, 0.0218353271484375, -0.041778564453125, 0.01256561279296875, -0.031890869140625, -0.0015087127685546875, 0.007106781005859375, 0.038238525390625, -0.018829345703125, 0.046142578125, -0.0208740234375, 0.04925537109375, 0.059539794921875, 0.0105133056640625, -0.001346588134765625, -0.036041259765625, 0.051116943359375, 0.00598907470703125, 0.036956787109375, -0.008575439453125, -0.0438232421875, -0.063232421875, -0.0187835693359375, 0.01459503173828125, 0.037994384765625, -0.04327392578125, 0.0286407470703125, -0.0145263671875, -0.0687255859375, -0.025970458984375, 0.0126495361328125, 
0.031951904296875, 0.0303955078125, 0.03411865234375, -0.012298583984375, -0.05450439453125, -0.050689697265625, 0.0023593902587890625, -0.0211944580078125, 0.0190277099609375, 0.021636962890625, 0.04949951171875, -0.0322265625, 0.060638427734375, -0.04986572265625, -0.0166473388671875, -0.01427459716796875, -0.0034885406494140625, 0.0253143310546875, 0.045623779296875, 0.06146240234375, -0.03558349609375, -0.025177001953125, -0.00968170166015625, -0.052337646484375, 0.007015228271484375, 0.0008249282836914062, -0.0258026123046875, 0.007137298583984375, 0.017608642578125, -0.0611572265625, 0.04559326171875, 0.037200927734375, -0.023956298828125, 0.053375244140625, -0.0103759765625, -0.011566162109375, -0.0821533203125, 0.0161895751953125, -0.004772186279296875, -0.0034542083740234375, -0.043304443359375, 0.0081024169921875, -0.01076507568359375, -0.0008721351623535156, -0.057037353515625, 0.059967041015625, -0.01268768310546875, 0.01268768310546875, -0.014007568359375, -0.015655517578125, 0.0005612373352050781, 0.049102783203125, -0.007297515869140625, 0.055999755859375, 0.04315185546875, -0.0433349609375, 0.038665771484375, 0.027252197265625, -0.005329132080078125, 0.0103302001953125, -0.06805419921875, 0.026153564453125, 0.0164337158203125, 0.046630859375, -0.056182861328125, -0.033416748046875, 0.0421142578125, -0.046600341796875, 0.005767822265625, -0.01165008544921875, -0.0302734375, -0.02606201171875, -0.0221099853515625, 0.033782958984375, 0.037872314453125, -0.037750244140625, 0.0249786376953125, 0.01377105712890625, 0.0170135498046875, -0.048828125, -0.047607421875, 0.00405120849609375, -0.0300140380859375, -0.03955078125, 0.0162811279296875, -0.01412200927734375, -0.00922393798828125, -0.01019287109375, -0.0087738037109375, -0.0244598388671875, 0.0085906982421875, 0.0178985595703125, 0.0248870849609375, -0.00708770751953125, -0.00945281982421875, 0.0034809112548828125, 0.004817962646484375, -0.00429534912109375, -0.0105438232421875, 0.040313720703125, 
-0.02783203125, 0.00806427001953125, -0.058319091796875, -0.005039215087890625, 0.039825439453125, -0.010711669921875, 0.051300048828125, 0.053436279296875, -0.028350830078125, -0.0006842613220214844, -0.0288848876953125, -0.0237884521484375, -0.039276123046875, 0.0177764892578125, -0.01308441162109375, -0.045562744140625, 0.0484619140625, 0.021636962890625, 0.0231170654296875, 0.037139892578125, 0.029693603515625, 0.006137847900390625, 0.06103515625, 0.0290985107421875, -0.0144500732421875, 0.044952392578125, -0.061676025390625, -0.005634307861328125, -0.06036376953125, -0.0258026123046875, -0.02667236328125, -0.017486572265625, -0.03228759765625, -0.0201416015625, 0.0285491943359375, 0.0171966552734375, -0.046478271484375, 0.034637451171875, -0.040802001953125, 0.019500732421875, 0.04217529296875, 0.0172882080078125, 0.0099334716796875, -0.007793426513671875, 0.0122833251953125, 0.0164337158203125, -0.060546875, -0.039093017578125, 0.10150146484375, 0.03057861328125, 0.051788330078125, 0.006847381591796875, 0.058135986328125, -0.0023212432861328125, 0.021728515625, -0.0430908203125, 0.039398193359375, 0.00664520263671875, -0.07373046875, -0.0146484375, -0.0421142578125, -0.07757568359375, 0.0247650146484375, -0.00009500980377197266, -0.04913330078125, 0.01110076904296875, 0.02142333984375, -0.03350830078125, 0.02374267578125, -0.050323486328125, 0.06768798828125, -0.03558349609375, -0.01467132568359375, 0.0009479522705078125, -0.055511474609375, 0.047271728515625, 0.006336212158203125, 0.0045928955078125, -0.0220489501953125, -0.0046539306640625, 0.07647705078125, -0.06744384765625, 0.07391357421875, -0.02508544921875, -0.0031642913818359375, 0.050506591796875, -0.0014734268188476562, 0.030853271484375, 0.007045745849609375, -0.01416778564453125, 0.03997802734375, 0.023223876953125, -0.0333251953125, -0.03173828125, 0.05157470703125, -0.086181640625, -0.032684326171875, -0.039337158203125, -0.03973388671875, 0.00902557373046875, 0.027099609375, 0.02545166015625, 
0.033721923828125, 0.0077362060546875, 0.00318145751953125, 0.01812744140625, -0.018096923828125, 0.0174102783203125, 0.034698486328125, -0.0241241455078125, -0.054168701171875, 0.0699462890625, 0.01235198974609375, 0.005352020263671875, 0.007213592529296875, 0.0203704833984375, -0.0228271484375, -0.036376953125, -0.037567138671875, 0.0248870849609375, -0.057098388671875, -0.03472900390625, -0.0333251953125, -0.0159454345703125, -0.045440673828125, 0.0017004013061523438, -0.0269622802734375, -0.037841796875, -0.0570068359375, -0.0158538818359375, 0.05462646484375, 0.050140380859375, -0.0028095245361328125, 0.051788330078125, -0.05401611328125, 0.012451171875, 0.01483154296875, -0.005176544189453125, 0.0067901611328125, -0.062042236328125, -0.0231781005859375, 0.02197265625, -0.04449462890625, -0.06671142578125, 0.054107666015625, 0.00897979736328125, 0.032379150390625, 0.03131103515625, 0.004749298095703125, 0.0745849609375, -0.00629425048828125, 0.0689697265625, 0.0026607513427734375, -0.0703125, 0.043548583984375, -0.033905029296875, 0.0067291259765625, 0.0313720703125, 0.031768798828125, -0.0240478515625, -0.003887176513671875, -0.06048583984375, -0.06573486328125, 0.0537109375, 0.0247344970703125, 0.00287628173828125, 0.0105743408203125, 0.050750732421875, -0.0040283203125, 0.02362060546875, -0.05621337890625, -0.0219573974609375, -0.04022216796875, -0.0028705596923828125, -0.01116943359375, 0.006011962890625, -0.00809478759765625, -0.042449951171875, 0.062347412109375, -0.0093231201171875, 0.023406982421875, 0.01837158203125, 0.0112457275390625, 0.0017061233520507812, -0.006649017333984375, 0.046112060546875, 0.040435791015625, -0.0350341796875, -0.0131072998046875, 0.019775390625, -0.0305633544921875, 0.0173187255859375, 0.01045989990234375, 0.0016145706176757812, -0.0016345977783203125, 0.0230865478515625, 0.07928466796875, 0.01442718505859375, -0.0447998046875, 0.0289764404296875, -0.00817108154296875, -0.02398681640625, -0.0221710205078125, 
0.0249176025390625, -0.0022830963134765625, 0.0170745849609375, 0.01537322998046875, 0.002593994140625, -0.0030193328857421875, -0.044158935546875, -0.0066986083984375, 0.0164337158203125, 0.00989532470703125, -0.032257080078125, 0.058502197265625, 0.01201629638671875, -0.01323699951171875, 0.054779052734375, -0.0144805908203125, -0.04559326171875, 0.0699462890625, 0.027740478515625, 0.044158935546875, -0.0136566162109375, 0.005771636962890625, 0.036895751953125, 0.018096923828125, -0.018402099609375, 0.03497314453125, 0.00604248046875, -0.0419921875, -0.004459381103515625, -0.0287628173828125, -0.0478515625, 0.0145263671875, -0.04913330078125, 0.033477783203125, -0.048919677734375, -0.0286407470703125, -0.0167999267578125, 0.0274810791015625, -0.057342529296875, 0.021575927734375, -0.0025177001953125, 0.07684326171875, -0.05584716796875, 0.08026123046875, 0.03729248046875, -0.0416259765625, -0.0970458984375, -0.0222320556640625, -0.0038356781005859375, -0.060455322265625, 0.03729248046875, 0.01308441162109375, 0.0059356689453125, 0.0136871337890625, -0.056884765625, -0.07989501953125, 0.11041259765625, 0.0458984375, -0.036865234375, -0.01751708984375, -0.0014896392822265625, 0.0428466796875, -0.007373809814453125, 0.033966064453125, 0.044677734375, 0.0283355712890625, 0.0108184814453125, -0.0792236328125, 0.017578125, -0.0198211669921875, -0.001392364501953125, -0.0222320556640625, -0.08477783203125, 0.093994140625, -0.0268402099609375, -0.0017948150634765625, 0.019073486328125, 0.041656494140625, 0.042755126953125, 0.022247314453125, 0.0164947509765625, 0.052154541015625, 0.0723876953125, 0.00444793701171875, 0.0814208984375, -0.0291748046875, 0.0309906005859375, 0.06512451171875, -0.01444244384765625, 0.063232421875, 0.021240234375, -0.02392578125, 0.041656494140625, 0.07080078125, 0.003688812255859375, 0.02667236328125, 0.00658416748046875, -0.005458831787109375, 0.002140045166015625, 0.007610321044921875, -0.050811767578125, 0.029296875, 0.01800537109375, 
-0.00446319580078125, -0.0038738250732421875, -0.01267242431640625, 0.01059722900390625, -0.049591064453125, -0.00843048095703125, 0.0390625, 0.0108642578125, -0.041656494140625, 0.0880126953125, 0.0040740966796875, 0.059295654296875, -0.058380126953125, -0.006122589111328125, -0.025787353515625, 0.005939483642578125, -0.0282745361328125, -0.029327392578125, -0.0035915374755859375, 0.007534027099609375, 0.0211944580078125, 0.00833892822265625, 0.03314208984375, -0.01404571533203125, -0.0242462158203125, 0.00829315185546875, 0.014495849609375, 0.044586181640625, 0.0025310516357421875, -0.05810546875, 0.03363037109375, 0.01302337646484375, -0.0301361083984375, 0.015777587890625, 0.019866943359375, 0.0042266845703125, 0.06182861328125, 0.06658935546875, 0.006175994873046875, 0.024322509765625, -0.0118255615234375, 0.08514404296875, -0.04681396484375, -0.036956787109375, -0.04925537109375, 0.032684326171875, 0.0164337158203125, -0.057769775390625, 0.0537109375, 0.04205322265625, 0.06585693359375, 0.006500244140625, 0.053863525390625, -0.00331878662109375, 0.005229949951171875, -0.0438232421875, 0.038330078125, -0.04656982421875, 0.0269927978515625, -0.00567626953125, -0.057342529296875, -0.00731658935546875, 0.063232421875, -0.01995849609375, 0.0164337158203125, 0.033538818359375, 0.0635986328125, 0.004009246826171875, -0.00069427490234375, 0.006244659423828125, 0.022125244140625, 0.05419921875, 0.05816650390625, 0.05859375, -0.056671142578125, 0.06195068359375, -0.045074462890625, -0.01020050048828125, -0.04144287109375, -0.056427001953125, -0.058502197265625, -0.01995849609375, -0.0279083251953125, -0.02703857421875, 0.0089111328125, 0.08160400390625, 0.06304931640625, -0.037567138671875, -0.033782958984375, -0.0065765380859375, -0.008819580078125, -0.0184783935546875, -0.0193023681640625, 0.022705078125, -0.00323486328125, -0.04241943359375, 0.032745361328125, -0.0310821533203125, 0.036773681640625, -0.0194854736328125, -0.0248260498046875, -0.007061004638671875, 
0.002147674560546875, 0.01371002197265625, 0.041748046875, -0.048614501953125, -0.004779815673828125, -0.00717926025390625, -0.006816864013671875, 0.01152801513671875, 0.0283203125, -0.050201416015625, 0.0166473388671875, 0.0199432373046875, 0.0158538818359375, 0.052032470703125, -0.00196075439453125, 0.01043701171875, -0.039276123046875, 0.023651123046875, 0.0162506103515625, 0.030487060546875, 0.020233154296875, -0.032470703125, 0.040618896484375, 0.0155487060546875, -0.050628662109375, -0.07440185546875, -0.0163116455078125, -0.081298828125, -0.0164947509765625, 0.093505859375, -0.0199127197265625, -0.026763916015625, 0.0185699462890625, -0.04833984375, 0.033477783203125, -0.046600341796875, 0.0469970703125, 0.025421142578125, -0.0177459716796875, 0.0106048583984375, -0.043060302734375, 0.0265045166015625, 0.01554107666015625, -0.0662841796875, 0.0062255859375, 0.037750244140625, 0.045257568359375, -0.007068634033203125, 0.06329345703125, 0.007640838623046875, 0.0233917236328125, 0.004917144775390625, 0.0035343170166015625, -0.03411865234375, -0.0280609130859375, -0.03924560546875, -0.011199951171875, 0.003246307373046875, -0.021026611328125 ] ]
Mikivis/gpt2-large-lora-sft1
2023-09-10T02:28:25.000Z
[ "transformers", "pytorch", "safetensors", "gpt2", "text-generation", "generated_from_trainer", "dataset:customized", "endpoints_compatible", "text-generation-inference", "region:us" ]
text-generation
Mikivis
null
null
Mikivis/gpt2-large-lora-sft1
0
5,840
transformers
2023-09-06T11:16:28
--- base_model: /data1/sunhaoyu/pretrained-models/gpt2-large-lora-sft tags: - generated_from_trainer datasets: - customized model-index: - name: gpt2-large-lora-sft1 results: [] --- <!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # gpt2-large-lora-sft1 This model is a fine-tuned version of [/data1/sunhaoyu/pretrained-models/gpt2-large-lora-sft](https://huggingface.co//data1/sunhaoyu/pretrained-models/gpt2-large-lora-sft) on the customized dataset. ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 0.00015 - train_batch_size: 1 - eval_batch_size: 8 - seed: 42 - distributed_type: multi-GPU - num_devices: 6 - total_train_batch_size: 6 - total_eval_batch_size: 48 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - num_epochs: 1.5 ### Training results ### Framework versions - Transformers 4.32.1 - Pytorch 2.0.1+cu117 - Datasets 2.10.1 - Tokenizers 0.13.3
1,287
[ [ -0.037445068359375, -0.065185546875, 0.0191192626953125, 0.0085296630859375, -0.041168212890625, -0.029998779296875, -0.0079498291015625, -0.029937744140625, 0.0198974609375, 0.0288238525390625, -0.04510498046875, -0.0255584716796875, -0.05377197265625, -0.006374359130859375, -0.0167388916015625, 0.10308837890625, -0.012054443359375, 0.0148162841796875, -0.000008761882781982422, -0.0008001327514648438, -0.0182647705078125, -0.036285400390625, -0.07611083984375, -0.056884765625, 0.0438232421875, 0.0013799667358398438, 0.0601806640625, 0.060089111328125, 0.0430908203125, 0.0235595703125, -0.0167236328125, -0.00592803955078125, -0.044189453125, -0.041015625, -0.005084991455078125, -0.029022216796875, -0.0672607421875, 0.0103912353515625, 0.053924560546875, 0.01226806640625, -0.0154876708984375, 0.03729248046875, 0.016632080078125, 0.02239990234375, -0.031280517578125, 0.038421630859375, -0.035736083984375, 0.0278472900390625, -0.0149993896484375, -0.0216064453125, -0.011627197265625, -0.0252532958984375, -0.0015001296997070312, -0.054840087890625, 0.0310211181640625, -0.0158538818359375, 0.08465576171875, 0.0400390625, -0.021514892578125, 0.016632080078125, -0.059783935546875, 0.0271148681640625, -0.0474853515625, 0.00034427642822265625, 0.0301513671875, 0.041290283203125, 0.00888824462890625, -0.0562744140625, -0.032501220703125, -0.005214691162109375, 0.0013990402221679688, 0.01435089111328125, -0.0025997161865234375, 0.0068206787109375, 0.0631103515625, 0.0260009765625, -0.042327880859375, 0.02374267578125, -0.042633056640625, -0.00858306884765625, 0.0482177734375, 0.023040771484375, -0.01390838623046875, -0.004558563232421875, -0.03668212890625, -0.00673675537109375, -0.0406494140625, -0.007221221923828125, 0.04583740234375, 0.0183868408203125, -0.029144287109375, 0.056121826171875, -0.01702880859375, 0.04425048828125, 0.00487518310546875, -0.01666259765625, 0.0380859375, -0.0026531219482421875, -0.03863525390625, -0.00504302978515625, 0.0673828125, 
0.0268402099609375, 0.0196685791015625, 0.0066680908203125, -0.0167236328125, -0.0037212371826171875, 0.0069122314453125, -0.07366943359375, -0.0240936279296875, 0.0020008087158203125, -0.02764892578125, -0.0460205078125, 0.01157379150390625, -0.043731689453125, -0.0005950927734375, -0.03369140625, 0.05133056640625, -0.01107025146484375, -0.0181427001953125, -0.00116729736328125, -0.005680084228515625, 0.0294647216796875, 0.0210723876953125, -0.060302734375, 0.0259857177734375, 0.035064697265625, 0.033966064453125, 0.0115966796875, -0.035797119140625, -0.0266571044921875, 0.01702880859375, -0.0089263916015625, 0.04254150390625, 0.0018377304077148438, -0.0279541015625, -0.0128631591796875, 0.0169219970703125, -0.0091705322265625, -0.0291900634765625, 0.056182861328125, -0.031585693359375, 0.0257110595703125, -0.03363037109375, -0.0311737060546875, -0.0099639892578125, 0.023956298828125, -0.048431396484375, 0.083984375, 0.02630615234375, -0.06988525390625, 0.0400390625, -0.059356689453125, -0.0211029052734375, 0.0142974853515625, -0.0148162841796875, -0.05743408203125, -0.00576019287109375, 0.0013761520385742188, 0.025787353515625, -0.028717041015625, 0.0252838134765625, -0.024505615234375, -0.0440673828125, -0.0055999755859375, -0.036102294921875, 0.04522705078125, 0.01551055908203125, -0.034759521484375, 0.017822265625, -0.071533203125, 0.0233612060546875, 0.0340576171875, -0.041168212890625, 0.017059326171875, -0.0261383056640625, 0.038299560546875, 0.0194244384765625, 0.03485107421875, -0.038421630859375, 0.0274200439453125, -0.01308441162109375, 0.03363037109375, 0.057403564453125, -0.0090179443359375, -0.01361083984375, -0.01338958740234375, 0.0245361328125, 0.0032958984375, 0.0357666015625, 0.03448486328125, -0.04095458984375, -0.055908203125, -0.0093841552734375, 0.00991058349609375, 0.03369140625, -0.039154052734375, 0.06195068359375, -0.0106353759765625, -0.04571533203125, -0.003940582275390625, 0.01018524169921875, 0.03289794921875, 0.034881591796875, 
0.032379150390625, -0.0147552490234375, -0.0299530029296875, -0.07159423828125, -0.0023021697998046875, 0.0007648468017578125, 0.0183563232421875, 0.0115509033203125, 0.060211181640625, -0.02679443359375, 0.062164306640625, -0.059539794921875, -0.01479339599609375, -0.019073486328125, 0.00027751922607421875, 0.0259857177734375, 0.05401611328125, 0.05206298828125, -0.035186767578125, -0.0257568359375, -0.00629425048828125, -0.0631103515625, 0.0213165283203125, 0.007495880126953125, -0.0133209228515625, -0.00821685791015625, 0.02593994140625, -0.057769775390625, 0.052154541015625, 0.0145721435546875, -0.0285186767578125, 0.047515869140625, -0.044189453125, -0.00597381591796875, -0.09033203125, 0.0203857421875, 0.01222991943359375, -0.00876617431640625, -0.01198577880859375, 0.01206207275390625, -0.00847625732421875, -0.0158233642578125, -0.026519775390625, 0.04638671875, -0.0025196075439453125, 0.0022220611572265625, -0.0303955078125, -0.0189666748046875, -0.00757598876953125, 0.04791259765625, 0.0123443603515625, 0.044342041015625, 0.0423583984375, -0.03485107421875, 0.044189453125, 0.0423583984375, -0.0261383056640625, 0.0280609130859375, -0.08087158203125, 0.0143890380859375, -0.0007600784301757812, 0.02105712890625, -0.05157470703125, -0.034576416015625, 0.0430908203125, -0.0118255615234375, 0.0199432373046875, -0.0236053466796875, -0.0494384765625, -0.04534912109375, 0.00542449951171875, 0.040863037109375, 0.046173095703125, -0.05291748046875, 0.0233612060546875, 0.0004153251647949219, 0.0157012939453125, -0.033203125, -0.056121826171875, -0.032073974609375, -0.018096923828125, -0.0252838134765625, 0.00452423095703125, -0.0028591156005859375, 0.0190277099609375, -0.00951385498046875, -0.0002256631851196289, -0.0193023681640625, -0.01412200927734375, 0.0267333984375, 0.0280914306640625, -0.0213470458984375, -0.01336669921875, -0.0006437301635742188, -0.03271484375, 0.0285186767578125, -0.0069122314453125, 0.041473388671875, 0.00252532958984375, -0.027587890625, 
-0.055633544921875, -0.00614166259765625, 0.0254058837890625, -0.009521484375, 0.05975341796875, 0.0904541015625, -0.03582763671875, 0.01045989990234375, -0.03656005859375, -0.004058837890625, -0.034881591796875, 0.050323486328125, -0.036468505859375, -0.029632568359375, 0.037200927734375, 0.00803375244140625, -0.0038852691650390625, 0.06707763671875, 0.035308837890625, 0.0204315185546875, 0.08319091796875, 0.026824951171875, -0.018707275390625, 0.0330810546875, -0.056365966796875, -0.0031452178955078125, -0.0615234375, -0.023284912109375, -0.034423828125, -0.0177001953125, -0.05389404296875, -0.0078887939453125, 0.0170745849609375, 0.00849151611328125, -0.060760498046875, 0.0279998779296875, -0.038665771484375, 0.043121337890625, 0.044281005859375, 0.037200927734375, -0.0006451606750488281, 0.0212249755859375, 0.0047760009765625, 0.0102996826171875, -0.05645751953125, -0.0163421630859375, 0.09222412109375, 0.038238525390625, 0.058502197265625, -0.0092315673828125, 0.047454833984375, -0.01641845703125, 0.007617950439453125, -0.04010009765625, 0.039154052734375, -0.0015811920166015625, -0.06060791015625, -0.0197296142578125, -0.043243408203125, -0.05828857421875, 0.0127105712890625, -0.02679443359375, -0.04302978515625, -0.010345458984375, 0.0235595703125, -0.028076171875, 0.039703369140625, -0.04364013671875, 0.08612060546875, -0.022125244140625, -0.041290283203125, -0.0182342529296875, -0.031768798828125, 0.01544952392578125, 0.00015413761138916016, -0.022705078125, 0.00344085693359375, 0.01389312744140625, 0.0703125, -0.059600830078125, 0.048858642578125, -0.037872314453125, 0.00333404541015625, 0.03515625, -0.0377197265625, 0.06494140625, 0.020843505859375, -0.0008144378662109375, -0.0015649795532226562, -0.008880615234375, -0.053680419921875, -0.0227203369140625, 0.0577392578125, -0.09906005859375, -0.016082763671875, -0.0428466796875, -0.0252532958984375, -0.01293182373046875, 0.0173187255859375, 0.062469482421875, 0.039581298828125, -0.016754150390625, 
0.003040313720703125, 0.036376953125, 0.0021991729736328125, 0.0231475830078125, 0.003597259521484375, -0.00547027587890625, -0.0416259765625, 0.06317138671875, -0.007190704345703125, 0.01178741455078125, -0.0035953521728515625, 0.0164031982421875, -0.0278167724609375, -0.03668212890625, -0.0249786376953125, 0.032501220703125, -0.051513671875, -0.0093231201171875, -0.019927978515625, -0.041259765625, -0.0202178955078125, 0.015167236328125, -0.03424072265625, -0.01019287109375, -0.036376953125, -0.013824462890625, 0.0361328125, 0.05548095703125, 0.00397491455078125, 0.062103271484375, -0.040771484375, 0.0175628662109375, 0.027740478515625, 0.041107177734375, -0.004444122314453125, -0.0645751953125, -0.02447509765625, 0.004177093505859375, -0.0269012451171875, -0.025726318359375, 0.0205078125, -0.0006623268127441406, 0.0379638671875, 0.041168212890625, -0.03411865234375, 0.06707763671875, -0.0181732177734375, 0.052520751953125, 0.033050537109375, -0.0347900390625, 0.036956787109375, -0.032745361328125, 0.027008056640625, 0.039459228515625, 0.03192138671875, 0.01345062255859375, 0.0146484375, -0.08154296875, -0.052703857421875, 0.060760498046875, 0.0203399658203125, 0.00847625732421875, 0.0187530517578125, 0.04290771484375, 0.00501251220703125, 0.01788330078125, -0.07415771484375, -0.030792236328125, -0.019927978515625, 0.002696990966796875, -0.0306549072265625, -0.027069091796875, -0.0203704833984375, -0.04925537109375, 0.07659912109375, -0.0098724365234375, 0.0277557373046875, -0.0031147003173828125, 0.01305389404296875, -0.021026611328125, -0.007030487060546875, 0.0357666015625, 0.052520751953125, -0.0364990234375, -0.02734375, 0.0176239013671875, -0.056884765625, -0.005863189697265625, 0.0301971435546875, -0.0090179443359375, 0.0009312629699707031, 0.0195465087890625, 0.0860595703125, 0.00717926025390625, 0.003314971923828125, 0.0261077880859375, -0.001483917236328125, -0.03375244140625, -0.0328369140625, 0.0237579345703125, -0.01502227783203125, 0.01617431640625, 
0.001216888427734375, 0.0291900634765625, -0.0092010498046875, -0.02325439453125, -0.006412506103515625, 0.0233306884765625, -0.026611328125, -0.022857666015625, 0.0689697265625, 0.01025390625, -0.004703521728515625, 0.055694580078125, -0.00860595703125, -0.0170745849609375, 0.05084228515625, 0.0447998046875, 0.065673828125, -0.007232666015625, 0.0027637481689453125, 0.062347412109375, 0.005035400390625, -0.02935791015625, 0.031707763671875, 0.00531768798828125, -0.03631591796875, -0.0208282470703125, -0.04364013671875, -0.00565338134765625, 0.04876708984375, -0.07440185546875, 0.03692626953125, -0.04925537109375, -0.035247802734375, 0.009552001953125, 0.0220184326171875, -0.072509765625, 0.039947509765625, 0.0033817291259765625, 0.07110595703125, -0.073974609375, 0.07830810546875, 0.042510986328125, -0.040771484375, -0.074462890625, -0.0204925537109375, -0.0163421630859375, -0.06866455078125, 0.0421142578125, -0.004421234130859375, 0.022705078125, 0.0078582763671875, -0.048248291015625, -0.0577392578125, 0.08441162109375, 0.024017333984375, -0.041351318359375, -0.004756927490234375, 0.0280609130859375, 0.046905517578125, -0.01047515869140625, 0.03936767578125, 0.02716064453125, 0.0308074951171875, 0.020477294921875, -0.07196044921875, -0.004955291748046875, -0.018951416015625, 0.001544952392578125, 0.0184478759765625, -0.062255859375, 0.080322265625, -0.012908935546875, 0.03564453125, 0.03076171875, 0.03387451171875, 0.0176544189453125, 0.006633758544921875, 0.0225830078125, 0.06512451171875, 0.038330078125, -0.0220489501953125, 0.0701904296875, -0.0154571533203125, 0.05682373046875, 0.09405517578125, -0.005504608154296875, 0.046295166015625, 0.0261688232421875, -0.025604248046875, 0.01187896728515625, 0.060546875, -0.037933349609375, 0.02789306640625, 0.0152130126953125, -0.0022735595703125, -0.023651123046875, 0.032958984375, -0.072021484375, 0.0266571044921875, -0.0011615753173828125, -0.048736572265625, -0.0201568603515625, -0.0178680419921875, 
-0.0066680908203125, -0.033966064453125, -0.034149169921875, 0.045379638671875, -0.018768310546875, -0.03363037109375, 0.0533447265625, 0.002468109130859375, 0.032440185546875, -0.062042236328125, 0.0036220550537109375, -0.004863739013671875, 0.0305938720703125, -0.0306854248046875, -0.0377197265625, -0.0005540847778320312, -0.00228118896484375, -0.005031585693359375, 0.01029205322265625, 0.031707763671875, -0.0204925537109375, -0.053680419921875, 0.00751495361328125, 0.0264892578125, 0.0229034423828125, -0.0108795166015625, -0.07537841796875, -0.00241851806640625, -0.0038433074951171875, -0.031646728515625, 0.03179931640625, 0.016845703125, 0.0070037841796875, 0.031280517578125, 0.044342041015625, 0.0040283203125, -0.0029296875, 0.01558685302734375, 0.0689697265625, -0.04852294921875, -0.049774169921875, -0.053802490234375, 0.023651123046875, -0.01024627685546875, -0.0675048828125, 0.0408935546875, 0.07977294921875, 0.059478759765625, -0.02459716796875, 0.0531005859375, 0.0024051666259765625, 0.018646240234375, -0.040557861328125, 0.04364013671875, -0.0291290283203125, -0.0021820068359375, -0.027313232421875, -0.0889892578125, 0.0143280029296875, 0.058380126953125, -0.0244293212890625, 0.0301513671875, 0.042205810546875, 0.0438232421875, -0.0218658447265625, 0.0136871337890625, 0.00518798828125, 0.01332855224609375, 0.01160430908203125, 0.04449462890625, 0.03594970703125, -0.07196044921875, 0.0289764404296875, -0.02972412109375, -0.0099334716796875, -0.01042938232421875, -0.047210693359375, -0.07208251953125, -0.0197296142578125, -0.047119140625, -0.04290771484375, -0.004970550537109375, 0.06658935546875, 0.058502197265625, -0.047027587890625, -0.0207977294921875, -0.007076263427734375, -0.0257568359375, -0.00930023193359375, -0.01244354248046875, 0.043304443359375, -0.00290679931640625, -0.047760009765625, -0.01221466064453125, -0.030914306640625, 0.034881591796875, -0.01158905029296875, -0.0168914794921875, -0.004062652587890625, -0.0305023193359375, 
0.032928466796875, 0.0234222412109375, -0.0400390625, -0.0274810791015625, -0.03631591796875, -0.00693511962890625, 0.0136566162109375, 0.0271759033203125, -0.044677734375, 0.0006003379821777344, 0.015167236328125, 0.01239013671875, 0.06280517578125, 0.002574920654296875, 0.01473236083984375, -0.04443359375, 0.042236328125, -0.00530242919921875, 0.0306549072265625, -0.0006279945373535156, -0.032012939453125, 0.052459716796875, 0.031341552734375, -0.04339599609375, -0.047698974609375, -0.00814056396484375, -0.08648681640625, 0.01078033447265625, 0.0946044921875, 0.00609588623046875, -0.0258331298828125, 0.03631591796875, -0.025604248046875, 0.0252532958984375, -0.00405120849609375, 0.039398193359375, 0.037506103515625, -0.006038665771484375, 0.0181884765625, -0.044189453125, 0.04071044921875, 0.004486083984375, -0.07135009765625, -0.0137481689453125, 0.0212554931640625, 0.046600341796875, -0.0022296905517578125, 0.0311126708984375, -0.004589080810546875, 0.0267791748046875, 0.01168060302734375, 0.00787353515625, -0.033447265625, -0.0102996826171875, -0.0174713134765625, 0.001369476318359375, 0.00666046142578125, -0.0274810791015625 ] ]
Yntec/NaughtyChildren
2023-09-26T08:45:33.000Z
[ "diffusers", "Children Books", "Photorealistic", "Highly Detailed", "AreThoseLevel4Plates", "Zovya", "stable-diffusion", "stable-diffusion-diffusers", "text-to-image", "license:creativeml-openrail-m", "endpoints_compatible", "has_space", "diffusers:StableDiffusionPipeline", "region:us" ]
text-to-image
Yntec
null
null
Yntec/NaughtyChildren
0
5,840
diffusers
2023-09-26T05:48:03
--- license: creativeml-openrail-m library_name: diffusers pipeline_tag: text-to-image tags: - Children Books - Photorealistic - Highly Detailed - AreThoseLevel4Plates - Zovya - stable-diffusion - stable-diffusion-diffusers - diffusers - text-to-image --- # NaughtyChildren A mix of EdgeOfRealism by AreThoseLevel4Plates and ChildrenStoriesCustom by Zovya, improving the backgrounds of the former and the faces of the latter. It has the zVAE baked in. Comparison: ![Comparison](https://cdn-uploads.huggingface.co/production/uploads/63239b8370edc53f51cd5d42/S4MAInZzPcTwuEF9quRO-.png) (Click for larger) Sample and prompts: ![Sample](https://cdn-uploads.huggingface.co/production/uploads/63239b8370edc53f51cd5d42/sAk4C0oADouqH46Ti_Dvs.png) Professional fine details photo portrait of pretty cute girl from kazan, tatarstan kid in the postsoviet suburbia, tatar, detailed photo, beautiful eyes, instagram, iphone. Original pages: https://civitai.com/models/21813?modelVersionId=26041 (Edge Of Realism) https://civitai.com/api/download/models/106092 (ChildrenStoriesCustom) # Recipe: - SuperMerge Weight sum Train difference use MBW 0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1 Model A: ChildrenStoriesCustom Model B: EdgeOfRealism Output: NaughtyChildren - Fp16 no-ema: NaughtyChildrenMini - Bake in zVae: NaughtyChildrenz
1,355
[ [ -0.036041259765625, -0.0340576171875, 0.01085662841796875, 0.0237579345703125, -0.02557373046875, -0.0008473396301269531, 0.028717041015625, -0.040252685546875, 0.037872314453125, 0.03857421875, -0.08807373046875, -0.0304107666015625, -0.02728271484375, -0.0310821533203125, -0.01421356201171875, 0.054412841796875, 0.00675201416015625, 0.0204620361328125, -0.0178070068359375, 0.0241241455078125, -0.0203094482421875, 0.007598876953125, -0.0338134765625, -0.0303497314453125, 0.00945281982421875, 0.05511474609375, 0.052001953125, 0.0400390625, 0.005084991455078125, 0.01464080810546875, -0.006816864013671875, 0.00890350341796875, -0.041351318359375, 0.0199432373046875, -0.0401611328125, -0.004718780517578125, -0.03814697265625, -0.004863739013671875, 0.012664794921875, 0.00835418701171875, -0.003864288330078125, 0.01503753662109375, -0.00685882568359375, 0.0264434814453125, -0.045318603515625, -0.01666259765625, -0.00927734375, 0.029388427734375, 0.0159149169921875, 0.00391387939453125, 0.0011835098266601562, -0.0230560302734375, -0.01029205322265625, -0.05316162109375, 0.01282501220703125, -0.004108428955078125, 0.08392333984375, -0.017669677734375, -0.02691650390625, -0.034393310546875, -0.0587158203125, 0.050140380859375, -0.049224853515625, 0.025177001953125, 0.0238037109375, 0.0224761962890625, -0.039276123046875, -0.05816650390625, -0.04443359375, 0.0244140625, 0.0193634033203125, 0.0316162109375, -0.0322265625, -0.024169921875, 0.0164642333984375, 0.0277252197265625, -0.0391845703125, -0.0026264190673828125, -0.01012420654296875, 0.011505126953125, 0.047943115234375, 0.032257080078125, 0.0653076171875, 0.007259368896484375, -0.051116943359375, -0.025146484375, -0.05950927734375, 0.0161285400390625, 0.0347900390625, 0.00534820556640625, -0.0271453857421875, 0.02685546875, -0.0114898681640625, 0.046905517578125, 0.02850341796875, -0.0279998779296875, 0.0032062530517578125, -0.0316162109375, -0.002010345458984375, -0.004917144775390625, 0.037994384765625, 
0.07861328125, 0.011383056640625, 0.00458526611328125, -0.02520751953125, 0.0038928985595703125, 0.0150909423828125, -0.1075439453125, -0.025299072265625, 0.0002090930938720703, -0.0266571044921875, -0.0197601318359375, 0.001346588134765625, -0.05645751953125, -0.018310546875, 0.00193023681640625, 0.0093994140625, -0.05987548828125, -0.039337158203125, -0.0034313201904296875, -0.0078277587890625, 0.038330078125, 0.021575927734375, -0.078369140625, 0.05078125, 0.047943115234375, 0.048065185546875, 0.022918701171875, 0.0030689239501953125, -0.0172882080078125, -0.00489044189453125, -0.046630859375, 0.0626220703125, -0.03167724609375, -0.054718017578125, -0.01507568359375, 0.016998291015625, 0.00839996337890625, -0.0299072265625, 0.044342041015625, -0.02496337890625, -0.006809234619140625, -0.045501708984375, -0.019134521484375, -0.019500732421875, 0.009185791015625, -0.0229644775390625, 0.058074951171875, 0.01568603515625, -0.036102294921875, 0.057281494140625, -0.0109710693359375, -0.0236053466796875, 0.01087188720703125, -0.0030765533447265625, -0.0204010009765625, 0.018951416015625, 0.0295867919921875, 0.0245513916015625, -0.005069732666015625, 0.006580352783203125, -0.017242431640625, -0.0361328125, 0.0005612373352050781, -0.0173492431640625, 0.07305908203125, 0.03759765625, -0.040130615234375, 0.005184173583984375, -0.053375244140625, 0.00374603271484375, 0.031646728515625, 0.0283660888671875, -0.02777099609375, -0.007503509521484375, 0.035369873046875, 0.06329345703125, 0.01338958740234375, -0.052886962890625, 0.019012451171875, -0.0148773193359375, 0.007030487060546875, 0.044769287109375, 0.0172882080078125, -0.007053375244140625, -0.046478271484375, 0.0662841796875, 0.020355224609375, 0.036956787109375, 0.002918243408203125, -0.039154052734375, -0.040618896484375, -0.034088134765625, 0.00689697265625, 0.018280029296875, -0.09130859375, 0.031402587890625, -0.015960693359375, -0.06146240234375, -0.03350830078125, -0.012298583984375, 0.0587158203125, 
0.0247039794921875, 0.004894256591796875, -0.05194091796875, -0.02691650390625, -0.08319091796875, 0.00815582275390625, 0.0156402587890625, 0.00778961181640625, 0.01306915283203125, 0.039764404296875, 0.0080108642578125, 0.031585693359375, -0.016204833984375, -0.0017175674438476562, -0.010894775390625, -0.0277252197265625, 0.0248260498046875, 0.06512451171875, 0.0689697265625, -0.078369140625, -0.054290771484375, -0.01187896728515625, -0.032318115234375, 0.00860595703125, 0.01293182373046875, -0.02215576171875, -0.024200439453125, 0.0015316009521484375, -0.040679931640625, 0.046600341796875, 0.0246429443359375, -0.06414794921875, 0.04205322265625, -0.0110321044921875, 0.039154052734375, -0.08349609375, -0.0164642333984375, 0.0174102783203125, -0.033111572265625, -0.041717529296875, 0.059356689453125, -0.008087158203125, -0.006969451904296875, -0.0728759765625, 0.050140380859375, -0.0399169921875, -0.0142822265625, -0.0259246826171875, 0.0084686279296875, 0.01285552978515625, 0.023773193359375, -0.011566162109375, 0.0689697265625, 0.071044921875, -0.04217529296875, 0.04840087890625, 0.01543426513671875, -0.0083770751953125, 0.06951904296875, -0.049224853515625, 0.00763702392578125, 0.01326751708984375, 0.0166168212890625, -0.06622314453125, -0.03424072265625, 0.04840087890625, -0.051116943359375, 0.011810302734375, -0.0202484130859375, -0.07952880859375, -0.026885986328125, -0.039093017578125, 0.0194549560546875, 0.054473876953125, -0.059417724609375, 0.042755126953125, 0.0197906494140625, 0.00550079345703125, -0.0197296142578125, -0.05889892578125, 0.0143280029296875, -0.00940704345703125, -0.041473388671875, 0.04791259765625, -0.00815582275390625, -0.028961181640625, 0.0291595458984375, -0.003185272216796875, -0.009796142578125, -0.0133209228515625, 0.02166748046875, 0.003643035888671875, -0.015777587890625, -0.0284881591796875, 0.0295867919921875, 0.01180267333984375, 0.0035839080810546875, -0.0181427001953125, 0.053070068359375, -0.0377197265625, 
-0.01372528076171875, -0.049285888671875, 0.026580810546875, 0.0340576171875, 0.0041046142578125, 0.046539306640625, 0.03948974609375, -0.0697021484375, -0.01727294921875, -0.01161956787109375, 0.00006300210952758789, -0.036590576171875, -0.01837158203125, -0.043365478515625, -0.01406097412109375, 0.082763671875, 0.0228424072265625, -0.0043792724609375, 0.0304412841796875, 0.031280517578125, 0.01104736328125, 0.08319091796875, 0.02935791015625, 0.0272369384765625, 0.0272369384765625, -0.054412841796875, -0.00811767578125, -0.05816650390625, -0.031951904296875, -0.006160736083984375, -0.0305023193359375, -0.053070068359375, -0.0186309814453125, -0.004016876220703125, -0.017364501953125, -0.0026187896728515625, 0.04022216796875, -0.041748046875, 0.047637939453125, 0.03302001953125, 0.03350830078125, 0.01403045654296875, -0.009307861328125, -0.006500244140625, -0.0310211181640625, -0.052703857421875, -0.029083251953125, 0.03399658203125, -0.001922607421875, 0.037872314453125, 0.0264739990234375, 0.069091796875, 0.0004627704620361328, 0.0224151611328125, -0.053680419921875, 0.060791015625, -0.0220184326171875, -0.0718994140625, 0.0172119140625, -0.01425933837890625, -0.054901123046875, 0.0277099609375, -0.02911376953125, -0.057830810546875, 0.043304443359375, 0.00008767843246459961, -0.03826904296875, 0.04058837890625, -0.028076171875, 0.03814697265625, -0.0178680419921875, -0.07061767578125, -0.0018892288208007812, -0.036895751953125, 0.033905029296875, 0.042205810546875, 0.005870819091796875, 0.006107330322265625, 0.0005927085876464844, 0.0523681640625, -0.040740966796875, 0.060577392578125, -0.0143585205078125, -0.00894927978515625, 0.02874755859375, 0.0241241455078125, 0.002788543701171875, 0.02593994140625, 0.005054473876953125, -0.042144775390625, -0.0065460205078125, -0.058441162109375, -0.050811767578125, 0.0999755859375, -0.04150390625, -0.03717041015625, -0.04840087890625, -0.0237579345703125, 0.00415802001953125, 0.046356201171875, 0.04229736328125, 
0.07159423828125, -0.019775390625, 0.00399017333984375, 0.073486328125, -0.0062408447265625, 0.0282440185546875, 0.0011167526245117188, -0.057708740234375, -0.02490234375, 0.062408447265625, 0.012054443359375, 0.0169830322265625, -0.02008056640625, 0.0159454345703125, 0.002132415771484375, -0.0208892822265625, -0.05792236328125, 0.015228271484375, -0.0582275390625, -0.0039825439453125, -0.025634765625, -0.008758544921875, -0.034881591796875, -0.0301513671875, -0.0462646484375, -0.0246429443359375, -0.029449462890625, -0.01593017578125, 0.02923583984375, 0.059112548828125, -0.01140594482421875, 0.0179901123046875, -0.031463623046875, 0.0250396728515625, 0.022125244140625, 0.025787353515625, 0.00769805908203125, -0.03131103515625, -0.0009226799011230469, 0.004535675048828125, -0.03424072265625, -0.048492431640625, 0.0408935546875, -0.00748443603515625, 0.0273590087890625, 0.035614013671875, -0.0007581710815429688, 0.049652099609375, -0.0163116455078125, 0.066650390625, 0.0270538330078125, -0.060821533203125, 0.0231170654296875, -0.04876708984375, 0.016937255859375, 0.0552978515625, 0.02301025390625, -0.0272064208984375, -0.0162811279296875, -0.085693359375, -0.07257080078125, 0.0345458984375, 0.0178985595703125, 0.04949951171875, 0.012451171875, 0.0220489501953125, 0.02117919921875, 0.018646240234375, -0.04473876953125, -0.0205078125, -0.02880859375, -0.01165771484375, 0.03289794921875, -0.006511688232421875, 0.004058837890625, -0.035064697265625, 0.0614013671875, 0.0179290771484375, 0.0179901123046875, 0.0119476318359375, 0.0176849365234375, -0.033416748046875, -0.023406982421875, 0.059814453125, 0.07659912109375, -0.017333984375, -0.00644683837890625, -0.0203704833984375, -0.041168212890625, 0.02001953125, -0.01219940185546875, -0.023712158203125, 0.00191497802734375, 0.037628173828125, 0.06707763671875, 0.0282440185546875, -0.0318603515625, 0.0367431640625, -0.0211944580078125, -0.003231048583984375, -0.047698974609375, 0.041839599609375, 0.01514434814453125, 
0.0265960693359375, 0.0183563232421875, -0.000583648681640625, 0.03240966796875, -0.062286376953125, 0.025970458984375, 0.005035400390625, -0.0491943359375, -0.044189453125, 0.04302978515625, -0.0088043212890625, -0.042388916015625, 0.035400390625, -0.037200927734375, -0.039215087890625, 0.0732421875, 0.063720703125, 0.061309814453125, -0.031890869140625, 0.0386962890625, 0.01702880859375, 0.00942230224609375, 0.00475311279296875, 0.048126220703125, 0.01090240478515625, -0.04754638671875, 0.00391387939453125, -0.007450103759765625, -0.037841796875, 0.01458740234375, -0.0655517578125, 0.03253173828125, -0.0732421875, -0.002079010009765625, -0.016387939453125, 0.004863739013671875, -0.047607421875, 0.058013916015625, -0.0006270408630371094, 0.07073974609375, -0.082763671875, 0.04486083984375, 0.05523681640625, -0.0322265625, -0.04248046875, 0.032989501953125, 0.0022678375244140625, -0.0355224609375, 0.02520751953125, 0.02252197265625, -0.0369873046875, 0.005672454833984375, -0.06787109375, -0.052947998046875, 0.058441162109375, -0.01404571533203125, -0.02459716796875, 0.004856109619140625, -0.048553466796875, 0.059051513671875, -0.0587158203125, 0.00778961181640625, 0.0296478271484375, 0.026092529296875, 0.031951904296875, -0.01995849609375, 0.0036029815673828125, -0.06134033203125, 0.0205841064453125, -0.00984954833984375, -0.0648193359375, 0.063720703125, -0.0120849609375, -0.01537322998046875, 0.00846099853515625, 0.073974609375, 0.036041259765625, 0.03790283203125, 0.036285400390625, 0.046600341796875, -0.007572174072265625, 0.0011882781982421875, 0.08038330078125, -0.0106201171875, 0.018585205078125, 0.064697265625, -0.006908416748046875, 0.01898193359375, 0.0280303955078125, -0.005756378173828125, 0.03448486328125, 0.07708740234375, -0.003833770751953125, 0.060638427734375, 0.017333984375, -0.0062103271484375, -0.0091094970703125, -0.028289794921875, -0.017333984375, 0.01099395751953125, 0.03271484375, 0.00040411949157714844, -0.0210113525390625, 
0.0192718505859375, 0.0290069580078125, 0.0271148681640625, -0.0169525146484375, 0.03350830078125, 0.0179595947265625, -0.0302886962890625, 0.046966552734375, -0.004119873046875, 0.04583740234375, -0.016845703125, -0.0205078125, -0.021514892578125, 0.0025348663330078125, -0.01727294921875, -0.04937744140625, -0.0005626678466796875, 0.0037097930908203125, 0.00875091552734375, -0.012603759765625, 0.054229736328125, -0.0162200927734375, -0.06610107421875, 0.02496337890625, 0.01117706298828125, -0.000004827976226806641, 0.052642822265625, -0.06671142578125, -0.0029163360595703125, -0.006862640380859375, -0.040802001953125, 0.005615234375, 0.0226593017578125, 0.002826690673828125, 0.045013427734375, 0.0216217041015625, 0.0187835693359375, -0.00555419921875, -0.01336669921875, 0.08587646484375, -0.03350830078125, -0.059356689453125, -0.053436279296875, 0.03155517578125, -0.006862640380859375, -0.053436279296875, 0.0657958984375, 0.035675048828125, 0.0618896484375, -0.016021728515625, 0.02960205078125, -0.01332855224609375, 0.0149688720703125, -0.052703857421875, 0.05218505859375, -0.08056640625, -0.01476287841796875, -0.03839111328125, -0.07232666015625, -0.00823211669921875, 0.034149169921875, -0.01296234130859375, 0.026336669921875, 0.0190887451171875, 0.05242919921875, -0.04473876953125, -0.007602691650390625, 0.01898193359375, -0.005229949951171875, 0.028350830078125, 0.028167724609375, 0.062408447265625, -0.039337158203125, -0.0252838134765625, -0.05792236328125, -0.0200347900390625, -0.033355712890625, -0.05035400390625, -0.0545654296875, -0.035369873046875, -0.0112762451171875, -0.038970947265625, 0.027008056640625, 0.07806396484375, 0.047607421875, -0.05377197265625, -0.00027251243591308594, 0.0184783935546875, 0.00568389892578125, -0.0264892578125, -0.015228271484375, 0.00656890869140625, 0.040771484375, -0.06719970703125, 0.037841796875, -0.00829315185546875, 0.0245361328125, -0.0302581787109375, 0.0210418701171875, -0.0214080810546875, 0.03094482421875, 
0.006500244140625, 0.0016946792602539062, -0.0308074951171875, -0.01537322998046875, -0.013397216796875, -0.0081787109375, 0.00754547119140625, 0.04620361328125, -0.0231781005859375, 0.0313720703125, 0.035614013671875, 0.0029850006103515625, 0.050262451171875, 0.01514434814453125, 0.020355224609375, -0.036956787109375, 0.03521728515625, 0.0248260498046875, 0.0220489501953125, 0.047515869140625, -0.040924072265625, 0.0231781005859375, 0.021026611328125, -0.012176513671875, -0.062225341796875, 0.033843994140625, -0.041015625, -0.0244293212890625, 0.06878662109375, -0.0146484375, -0.05889892578125, 0.013458251953125, -0.0018167495727539062, 0.01396942138671875, -0.032470703125, 0.028076171875, 0.058441162109375, 0.0015659332275390625, -0.0006613731384277344, -0.040618896484375, 0.01026153564453125, 0.01485443115234375, -0.04058837890625, -0.0253448486328125, 0.04058837890625, 0.0303192138671875, 0.0174407958984375, 0.0294952392578125, -0.0242462158203125, 0.03900146484375, 0.0271453857421875, 0.032470703125, -0.018829345703125, -0.046112060546875, -0.01203155517578125, -0.031402587890625, -0.0131378173828125, -0.01177215576171875 ] ]
quantumaikr/llama-2-70B-instruct
2023-09-03T05:44:08.000Z
[ "transformers", "safetensors", "llama", "text-generation", "en", "license:cc-by-nc-4.0", "endpoints_compatible", "text-generation-inference", "region:us" ]
text-generation
quantumaikr
null
null
quantumaikr/llama-2-70B-instruct
0
5,839
transformers
2023-09-03T02:52:14
--- license: cc-by-nc-4.0 language: - en pipeline_tag: text-generation --- # quantumaikr/quantumairk-llama-2-70B-instruct ## Model Description `quantumaikr/quantumairk-llama-2-70B-instruct` is a Llama2 70B model(garage-bAInd/Platypus2-70B-instruct) finetuned on some Dataset ## Usage Start chatting with `quantumaikr/quantumairk-llama-2-70B-instruct` using the following code snippet: ```python import torch from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline tokenizer = AutoTokenizer.from_pretrained("quantumaikr/quantumairk-llama-2-70B-instruct") model = AutoModelForCausalLM.from_pretrained("quantumaikr/quantumairk-llama-2-70B-instruct", torch_dtype=torch.float16, device_map="auto") system_prompt = "You are QuantumLM, an AI that follows instructions extremely well. Help as much as you can. Remember, be safe, and don't do anything illegal." message = "Write me a poem please" prompt = f"[INST] <<SYS>>\n{system_prompt}\n<</SYS>>\n\n{message}[/INST]" inputs = tokenizer(prompt, return_tensors="pt").to("cuda") output = model.generate(**inputs, do_sample=True, temperature=0.9, top_p=0.75, max_new_tokens=2048) print(tokenizer.decode(output[0], skip_special_tokens=True)) ``` QuantumLM should be used with this prompt format: ``` ### System: This is a system prompt, please behave and help the user. ### User: Your prompt here ### Assistant The output of QuantumLM ``` ## Use and Limitations ### Intended Use These models are intended for research only, in adherence with the [CC BY-NC-4.0](https://creativecommons.org/licenses/by-nc/4.0/) license. ### Limitations and bias Although the aforementioned dataset helps to steer the base language models into "safer" distributions of text, not all biases and toxicity can be mitigated through fine-tuning. We ask that users be mindful of such potential issues that can arise in generated responses. Do not treat model outputs as substitutes for human judgment or as sources of truth. Please use it responsibly. 
Contact us : hi@quantumai.kr
2,032
[ [ 0.00563812255859375, -0.05389404296875, 0.019989013671875, 0.00799560546875, -0.033721923828125, 0.004009246826171875, 0.0016117095947265625, -0.01523590087890625, 0.00861358642578125, 0.027862548828125, -0.03594970703125, -0.02813720703125, -0.039642333984375, -0.0019311904907226562, -0.03729248046875, 0.07635498046875, 0.0022144317626953125, -0.005054473876953125, -0.00304412841796875, 0.0206451416015625, -0.0272216796875, -0.03729248046875, -0.06439208984375, -0.0189971923828125, 0.0163116455078125, 0.0153350830078125, 0.053466796875, 0.0235595703125, 0.018280029296875, 0.031585693359375, -0.016082763671875, 0.0109100341796875, -0.032684326171875, 0.0072784423828125, 0.013946533203125, -0.050506591796875, -0.038787841796875, -0.004360198974609375, 0.054962158203125, 0.0120086669921875, -0.01036834716796875, 0.030029296875, 0.0018291473388671875, 0.028472900390625, -0.04034423828125, 0.01291656494140625, -0.0452880859375, 0.01027679443359375, -0.01392364501953125, -0.00815582275390625, -0.0263671875, -0.01348876953125, -0.005397796630859375, -0.049560546875, 0.00229644775390625, 0.01019287109375, 0.07342529296875, 0.0191497802734375, -0.045440673828125, -0.006710052490234375, -0.042633056640625, 0.05804443359375, -0.07720947265625, 0.033477783203125, 0.037628173828125, 0.0135345458984375, -0.0157470703125, -0.0662841796875, -0.05938720703125, -0.035186767578125, -0.00366973876953125, 0.0103759765625, -0.036224365234375, -0.010467529296875, 0.0240325927734375, 0.01407623291015625, -0.048065185546875, -0.00824737548828125, -0.047943115234375, -0.036651611328125, 0.04150390625, 0.034698486328125, 0.01384735107421875, -0.03662109375, -0.0219573974609375, -0.0260772705078125, -0.0280303955078125, 0.01221466064453125, 0.0286865234375, 0.002468109130859375, -0.0299530029296875, 0.062255859375, -0.029388427734375, 0.02728271484375, 0.0223236083984375, -0.00173187255859375, 0.0241241455078125, -0.022674560546875, -0.0222625732421875, 0.010009765625, 0.0775146484375, 
0.0285186767578125, -0.00832366943359375, -0.01096343994140625, -0.0068817138671875, 0.0012960433959960938, 0.0007128715515136719, -0.051666259765625, -0.03228759765625, 0.030853271484375, -0.032073974609375, -0.031402587890625, -0.01126861572265625, -0.044036865234375, -0.018768310546875, -0.001644134521484375, 0.05657958984375, -0.0163726806640625, -0.03753662109375, 0.00490570068359375, -0.0223236083984375, 0.02996826171875, -0.0096282958984375, -0.05938720703125, 0.012542724609375, 0.033477783203125, 0.053466796875, 0.01461029052734375, -0.0294952392578125, -0.01654052734375, 0.0178985595703125, -0.025238037109375, 0.034881591796875, -0.0128936767578125, -0.036834716796875, -0.022003173828125, -0.004238128662109375, 0.0025920867919921875, -0.028472900390625, 0.033660888671875, -0.03955078125, 0.032440185546875, -0.004520416259765625, -0.027862548828125, -0.0162811279296875, -0.0007314682006835938, -0.033355712890625, 0.0784912109375, 0.0136260986328125, -0.0743408203125, -0.006847381591796875, -0.046356201171875, -0.021453857421875, 0.007717132568359375, -0.0036163330078125, -0.038848876953125, -0.002338409423828125, 0.0101470947265625, 0.0307159423828125, -0.010955810546875, 0.020751953125, -0.0229339599609375, -0.021881103515625, 0.03411865234375, -0.041229248046875, 0.09521484375, 0.02655029296875, -0.038238525390625, 0.0305328369140625, -0.054840087890625, 0.0025882720947265625, 0.0298309326171875, -0.0191497802734375, -0.0025177001953125, -0.0200958251953125, 0.0047760009765625, 0.00009292364120483398, 0.0396728515625, -0.036712646484375, 0.0099639892578125, -0.03753662109375, 0.059661865234375, 0.07366943359375, -0.0028896331787109375, 0.0311279296875, -0.029876708984375, 0.040924072265625, 0.01216888427734375, 0.018035888671875, 0.0267486572265625, -0.040008544921875, -0.07794189453125, -0.01245880126953125, 0.01103973388671875, 0.04327392578125, -0.0655517578125, 0.04217529296875, -0.011322021484375, -0.0548095703125, -0.03759765625, 
-0.0001049041748046875, 0.0199127197265625, 0.049285888671875, 0.0191650390625, -0.0003631114959716797, -0.0533447265625, -0.07342529296875, -0.0103302001953125, -0.02947998046875, 0.005100250244140625, -0.00040912628173828125, 0.03753662109375, -0.038970947265625, 0.056365966796875, -0.04083251953125, -0.0011997222900390625, -0.01206207275390625, -0.01061248779296875, 0.0284271240234375, 0.05999755859375, 0.004535675048828125, -0.0404052734375, -0.0250701904296875, -0.025238037109375, -0.06646728515625, 0.01178741455078125, -0.01318359375, -0.054779052734375, 0.00861358642578125, 0.0293731689453125, -0.0728759765625, 0.028106689453125, 0.028289794921875, -0.052978515625, 0.04168701171875, -0.0029811859130859375, 0.0009179115295410156, -0.1014404296875, 0.00676727294921875, -0.01244354248046875, -0.005947113037109375, -0.05059814453125, 0.0105438232421875, -0.006683349609375, 0.00832366943359375, -0.047027587890625, 0.05419921875, -0.004283905029296875, 0.0236968994140625, -0.009735107421875, -0.02032470703125, 0.0015506744384765625, 0.04095458984375, -0.00933837890625, 0.0462646484375, 0.051483154296875, -0.0450439453125, 0.047515869140625, 0.032562255859375, -0.011383056640625, 0.0061492919921875, -0.061126708984375, 0.0196533203125, 0.00545501708984375, 0.026763916015625, -0.077392578125, -0.005035400390625, 0.053558349609375, -0.05419921875, 0.005863189697265625, 0.0133514404296875, -0.0540771484375, -0.0296783447265625, -0.0235137939453125, 0.0445556640625, 0.05902099609375, -0.0294342041015625, 0.056915283203125, 0.0186004638671875, 0.0126800537109375, -0.033538818359375, -0.058502197265625, -0.006275177001953125, -0.033660888671875, -0.037994384765625, 0.008148193359375, -0.0177154541015625, -0.0051422119140625, -0.0003204345703125, -0.0042572021484375, -0.00525665283203125, 0.0217742919921875, 0.03448486328125, 0.035491943359375, -0.0023136138916015625, -0.004520416259765625, 0.0179901123046875, -0.0189361572265625, 0.0303497314453125, -0.010650634765625, 
0.053680419921875, -0.027862548828125, -0.01180267333984375, -0.06134033203125, 0.01337432861328125, 0.035614013671875, -0.0054473876953125, 0.053253173828125, 0.063720703125, -0.031951904296875, 0.0091094970703125, -0.0238494873046875, -0.0294647216796875, -0.04339599609375, 0.033447265625, -0.027069091796875, -0.0540771484375, 0.0323486328125, 0.0281524658203125, 0.00644683837890625, 0.060882568359375, 0.062744140625, -0.01219940185546875, 0.08453369140625, 0.04248046875, 0.005916595458984375, 0.025726318359375, -0.047698974609375, 0.0205078125, -0.08782958984375, -0.019287109375, -0.0247344970703125, -0.006107330322265625, -0.048431396484375, -0.0100860595703125, 0.03155517578125, 0.0242156982421875, -0.050689697265625, 0.0274810791015625, -0.053497314453125, 0.00818634033203125, 0.06689453125, 0.02435302734375, 0.004451751708984375, 0.004848480224609375, -0.0135498046875, 0.0146331787109375, -0.0574951171875, -0.035003662109375, 0.09295654296875, 0.03985595703125, 0.044952392578125, 0.0162353515625, 0.065673828125, 0.00148773193359375, -0.0030364990234375, -0.032958984375, 0.053009033203125, 0.015594482421875, -0.05908203125, -0.02093505859375, -0.033172607421875, -0.0706787109375, 0.0135650634765625, -0.003376007080078125, -0.045501708984375, 0.0030841827392578125, 0.01457977294921875, -0.04437255859375, 0.0182342529296875, -0.053192138671875, 0.056121826171875, -0.00972747802734375, -0.005596160888671875, -0.0150604248046875, -0.03369140625, 0.03997802734375, 0.001873016357421875, 0.0097808837890625, -0.0086517333984375, 0.0062408447265625, 0.0872802734375, -0.0216827392578125, 0.07171630859375, -0.0002551078796386719, -0.00713348388671875, 0.033782958984375, 0.00969696044921875, 0.02313232421875, 0.01557159423828125, -0.00640106201171875, 0.027618408203125, 0.027984619140625, -0.040802001953125, -0.016448974609375, 0.048980712890625, -0.08709716796875, -0.05181884765625, -0.0400390625, -0.033966064453125, 0.00408172607421875, 0.0157012939453125, 
0.044036865234375, 0.0193939208984375, 0.0290069580078125, -0.0061187744140625, 0.025787353515625, -0.024444580078125, 0.03875732421875, 0.0264892578125, -0.0180206298828125, -0.041473388671875, 0.07012939453125, -0.0012388229370117188, 0.005428314208984375, 0.0091094970703125, 0.0323486328125, -0.034210205078125, -0.020416259765625, -0.03265380859375, 0.01534271240234375, -0.06353759765625, -0.0307464599609375, -0.041351318359375, -0.035125732421875, -0.0462646484375, 0.006412506103515625, -0.0135345458984375, -0.03533935546875, -0.036346435546875, -0.00476837158203125, 0.050384521484375, 0.034759521484375, -0.0218048095703125, 0.0283203125, -0.059173583984375, 0.03558349609375, 0.043426513671875, 0.004161834716796875, 0.0240478515625, -0.0679931640625, -0.0179901123046875, 0.0213623046875, -0.029510498046875, -0.06622314453125, 0.033966064453125, 0.0032291412353515625, 0.046966552734375, 0.0092010498046875, 0.0235443115234375, 0.0482177734375, -0.0260467529296875, 0.07232666015625, 0.0150299072265625, -0.08697509765625, 0.049652099609375, -0.0141448974609375, 0.032073974609375, 0.01012420654296875, 0.014801025390625, -0.03131103515625, -0.05108642578125, -0.062103271484375, -0.0631103515625, 0.04534912109375, 0.0355224609375, 0.037872314453125, -0.0048370361328125, 0.028411865234375, -0.01197052001953125, 0.0180206298828125, -0.0794677734375, -0.038421630859375, -0.03594970703125, -0.0275115966796875, -0.004657745361328125, -0.0215911865234375, -0.0180206298828125, -0.04290771484375, 0.072265625, 0.007625579833984375, 0.04254150390625, 0.0237884521484375, -0.0162811279296875, -0.0244903564453125, 0.003856658935546875, 0.044403076171875, 0.0300445556640625, -0.0071258544921875, 0.021087646484375, 0.02655029296875, -0.04168701171875, 0.00482177734375, 0.0006070137023925781, -0.0149383544921875, -0.0183563232421875, 0.0345458984375, 0.06988525390625, -0.01080322265625, -0.034881591796875, 0.013946533203125, -0.01519775390625, -0.01380157470703125, -0.02947998046875, 
0.01493072509765625, 0.0203857421875, 0.044158935546875, 0.0269622802734375, -0.00727081298828125, 0.0140380859375, -0.0220489501953125, 0.0019521713256835938, 0.04412841796875, -0.0010557174682617188, -0.016326904296875, 0.07135009765625, 0.0031452178955078125, -0.0291900634765625, 0.0654296875, -0.0135955810546875, -0.05072021484375, 0.052398681640625, 0.040252685546875, 0.07098388671875, 0.012115478515625, 0.006855010986328125, 0.03106689453125, 0.017974853515625, 0.0190582275390625, 0.0240478515625, -0.0010442733764648438, -0.049041748046875, -0.011566162109375, -0.05517578125, -0.01183319091796875, 0.00814056396484375, -0.035491943359375, 0.010009765625, -0.044097900390625, -0.0211334228515625, -0.0063629150390625, 0.020416259765625, -0.050689697265625, -0.00084686279296875, 0.0200042724609375, 0.04254150390625, -0.06951904296875, 0.0745849609375, 0.039154052734375, -0.03643798828125, -0.085205078125, -0.0139923095703125, -0.01142120361328125, -0.07159423828125, 0.061126708984375, 0.00374603271484375, 0.0038509368896484375, 0.03173828125, -0.061248779296875, -0.06829833984375, 0.09234619140625, 0.013641357421875, -0.0288238525390625, 0.004711151123046875, 0.006572723388671875, 0.029083251953125, 0.0027599334716796875, 0.036376953125, 0.026702880859375, 0.0232696533203125, -0.00534820556640625, -0.06964111328125, 0.0236358642578125, -0.0242462158203125, 0.00975799560546875, -0.0023059844970703125, -0.059722900390625, 0.082763671875, -0.0105743408203125, -0.0124359130859375, 0.0222015380859375, 0.074951171875, 0.036285400390625, 0.00687408447265625, 0.03741455078125, 0.046539306640625, 0.047149658203125, 0.0147705078125, 0.05499267578125, -0.0307159423828125, 0.048828125, 0.053192138671875, 0.0050048828125, 0.0743408203125, 0.033599853515625, -0.0294647216796875, 0.0518798828125, 0.057220458984375, -0.0171966552734375, 0.0423583984375, 0.0168304443359375, -0.0163116455078125, -0.0114288330078125, 0.01479339599609375, -0.039825439453125, 0.0274810791015625, 
0.01165771484375, -0.00881195068359375, -0.0074005126953125, 0.0030384063720703125, -0.003963470458984375, -0.029083251953125, -0.01381683349609375, 0.0418701171875, 0.00978851318359375, -0.0225677490234375, 0.0792236328125, 0.00927734375, 0.06591796875, -0.0304718017578125, -0.007244110107421875, -0.0081329345703125, 0.00930023193359375, -0.01849365234375, -0.037628173828125, 0.01070404052734375, -0.0103759765625, -0.0140838623046875, 0.00771331787109375, 0.038421630859375, -0.0260772705078125, -0.0557861328125, -0.006103515625, 0.02032470703125, 0.028533935546875, 0.005062103271484375, -0.07232666015625, 0.01320648193359375, 0.0158538818359375, -0.0195770263671875, 0.01396942138671875, 0.0037937164306640625, 0.00882720947265625, 0.05767822265625, 0.06036376953125, -0.01117706298828125, 0.029693603515625, -0.02825927734375, 0.057586669921875, -0.043365478515625, -0.0298309326171875, -0.068115234375, 0.047576904296875, 0.01457977294921875, -0.034942626953125, 0.06561279296875, 0.041839599609375, 0.057830810546875, -0.00504302978515625, 0.0478515625, -0.007171630859375, 0.02191162109375, -0.042999267578125, 0.06689453125, -0.032867431640625, 0.016387939453125, 0.00254058837890625, -0.057586669921875, 0.006927490234375, 0.072265625, -0.00514984130859375, 0.007457733154296875, 0.0499267578125, 0.073974609375, 0.0158233642578125, -0.011474609375, 0.00591278076171875, 0.04034423828125, 0.032257080078125, 0.052734375, 0.068603515625, -0.06634521484375, 0.034881591796875, -0.056121826171875, -0.00830841064453125, -0.0074005126953125, -0.05181884765625, -0.06475830078125, -0.046844482421875, -0.04217529296875, -0.05938720703125, -0.01206207275390625, 0.0718994140625, 0.0478515625, -0.056304931640625, -0.0220794677734375, -0.0109100341796875, 0.0080108642578125, -0.0015888214111328125, -0.0178375244140625, 0.03912353515625, -0.0106048583984375, -0.047271728515625, 0.00634002685546875, -0.02557373046875, 0.0231781005859375, -0.036834716796875, -0.0182647705078125, 
-0.00908660888671875, 0.001857757568359375, 0.0283203125, 0.01450347900390625, -0.06951904296875, -0.009033203125, -0.0018186569213867188, -0.0235748291015625, -0.006511688232421875, 0.009490966796875, -0.06475830078125, 0.01132965087890625, 0.0247802734375, 0.0156402587890625, 0.0263671875, -0.037109375, 0.021820068359375, -0.034515380859375, 0.00817108154296875, 0.014190673828125, 0.0121002197265625, 0.01030731201171875, -0.050323486328125, 0.00621795654296875, 0.0123443603515625, -0.04364013671875, -0.0777587890625, -0.004016876220703125, -0.0787353515625, -0.02911376953125, 0.1070556640625, 0.0096893310546875, -0.0272216796875, 0.0151214599609375, -0.034454345703125, 0.052459716796875, -0.027587890625, 0.052825927734375, 0.033416748046875, -0.01316070556640625, -0.01497650146484375, -0.033294677734375, 0.031463623046875, 0.040924072265625, -0.041107177734375, -0.00789642333984375, 0.0097808837890625, 0.033172607421875, 0.00971221923828125, 0.04486083984375, 0.015655517578125, 0.0240631103515625, 0.0106658935546875, 0.0113067626953125, -0.0096893310546875, 0.00799560546875, -0.035858154296875, -0.01317596435546875, 0.012054443359375, -0.0293121337890625 ] ]
GeorgiaTechResearchInstitute/galactica-6.7b-evol-instruct-70k
2023-06-17T00:57:35.000Z
[ "transformers", "pytorch", "opt", "text-generation", "galactica", "wizardlm", "alpaca", "dataset:victor123/evol_instruct_70k", "arxiv:2304.12244", "license:cc-by-nc-4.0", "has_space", "text-generation-inference", "region:us" ]
text-generation
GeorgiaTechResearchInstitute
null
null
GeorgiaTechResearchInstitute/galactica-6.7b-evol-instruct-70k
16
5,836
transformers
2023-05-04T20:52:23
--- license: cc-by-nc-4.0 datasets: - victor123/evol_instruct_70k library_name: transformers pipeline_tag: text-generation tags: - galactica - wizardlm - alpaca - opt inference: false --- # GALACTICA 6.7B Evol-Instruct GALACTICA 6.7B fine-tuned on the Evol-Instruct 70k dataset. The model card from the original Galactica repo can be found [here](https://github.com/paperswithcode/galai/blob/main/docs/model_card.md), and the original paper [here](https://galactica.org/paper.pdf). The HF dataset for Evol-Instruct-70k can be found [here](https://huggingface.co/datasets/victor123/evol_instruct_70k), and the original GitHub repo for WizardLM is [here](https://github.com/nlpxucan/WizardLM). ## Model Details The GALACTICA models are trained on a large-scale scientific corpus and are designed to perform scientific tasks. ## Model Use The GALACTICA model card specifies that the primary indended users of the GALACTICA models are researchers studying language models applied to the scientific domain, and it cautions against production use of GALACTICA without safeguards due to the potential for the model to produce inaccurate information. The original GALACTICA models are available under a non-commercial CC BY-NC 4.0 license, and models based on the Evol-Instruct-70k dataset are additionally subject to the [OpenAI Terms of Service](https://openai.com/policies/terms-of-use). <details> <summary> Example Outputs </summary> *__Full Prompt:__* ``` Below is an instruction that describes a task. Write a response that appropriately completes the request. ### Instruction: Write out Maxwell's equations and explain each one. ### Response: ``` *__Response:__* Maxwell's equations are a set of four equations that describe the behavior of electromagnetic fields. These equations were developed by James Clerk Maxwell in 1864 and are widely used in the study of electromagnetism. 1. 
Gauss's Law: This equation states that the electric charge inside a closed surface is equal to the flux of electric field through the surface. Mathematically, it can be expressed as: $$\nabla \bullet \textbf{E} = \frac{\rho}{\epsilon_0}$$ where E is the electric field, ρ is the charge density, ε0 is the electric constant, and ∇ is the divergence operator. 2. Faraday's Law: This equation states that the electric field around a closed loop is proportional to the time rate of change of magnetic flux through the loop. Mathematically, it can be expressed as: $$\nabla \times \textbf{E} = -\frac{\partial\textbf{B}}{\partial t}$$ where B is the magnetic field, t is time, and ∇ × is the curl operator. 3. Gauss's Law for Magnetism: This equation states that the magnetic charge inside a closed surface is equal to the flux of magnetic field through the surface. Mathematically, it can be expressed as: $$\nabla \bullet \textbf{B} = 0$$ where B is the magnetic field. 4. Ampere's Law: This equation states that the magnetic field around a closed loop is proportional to the current through the loop. Mathematically, it can be expressed as: $$\nabla \times \textbf{B} = \mu_0\textbf{J} + \mu_0\epsilon_0\frac{\partial \textbf{E}}{\partial t}$$ where J is the current density, μ0 is the magnetic constant, and ε0 is the electric constant. These equations form the foundation of all electromagnetic theory and are used to describe the behavior of electromagnetic fields in a wide range of applications, from radio waves to high-energy particles. </details> ## Training Data This model was trained by fine-tuning pre-trained GALACTICA 6.7B on the Evol-Instruct-70k dataset. GALACTICA models were trained on 106 billion tokens of open-access scientific text and data, including papers, textbooks, scientific websites, encyclopedias, and more. 
Fine-tuning the base GALACTICA models on the 70k instruction-response pairs in the Evol-Instruct-70k dataset allows users to query the resulting model in an instruct-response fashion. ## How to Use The GALACTICA Evol-Instruct-70K weights are made available for use with the `transformers` library. <details> <summary> Click to expand </summary> ```python # pip install accelerate from transformers import AutoTokenizer, AutoModelForCausalLM tokenizer = AutoTokenizer.from_pretrained("GeorgiaTechResearchInstitute/galactica-6.7b-evol-instruct-70k") model = AutoModelForCausalLM.from_pretrained("GeorgiaTechResearchInstitute/galactica-6.7b-evol-instruct-70k", device_map="auto", torch_dtype=torch.float16) # the evol-instruct models were fine-tuned with the same hidden prompts as the Alpaca project no_input_prompt_template = ("Below is an instruction that describes a task. " "Write a response that appropriately completes the request.\n\n" "### Instruction:\n{instruction}\n\n### Response:") prompt = "Write out Maxwell's equations and explain the meaning of each one." formatted_prompt = no_input_prompt_template.format_map({'instruction': prompt}) tokenized_prompt = tokenizer(formatted_prompt, return_tensors="pt").input_ids.to(model.device) out_tokens = model.generate(tokenized_prompt) print(tokenizer.batch_decode(out_tokens, skip_special_tokens=False, clean_up_tokenization_spaces=False)) ``` </details> ## Training Resources GALACTICA 6.7B Evol-Instruct was fine-tuned in about 22 hours using 8 A100 80GB GPUS, 16-bit mixed-precision, an effective batch-size of 64, and with a maximum context window of 2048 tokens. This model was trained using full-shard data parallelism. ## Performance and Limitations Qualitative evaluation suggests that the evol-instruct-70k fine-tuned Galactica models are signficantly more controllable and attentive to user prompts than the Alpaca fine-tuned GALPACA models. 
## Works Cited ```bibtex @inproceedings{GALACTICA, title={GALACTICA: A Large Language Model for Science}, author={Ross Taylor and Marcin Kardas and Guillem Cucurull and Thomas Scialom and Anthony Hartshorn and Elvis Saravia and Andrew Poulton and Viktor Kerkez and Robert Stojnic}, year={2022} } ``` ```bibtex @misc{xu2023wizardlm, title={WizardLM: Empowering Large Language Models to Follow Complex Instructions}, author={Can Xu and Qingfeng Sun and Kai Zheng and Xiubo Geng and Pu Zhao and Jiazhan Feng and Chongyang Tao and Daxin Jiang}, year={2023}, eprint={2304.12244}, archivePrefix={arXiv}, primaryClass={cs.CL} } ```
6,410
[ [ -0.04302978515625, -0.063232421875, 0.038299560546875, 0.0201416015625, 0.003448486328125, -0.01142120361328125, -0.01160430908203125, -0.02581787109375, 0.0206146240234375, 0.033447265625, -0.04742431640625, -0.032928466796875, -0.02508544921875, 0.00771331787109375, -0.00867462158203125, 0.07525634765625, -0.01531219482421875, 0.0033397674560546875, -0.0156097412109375, 0.00848388671875, -0.0235595703125, -0.02960205078125, -0.033843994140625, -0.023895263671875, 0.044952392578125, 0.0149688720703125, 0.04302978515625, 0.04791259765625, 0.06304931640625, 0.023406982421875, -0.0299835205078125, 0.02008056640625, -0.04034423828125, -0.0231781005859375, -0.0102081298828125, -0.033294677734375, -0.04522705078125, 0.01395416259765625, 0.02899169921875, 0.042938232421875, -0.01024627685546875, 0.03240966796875, -0.0205078125, 0.04229736328125, -0.031524658203125, 0.029388427734375, -0.04180908203125, -0.006664276123046875, 0.001407623291015625, -0.015594482421875, -0.01523590087890625, -0.0119781494140625, 0.0012331008911132812, -0.07080078125, 0.0276947021484375, 0.014404296875, 0.09686279296875, 0.0170745849609375, -0.025604248046875, -0.043182373046875, -0.069091796875, 0.047760009765625, -0.0526123046875, 0.02587890625, 0.0181121826171875, 0.0246124267578125, -0.0209197998046875, -0.06683349609375, -0.06390380859375, -0.037445068359375, -0.0008673667907714844, 0.0264892578125, -0.0030155181884765625, 0.0194091796875, 0.048248291015625, 0.03057861328125, -0.03472900390625, 0.004634857177734375, -0.06787109375, 0.010162353515625, 0.053253173828125, 0.0202178955078125, 0.01152801513671875, -0.0163726806640625, -0.0227508544921875, -0.028839111328125, -0.0687255859375, 0.0106964111328125, 0.03509521484375, 0.01360321044921875, -0.021636962890625, 0.04534912109375, -0.0034694671630859375, 0.03826904296875, 0.032928466796875, 0.004268646240234375, 0.032379150390625, -0.028228759765625, -0.0423583984375, -0.03179931640625, 0.04974365234375, 0.027679443359375, 
-0.0039825439453125, -0.0174102783203125, -0.01593017578125, -0.007671356201171875, 0.0274505615234375, -0.06536865234375, -0.003467559814453125, 0.02032470703125, -0.036956787109375, -0.01129913330078125, 0.00970458984375, -0.0667724609375, -0.024444580078125, -0.037567138671875, 0.0357666015625, -0.0013475418090820312, 0.0018625259399414062, 0.01363372802734375, 0.003787994384765625, 0.019805908203125, 0.005275726318359375, -0.07806396484375, 0.01267242431640625, 0.02880859375, 0.051513671875, 0.0223388671875, -0.03826904296875, -0.0086669921875, -0.01146697998046875, -0.0113372802734375, 0.046905517578125, -0.0233154296875, -0.032501220703125, -0.006378173828125, 0.0226898193359375, -0.0304107666015625, -0.031890869140625, 0.041748046875, -0.048248291015625, 0.023773193359375, -0.0125885009765625, -0.0509033203125, -0.0297393798828125, -0.0240936279296875, -0.052978515625, 0.051116943359375, 0.0020198822021484375, -0.0157623291015625, 0.0123138427734375, -0.06036376953125, 0.004444122314453125, -0.0220794677734375, 0.023956298828125, -0.041351318359375, 0.00046563148498535156, 0.0071258544921875, 0.0360107421875, -0.0309906005859375, 0.00673675537109375, -0.029388427734375, -0.01485443115234375, 0.0058135986328125, -0.0208892822265625, 0.0631103515625, 0.037872314453125, -0.024200439453125, 0.00972747802734375, -0.05877685546875, -0.01493072509765625, 0.0229949951171875, -0.0094146728515625, 0.004627227783203125, -0.015838623046875, -0.0183868408203125, 0.0267333984375, 0.0156097412109375, -0.03179931640625, 0.0206451416015625, -0.03533935546875, 0.0168304443359375, 0.03936767578125, -0.0029354095458984375, 0.05242919921875, -0.0118560791015625, 0.040191650390625, -0.01090240478515625, 0.0218505859375, -0.0277557373046875, -0.037841796875, -0.07354736328125, -0.0025615692138671875, 0.007068634033203125, 0.04248046875, -0.030792236328125, 0.043701171875, -0.002147674560546875, -0.0770263671875, -0.043670654296875, 0.00946044921875, 0.031219482421875, 
0.042388916015625, 0.0496826171875, 0.010894775390625, -0.03753662109375, -0.051116943359375, 0.0076446533203125, -0.005031585693359375, -0.0016298294067382812, 0.03912353515625, 0.059112548828125, -0.01593017578125, 0.049896240234375, -0.04669189453125, -0.01160430908203125, -0.0030651092529296875, -0.003204345703125, 0.03912353515625, 0.0301055908203125, 0.049102783203125, -0.042755126953125, -0.04144287109375, 0.0177154541015625, -0.0711669921875, -0.0143585205078125, -0.01477813720703125, -0.016448974609375, 0.0013408660888671875, 0.05523681640625, -0.075439453125, 0.0306243896484375, 0.037384033203125, -0.06060791015625, 0.0350341796875, -0.0254058837890625, 0.0009546279907226562, -0.0777587890625, 0.0295562744140625, 0.00894927978515625, 0.004913330078125, -0.061798095703125, 0.0236663818359375, 0.0160980224609375, -0.005229949951171875, -0.046478271484375, 0.061798095703125, -0.052642822265625, 0.019012451171875, -0.0189361572265625, 0.0009694099426269531, 0.001689910888671875, 0.05419921875, -0.033172607421875, 0.05499267578125, 0.03485107421875, -0.0650634765625, 0.041839599609375, 0.0229339599609375, -0.00736236572265625, 0.0221710205078125, -0.06451416015625, 0.01488494873046875, -0.0255279541015625, 0.023956298828125, -0.032257080078125, 0.0017194747924804688, 0.01763916015625, -0.022247314453125, 0.0177154541015625, -0.0007557868957519531, -0.0103759765625, -0.05224609375, 0.007144927978515625, 0.01444244384765625, 0.04669189453125, -0.060394287109375, 0.05224609375, 0.0199127197265625, -0.00020694732666015625, -0.055816650390625, -0.002197265625, -0.011627197265625, -0.02880859375, -0.059478759765625, 0.036956787109375, -0.0169219970703125, -0.032257080078125, -0.01137542724609375, 0.007167816162109375, 0.01021575927734375, 0.005062103271484375, 0.0281219482421875, 0.04107666015625, -0.016326904296875, 0.006099700927734375, 0.005893707275390625, -0.014129638671875, 0.011627197265625, -0.0080413818359375, 0.05169677734375, -0.0236968994140625, 
-0.0169677734375, -0.039215087890625, 0.004974365234375, 0.0615234375, -0.01210784912109375, 0.06561279296875, 0.038421630859375, -0.023468017578125, 0.004299163818359375, -0.0302581787109375, -0.0203399658203125, -0.0379638671875, 0.0283050537109375, -0.0230712890625, -0.0276031494140625, 0.07171630859375, 0.01959228515625, 0.028717041015625, 0.07403564453125, 0.051055908203125, -0.003887176513671875, 0.060455322265625, 0.04388427734375, 0.0166778564453125, 0.04840087890625, -0.05535888671875, 0.010162353515625, -0.058441162109375, -0.050079345703125, -0.05035400390625, -0.012908935546875, -0.02227783203125, -0.035369873046875, 0.0035152435302734375, 0.0202484130859375, -0.033172607421875, 0.039337158203125, -0.0229034423828125, 0.04791259765625, 0.02288818359375, -0.01244354248046875, -0.00450897216796875, 0.0043487548828125, 0.003299713134765625, 0.015106201171875, -0.051300048828125, -0.0406494140625, 0.09039306640625, 0.04547119140625, 0.0762939453125, -0.0009412765502929688, 0.0396728515625, -0.00804901123046875, 0.0292205810546875, -0.04376220703125, 0.025390625, -0.005828857421875, -0.039398193359375, -0.0097198486328125, -0.02496337890625, -0.07696533203125, 0.03094482421875, -0.023895263671875, -0.041595458984375, 0.038482666015625, 0.01445770263671875, -0.04107666015625, 0.022369384765625, -0.0830078125, 0.06597900390625, -0.0062713623046875, -0.01384735107421875, -0.00678253173828125, -0.0343017578125, 0.0158843994140625, 0.00809478759765625, -0.00560760498046875, -0.00678253173828125, 0.022979736328125, 0.052581787109375, -0.04779052734375, 0.0584716796875, -0.0228118896484375, -0.01375579833984375, 0.039215087890625, -0.028594970703125, 0.055908203125, 0.00466156005859375, 0.0118560791015625, 0.0194549560546875, -0.017120361328125, -0.019378662109375, -0.04718017578125, 0.055877685546875, -0.06610107421875, -0.04254150390625, -0.01520538330078125, -0.042755126953125, 0.0203704833984375, 0.015594482421875, 0.03466796875, 0.027801513671875, 
-0.013031005859375, 0.0082550048828125, 0.05523681640625, -0.02606201171875, 0.04132080078125, 0.042572021484375, -0.031097412109375, -0.0194244384765625, 0.061065673828125, 0.0159759521484375, 0.0273895263671875, 0.01320648193359375, -0.00643157958984375, -0.038421630859375, -0.0240631103515625, -0.05035400390625, 0.039703369140625, -0.049835205078125, -0.0213775634765625, -0.061309814453125, -0.0283966064453125, -0.017181396484375, -0.0273590087890625, -0.0280303955078125, -0.03155517578125, -0.049896240234375, -0.00722503662109375, 0.030181884765625, 0.050140380859375, 0.0037136077880859375, 0.031707763671875, -0.049896240234375, 0.0257568359375, 0.01629638671875, 0.0157623291015625, 0.0018606185913085938, -0.0621337890625, -0.032928466796875, 0.008697509765625, -0.0285797119140625, -0.057220458984375, 0.042388916015625, 0.00988006591796875, 0.041839599609375, 0.0231475830078125, -0.0289306640625, 0.05914306640625, -0.04425048828125, 0.058563232421875, 0.0149078369140625, -0.06451416015625, 0.0245819091796875, -0.0185089111328125, 0.0169677734375, 0.014495849609375, 0.0057220458984375, -0.029937744140625, -0.0301361083984375, -0.0692138671875, -0.05810546875, 0.04852294921875, 0.0240936279296875, 0.01247406005859375, 0.017303466796875, 0.0293121337890625, 0.0005602836608886719, 0.0026988983154296875, -0.042144775390625, -0.043365478515625, -0.029632568359375, -0.0097808837890625, -0.01953125, 0.0023975372314453125, -0.017425537109375, -0.0192108154296875, 0.04949951171875, -0.0020751953125, 0.05169677734375, 0.00954437255859375, -0.011444091796875, -0.0177764892578125, 0.005054473876953125, 0.07769775390625, 0.059417724609375, -0.0237884521484375, -0.01134490966796875, 0.019989013671875, -0.04852294921875, 0.004852294921875, 0.037017822265625, -0.021697998046875, 0.00518035888671875, 0.005603790283203125, 0.08441162109375, -0.0145721435546875, -0.034759521484375, 0.0173492431640625, -0.01512908935546875, -0.0218353271484375, -0.0188140869140625, 
0.01488494873046875, 0.01267242431640625, 0.01490020751953125, 0.02923583984375, 0.0209808349609375, -0.00600433349609375, -0.0254364013671875, -0.0003368854522705078, 0.02825927734375, -0.014678955078125, -0.035888671875, 0.06890869140625, 0.0244293212890625, -0.0225677490234375, 0.0284423828125, -0.0176239013671875, -0.0219268798828125, 0.09381103515625, 0.05126953125, 0.06231689453125, -0.019256591796875, 0.02508544921875, 0.0460205078125, 0.028656005859375, -0.0081024169921875, 0.0234527587890625, 0.04144287109375, -0.038848876953125, -0.0194854736328125, -0.0496826171875, -0.0259857177734375, 0.015228271484375, -0.039886474609375, 0.0178985595703125, -0.042938232421875, 0.007328033447265625, 0.01517486572265625, 0.0125885009765625, -0.0809326171875, 0.019439697265625, -0.0008807182312011719, 0.07421875, -0.0618896484375, 0.037872314453125, 0.045013427734375, -0.04681396484375, -0.078369140625, -0.0259246826171875, -0.010009765625, -0.053497314453125, 0.041473388671875, 0.006168365478515625, -0.004253387451171875, 0.0019702911376953125, -0.04901123046875, -0.0931396484375, 0.10418701171875, 0.019622802734375, -0.047576904296875, 0.0155487060546875, -0.01523590087890625, 0.061065673828125, -0.043914794921875, 0.03375244140625, 0.035430908203125, 0.034423828125, 0.000766754150390625, -0.07373046875, 0.00122833251953125, -0.030181884765625, -0.0167694091796875, 0.00954437255859375, -0.0721435546875, 0.07940673828125, -0.03521728515625, -0.0184478759765625, 0.0107269287109375, 0.0745849609375, 0.007244110107421875, 0.04052734375, 0.01358795166015625, 0.03680419921875, 0.07403564453125, -0.006866455078125, 0.08233642578125, -0.036529541015625, 0.0582275390625, 0.0457763671875, -0.01177978515625, 0.031890869140625, 0.0253448486328125, -0.022430419921875, 0.033721923828125, 0.04010009765625, -0.0297393798828125, 0.028076171875, 0.00798797607421875, -0.0078582763671875, 0.01535797119140625, -0.003173828125, -0.049774169921875, 0.0219268798828125, 0.032623291015625, 
-0.04693603515625, 0.0017375946044921875, -0.006488800048828125, 0.017852783203125, -0.0204620361328125, -0.03875732421875, 0.0289306640625, 0.0164031982421875, -0.059051513671875, 0.061248779296875, 0.004436492919921875, 0.04205322265625, -0.042938232421875, 0.005626678466796875, -0.0262451171875, 0.01079559326171875, -0.0264434814453125, -0.0335693359375, 0.00774383544921875, 0.004425048828125, -0.0297088623046875, -0.002872467041015625, 0.039642333984375, -0.0391845703125, -0.04583740234375, 0.00971221923828125, 0.0235748291015625, 0.0130615234375, 0.0218505859375, -0.03826904296875, -0.0204315185546875, 0.0034198760986328125, -0.035797119140625, 0.0087127685546875, 0.000037729740142822266, 0.018218994140625, 0.06304931640625, 0.057159423828125, -0.00405120849609375, 0.029449462890625, -0.00811004638671875, 0.06829833984375, -0.052093505859375, -0.033905029296875, -0.045166015625, 0.031768798828125, -0.009552001953125, -0.052581787109375, 0.061737060546875, 0.032318115234375, 0.05810546875, -0.03240966796875, 0.03924560546875, -0.019561767578125, 0.0172271728515625, -0.061248779296875, 0.06463623046875, -0.0340576171875, 0.018585205078125, -0.0125579833984375, -0.09478759765625, -0.0191192626953125, 0.0447998046875, -0.0205841064453125, 0.019622802734375, 0.0667724609375, 0.0865478515625, -0.00386810302734375, -0.005092620849609375, 0.0205078125, 0.0253448486328125, 0.044403076171875, 0.0198974609375, 0.046630859375, -0.041107177734375, 0.038421630859375, -0.0256500244140625, -0.0028171539306640625, -0.010833740234375, -0.04034423828125, -0.07525634765625, -0.03509521484375, -0.0008578300476074219, -0.037689208984375, -0.00908660888671875, 0.07403564453125, 0.036529541015625, -0.0584716796875, -0.009307861328125, -0.00984954833984375, 0.0122528076171875, -0.0258941650390625, -0.0169677734375, 0.049102783203125, -0.0196990966796875, -0.07232666015625, 0.037750244140625, -0.0007710456848144531, 0.0225677490234375, -0.0248565673828125, -0.0208740234375, 
0.006595611572265625, 0.0254058837890625, 0.036773681640625, 0.049774169921875, -0.041656494140625, -0.0052032470703125, 0.0247650146484375, -0.0227203369140625, -0.0119476318359375, 0.0479736328125, -0.04180908203125, 0.029449462890625, 0.035430908203125, 0.022247314453125, 0.032928466796875, -0.007312774658203125, 0.03662109375, -0.019927978515625, 0.01116180419921875, 0.00151824951171875, 0.037628173828125, 0.00411224365234375, -0.037994384765625, 0.032318115234375, 0.0177001953125, -0.06451416015625, -0.0537109375, -0.0012483596801757812, -0.07879638671875, -0.0013828277587890625, 0.0980224609375, -0.010772705078125, -0.0194854736328125, -0.003692626953125, -0.0216827392578125, 0.0098724365234375, -0.035675048828125, 0.07537841796875, 0.037109375, -0.005420684814453125, -0.00005429983139038086, -0.045074462890625, 0.0396728515625, 0.026947021484375, -0.0672607421875, 0.0014133453369140625, 0.030059814453125, 0.006351470947265625, 0.029876708984375, 0.032257080078125, 0.00992584228515625, 0.022857666015625, 0.01520538330078125, 0.0208282470703125, -0.024505615234375, -0.03021240234375, -0.0345458984375, 0.005214691162109375, -0.0357666015625, -0.0024700164794921875 ] ]
cross-encoder/ms-marco-electra-base
2021-08-05T08:40:12.000Z
[ "transformers", "pytorch", "electra", "text-classification", "license:apache-2.0", "endpoints_compatible", "region:us" ]
text-classification
cross-encoder
null
null
cross-encoder/ms-marco-electra-base
3
5,835
transformers
2022-03-02T23:29:05
--- license: apache-2.0 --- # Cross-Encoder for MS Marco This model was trained on the [MS Marco Passage Ranking](https://github.com/microsoft/MSMARCO-Passage-Ranking) task. The model can be used for Information Retrieval: Given a query, encode the query will all possible passages (e.g. retrieved with ElasticSearch). Then sort the passages in a decreasing order. See [SBERT.net Retrieve & Re-rank](https://www.sbert.net/examples/applications/retrieve_rerank/README.html) for more details. The training code is available here: [SBERT.net Training MS Marco](https://github.com/UKPLab/sentence-transformers/tree/master/examples/training/ms_marco) ## Usage with Transformers ```python from transformers import AutoTokenizer, AutoModelForSequenceClassification import torch model = AutoModelForSequenceClassification.from_pretrained('model_name') tokenizer = AutoTokenizer.from_pretrained('model_name') features = tokenizer(['How many people live in Berlin?', 'How many people live in Berlin?'], ['Berlin has a population of 3,520,031 registered inhabitants in an area of 891.82 square kilometers.', 'New York City is famous for the Metropolitan Museum of Art.'], padding=True, truncation=True, return_tensors="pt") model.eval() with torch.no_grad(): scores = model(**features).logits print(scores) ``` ## Usage with SentenceTransformers The usage becomes easier when you have [SentenceTransformers](https://www.sbert.net/) installed. 
Then, you can use the pre-trained models like this: ```python from sentence_transformers import CrossEncoder model = CrossEncoder('model_name', max_length=512) scores = model.predict([('Query', 'Paragraph1'), ('Query', 'Paragraph2') , ('Query', 'Paragraph3')]) ``` ## Performance In the following table, we provide various pre-trained Cross-Encoders together with their performance on the [TREC Deep Learning 2019](https://microsoft.github.io/TREC-2019-Deep-Learning/) and the [MS Marco Passage Reranking](https://github.com/microsoft/MSMARCO-Passage-Ranking/) dataset. | Model-Name | NDCG@10 (TREC DL 19) | MRR@10 (MS Marco Dev) | Docs / Sec | | ------------- |:-------------| -----| --- | | **Version 2 models** | | | | cross-encoder/ms-marco-TinyBERT-L-2-v2 | 69.84 | 32.56 | 9000 | cross-encoder/ms-marco-MiniLM-L-2-v2 | 71.01 | 34.85 | 4100 | cross-encoder/ms-marco-MiniLM-L-4-v2 | 73.04 | 37.70 | 2500 | cross-encoder/ms-marco-MiniLM-L-6-v2 | 74.30 | 39.01 | 1800 | cross-encoder/ms-marco-MiniLM-L-12-v2 | 74.31 | 39.02 | 960 | **Version 1 models** | | | | cross-encoder/ms-marco-TinyBERT-L-2 | 67.43 | 30.15 | 9000 | cross-encoder/ms-marco-TinyBERT-L-4 | 68.09 | 34.50 | 2900 | cross-encoder/ms-marco-TinyBERT-L-6 | 69.57 | 36.13 | 680 | cross-encoder/ms-marco-electra-base | 71.99 | 36.41 | 340 | **Other models** | | | | nboost/pt-tinybert-msmarco | 63.63 | 28.80 | 2900 | nboost/pt-bert-base-uncased-msmarco | 70.94 | 34.75 | 340 | nboost/pt-bert-large-msmarco | 73.36 | 36.48 | 100 | Capreolus/electra-base-msmarco | 71.23 | 36.89 | 340 | amberoad/bert-multilingual-passage-reranking-msmarco | 68.40 | 35.54 | 330 | sebastian-hofstaetter/distilbert-cat-margin_mse-T2-msmarco | 72.82 | 37.88 | 720 Note: Runtime was computed on a V100 GPU.
3,233
[ [ -0.03228759765625, -0.043670654296875, 0.0250396728515625, 0.01169586181640625, -0.01270294189453125, 0.01073455810546875, -0.01340484619140625, -0.0384521484375, 0.025146484375, 0.0255584716796875, -0.041229248046875, -0.05108642578125, -0.057952880859375, 0.003082275390625, -0.033294677734375, 0.059326171875, -0.0014905929565429688, 0.01232147216796875, -0.01372528076171875, -0.00827789306640625, -0.01934814453125, -0.03082275390625, -0.041351318359375, -0.0218048095703125, 0.0360107421875, 0.0159454345703125, 0.05816650390625, 0.02978515625, 0.041961669921875, 0.032958984375, -0.0084075927734375, 0.007038116455078125, -0.01416015625, 0.00011479854583740234, 0.00521087646484375, -0.0286865234375, -0.041717529296875, -0.0091400146484375, 0.033416748046875, 0.0266265869140625, 0.0005517005920410156, 0.0199737548828125, -0.00020241737365722656, 0.043304443359375, -0.0293731689453125, -0.0037670135498046875, -0.02587890625, 0.0184326171875, -0.01495361328125, -0.018768310546875, -0.03521728515625, -0.0162811279296875, 0.013275146484375, -0.043914794921875, 0.0298919677734375, 0.01174163818359375, 0.09521484375, 0.0262603759765625, -0.016448974609375, -0.0196533203125, -0.035369873046875, 0.05419921875, -0.051513671875, 0.053314208984375, 0.01372528076171875, 0.01323699951171875, 0.0088958740234375, -0.07318115234375, -0.033660888671875, -0.0162811279296875, -0.01438140869140625, 0.0192413330078125, -0.031829833984375, -0.006412506103515625, 0.0313720703125, 0.0309600830078125, -0.07501220703125, -0.0060882568359375, -0.053802490234375, -0.00921630859375, 0.04913330078125, 0.0202178955078125, 0.020172119140625, -0.0188446044921875, -0.024200439453125, -0.01070404052734375, -0.03790283203125, 0.0163116455078125, 0.0207366943359375, 0.0008816719055175781, -0.01544952392578125, 0.03082275390625, -0.01776123046875, 0.059600830078125, 0.0084075927734375, 0.00708770751953125, 0.058074951171875, -0.019500732421875, -0.0178985595703125, 0.0018720626831054688, 
0.07373046875, 0.021514892578125, 0.00785064697265625, -0.0095367431640625, -0.0169677734375, -0.01287078857421875, 0.0303497314453125, -0.066162109375, -0.020111083984375, 0.0219573974609375, -0.040252685546875, -0.01009368896484375, 0.01226806640625, -0.06396484375, 0.01209259033203125, -0.0098724365234375, 0.04547119140625, -0.030059814453125, 0.002422332763671875, 0.0177459716796875, -0.0106658935546875, 0.0216522216796875, 0.01345062255859375, -0.0557861328125, 0.0010137557983398438, 0.0259857177734375, 0.070556640625, -0.0086669921875, -0.028472900390625, -0.01214599609375, -0.0028209686279296875, -0.01250457763671875, 0.04266357421875, -0.03546142578125, -0.0235137939453125, -0.0054473876953125, 0.021514892578125, -0.01132965087890625, -0.0229034423828125, 0.053619384765625, -0.034820556640625, 0.038299560546875, -0.0094757080078125, -0.0261993408203125, -0.0118255615234375, 0.0177154541015625, -0.059112548828125, 0.091064453125, 0.00298309326171875, -0.0638427734375, 0.0123443603515625, -0.052947998046875, -0.0256805419921875, -0.0120697021484375, 0.0030975341796875, -0.05743408203125, 0.003437042236328125, 0.030609130859375, 0.01934814453125, -0.0241241455078125, 0.007472991943359375, -0.0130767822265625, -0.0340576171875, 0.0123291015625, -0.031219482421875, 0.08197021484375, 0.0298309326171875, -0.037017822265625, 0.004039764404296875, -0.050628662109375, 0.00893402099609375, 0.0214385986328125, -0.031890869140625, -0.0004444122314453125, -0.021514892578125, 0.0103912353515625, 0.0301055908203125, 0.032958984375, -0.037689208984375, 0.00782012939453125, -0.0210113525390625, 0.036376953125, 0.03466796875, -0.0081939697265625, 0.02587890625, -0.0226287841796875, 0.05029296875, 0.00949859619140625, 0.032623291015625, 0.0006999969482421875, -0.047607421875, -0.06622314453125, -0.01016998291015625, 0.038330078125, 0.044158935546875, -0.055450439453125, 0.04083251953125, -0.038970947265625, -0.0531005859375, -0.062042236328125, -0.007511138916015625, 
0.031524658203125, 0.025482177734375, 0.049713134765625, -0.006656646728515625, -0.055084228515625, -0.07501220703125, -0.025177001953125, 0.001766204833984375, 0.0030117034912109375, 0.0179595947265625, 0.048370361328125, -0.019775390625, 0.055328369140625, -0.040008544921875, -0.0163116455078125, -0.034423828125, 0.00023567676544189453, 0.019012451171875, 0.050018310546875, 0.04730224609375, -0.052642822265625, -0.0408935546875, -0.0142669677734375, -0.052215576171875, 0.005329132080078125, 0.00275421142578125, -0.010467529296875, 0.02032470703125, 0.04638671875, -0.051971435546875, 0.051177978515625, 0.037109375, -0.034393310546875, 0.027984619140625, -0.03302001953125, 0.0218658447265625, -0.09063720703125, 0.007617950439453125, -0.002506256103515625, -0.01189422607421875, -0.03875732421875, -0.011932373046875, 0.006992340087890625, -0.0018749237060546875, -0.0261077880859375, 0.02484130859375, -0.04547119140625, -0.0025634765625, 0.0092010498046875, 0.005832672119140625, 0.01277923583984375, 0.047637939453125, 0.024566650390625, 0.05841064453125, 0.039031982421875, -0.0266571044921875, 0.0180816650390625, 0.027587890625, -0.046142578125, 0.0284881591796875, -0.0693359375, -0.0006308555603027344, -0.00975799560546875, 0.00811004638671875, -0.07489013671875, 0.012664794921875, 0.0179290771484375, -0.0655517578125, 0.0234832763671875, -0.01026153564453125, -0.0296173095703125, -0.049560546875, -0.013458251953125, 0.024688720703125, 0.0377197265625, -0.035614013671875, 0.04351806640625, 0.0255279541015625, 0.0005121231079101562, -0.052947998046875, -0.091552734375, 0.01372528076171875, -0.00420379638671875, -0.055206298828125, 0.04754638671875, -0.01540374755859375, 0.0110931396484375, 0.0029296875, -0.003345489501953125, -0.003101348876953125, -0.008392333984375, 0.01454925537109375, 0.0247650146484375, -0.0140838623046875, 0.001178741455078125, 0.0009531974792480469, -0.0164031982421875, 0.005237579345703125, -0.0157012939453125, 0.048004150390625, 
-0.013275146484375, -0.0095367431640625, -0.018951416015625, 0.01491546630859375, 0.03729248046875, -0.042388916015625, 0.0540771484375, 0.061004638671875, -0.0242919921875, -0.0082550048828125, -0.031402587890625, -0.007556915283203125, -0.037811279296875, 0.03387451171875, -0.04351806640625, -0.0579833984375, 0.039794921875, 0.0227813720703125, 0.002025604248046875, 0.038360595703125, 0.03662109375, -0.0015096664428710938, 0.07733154296875, 0.036346435546875, -0.0036449432373046875, 0.04937744140625, -0.053619384765625, 0.022308349609375, -0.058074951171875, -0.044281005859375, -0.04962158203125, -0.033416748046875, -0.05133056640625, -0.0264129638671875, 0.0228729248046875, -0.0100860595703125, -0.0170135498046875, 0.05230712890625, -0.05633544921875, 0.0243072509765625, 0.0546875, 0.0208892822265625, 0.007785797119140625, 0.01097869873046875, -0.0192108154296875, -0.00926971435546875, -0.06256103515625, -0.024505615234375, 0.09783935546875, 0.0125885009765625, 0.0526123046875, 0.0012836456298828125, 0.057952880859375, 0.023101806640625, -0.002872467041015625, -0.03240966796875, 0.03302001953125, -0.0113372802734375, -0.058319091796875, -0.0172882080078125, -0.0316162109375, -0.08074951171875, 0.025543212890625, -0.0159759521484375, -0.043701171875, 0.038543701171875, -0.006702423095703125, -0.029266357421875, 0.02374267578125, -0.0419921875, 0.09814453125, -0.03125, -0.02685546875, -0.007358551025390625, -0.0555419921875, 0.01273345947265625, 0.01556396484375, 0.0025577545166015625, 0.006908416748046875, -0.0124969482421875, 0.056854248046875, -0.02764892578125, 0.026123046875, -0.0109405517578125, 0.01145172119140625, 0.01406097412109375, -0.007411956787109375, 0.02874755859375, -0.0005950927734375, -0.007801055908203125, 0.0250396728515625, -0.003253936767578125, -0.029876708984375, -0.03192138671875, 0.061004638671875, -0.06884765625, -0.031341552734375, -0.040771484375, -0.0272369384765625, -0.002227783203125, 0.0155029296875, 0.057769775390625, 
0.03167724609375, 0.0003180503845214844, 0.0323486328125, 0.055816650390625, -0.023590087890625, 0.04302978515625, 0.02838134765625, -0.003978729248046875, -0.055389404296875, 0.058074951171875, 0.02294921875, 0.012481689453125, 0.043121337890625, -0.013671875, -0.0357666015625, -0.040557861328125, -0.0266571044921875, 0.012664794921875, -0.039947509765625, -0.0167694091796875, -0.05487060546875, -0.0307769775390625, -0.037567138671875, -0.005535125732421875, -0.03125, -0.031982421875, -0.01806640625, -0.01311492919921875, 0.016448974609375, 0.045745849609375, 0.00983428955078125, 0.015289306640625, -0.046051025390625, 0.0160369873046875, 0.0004973411560058594, 0.011810302734375, -0.007965087890625, -0.0657958984375, -0.03411865234375, -0.0051422119140625, -0.0308380126953125, -0.06207275390625, 0.051055908203125, -0.00652313232421875, 0.054901123046875, 0.01139068603515625, 0.0043487548828125, 0.05633544921875, -0.029205322265625, 0.06744384765625, 0.012298583984375, -0.064697265625, 0.050018310546875, 0.0019855499267578125, 0.02935791015625, 0.04730224609375, 0.041900634765625, -0.040008544921875, -0.0194244384765625, -0.057891845703125, -0.071044921875, 0.0675048828125, 0.0224761962890625, -0.0082550048828125, 0.005504608154296875, 0.0015115737915039062, -0.00904083251953125, 0.02117919921875, -0.07275390625, -0.036865234375, -0.034027099609375, -0.0285491943359375, -0.0236968994140625, -0.0124053955078125, 0.01543426513671875, -0.047027587890625, 0.0582275390625, 0.0131378173828125, 0.042999267578125, 0.045501708984375, -0.0310516357421875, 0.0066070556640625, 0.00827789306640625, 0.05181884765625, 0.048309326171875, -0.020355224609375, -0.001758575439453125, 0.0159454345703125, -0.038360595703125, -0.01056671142578125, 0.01776123046875, -0.034881591796875, 0.0290069580078125, 0.025360107421875, 0.07562255859375, 0.0168609619140625, -0.0290374755859375, 0.048858642578125, 0.0038623809814453125, -0.020904541015625, -0.037689208984375, -0.0149383544921875, 
0.0015211105346679688, 0.0286865234375, 0.0185089111328125, 0.004810333251953125, 0.0191497802734375, -0.0310211181640625, 0.01174163818359375, 0.0269012451171875, -0.044158935546875, -0.01538848876953125, 0.0682373046875, 0.01306915283203125, -0.032073974609375, 0.0516357421875, 0.0017652511596679688, -0.061553955078125, 0.03900146484375, 0.02734375, 0.078857421875, -0.021392822265625, 0.0133514404296875, 0.052215576171875, 0.051361083984375, 0.005558013916015625, 0.0262603759765625, -0.011871337890625, -0.039886474609375, -0.0011186599731445312, -0.041351318359375, -0.00888824462890625, -0.005023956298828125, -0.050750732421875, 0.02227783203125, -0.013275146484375, -0.0245361328125, -0.01439666748046875, 0.0205078125, -0.06304931640625, 0.01245880126953125, 0.004138946533203125, 0.083740234375, -0.04107666015625, 0.08038330078125, 0.043487548828125, -0.066162109375, -0.043731689453125, -0.00975799560546875, -0.0299835205078125, -0.052215576171875, 0.04278564453125, 0.00948333740234375, 0.00875091552734375, -0.00002014636993408203, -0.0267486572265625, -0.061767578125, 0.111083984375, 0.015289306640625, -0.05126953125, -0.0137939453125, 0.033111572265625, 0.038604736328125, -0.02593994140625, 0.0509033203125, 0.0325927734375, 0.037139892578125, -0.01457977294921875, -0.0706787109375, 0.01125335693359375, -0.03704833984375, -0.003246307373046875, 0.006000518798828125, -0.062255859375, 0.07940673828125, -0.0166473388671875, 0.01275634765625, 0.0125579833984375, 0.04534912109375, 0.01546478271484375, 0.025848388671875, 0.02642822265625, 0.0634765625, 0.05157470703125, -0.029876708984375, 0.0665283203125, -0.042266845703125, 0.042877197265625, 0.06787109375, 0.015289306640625, 0.06707763671875, 0.032379150390625, -0.02459716796875, 0.0560302734375, 0.054840087890625, -0.0163726806640625, 0.038787841796875, 0.0029144287109375, 0.0012331008911132812, -0.0310211181640625, 0.0289459228515625, -0.051025390625, 0.0179901123046875, 0.011810302734375, -0.060302734375, 
-0.005695343017578125, -0.003955841064453125, -0.00833892822265625, -0.012298583984375, -0.0185546875, 0.03387451171875, -0.00579833984375, -0.0430908203125, 0.051513671875, 0.0022830963134765625, 0.056396484375, -0.04998779296875, 0.0139312744140625, -0.0194549560546875, 0.02032470703125, -0.017059326171875, -0.06591796875, 0.007122039794921875, -0.0037097930908203125, -0.01129150390625, -0.0208892822265625, 0.036865234375, -0.04400634765625, -0.043060302734375, 0.0309600830078125, 0.0240631103515625, 0.0159912109375, -0.007198333740234375, -0.07806396484375, 0.016693115234375, 0.0159912109375, -0.037811279296875, 0.0084381103515625, 0.031829833984375, 0.00982666015625, 0.0504150390625, 0.03662109375, -0.00911712646484375, 0.031524658203125, 0.0025787353515625, 0.05316162109375, -0.06591796875, -0.039642333984375, -0.04345703125, 0.045806884765625, -0.0219879150390625, -0.040008544921875, 0.06805419921875, 0.07818603515625, 0.0748291015625, -0.0242156982421875, 0.0504150390625, -0.01110076904296875, 0.0186767578125, -0.0294342041015625, 0.05877685546875, -0.064208984375, 0.0188446044921875, -0.0164947509765625, -0.0625, -0.01320648193359375, 0.04803466796875, -0.033111572265625, 0.0194549560546875, 0.0504150390625, 0.07061767578125, 0.0006146430969238281, -0.0019311904907226562, 0.0185394287109375, 0.011932373046875, 0.01352691650390625, 0.06597900390625, 0.0489501953125, -0.06964111328125, 0.075927734375, -0.03289794921875, 0.01218414306640625, -0.0166473388671875, -0.031829833984375, -0.06414794921875, -0.0439453125, -0.024871826171875, -0.03173828125, 0.0123291015625, 0.0626220703125, 0.054840087890625, -0.0562744140625, -0.01556396484375, -0.001735687255859375, 0.00749969482421875, -0.010467529296875, -0.0172119140625, 0.032562255859375, -0.0207366943359375, -0.0718994140625, 0.025238037109375, 0.001007080078125, 0.0006685256958007812, -0.01849365234375, -0.03289794921875, -0.0222320556640625, 0.0032672882080078125, 0.034393310546875, 0.00815582275390625, 
-0.05499267578125, -0.00933074951171875, 0.01424407958984375, -0.0224456787109375, 0.0221710205078125, 0.045806884765625, -0.058868408203125, 0.01727294921875, 0.062286376953125, 0.031524658203125, 0.068359375, -0.0157012939453125, 0.0208282470703125, -0.0311279296875, -0.0031108856201171875, 0.01171112060546875, 0.04339599609375, 0.01078033447265625, -0.01446533203125, 0.045318603515625, 0.0296630859375, -0.04547119140625, -0.061737060546875, -0.01361083984375, -0.086669921875, -0.026580810546875, 0.06787109375, -0.0101470947265625, -0.033416748046875, 0.01340484619140625, -0.01111602783203125, 0.017822265625, -0.0283660888671875, 0.035552978515625, 0.049591064453125, 0.00438690185546875, -0.0204925537109375, -0.043365478515625, 0.031005859375, 0.01763916015625, -0.05224609375, -0.01372528076171875, 0.01337432861328125, 0.03564453125, 0.01515960693359375, 0.033233642578125, -0.0309600830078125, 0.0238189697265625, 0.0120086669921875, 0.031036376953125, -0.0217742919921875, -0.031280517578125, -0.0252685546875, 0.01306915283203125, -0.031219482421875, -0.038604736328125 ] ]
GeorgiaTechResearchInstitute/galpaca-30b
2023-04-18T20:14:42.000Z
[ "transformers", "pytorch", "opt", "text-generation", "galactica", "alpaca", "dataset:tatsu-lab/alpaca", "license:cc-by-nc-4.0", "has_space", "text-generation-inference", "region:us" ]
text-generation
GeorgiaTechResearchInstitute
null
null
GeorgiaTechResearchInstitute/galpaca-30b
55
5,835
transformers
2023-03-30T18:32:41
--- license: cc-by-nc-4.0 datasets: - tatsu-lab/alpaca library_name: transformers pipeline_tag: text-generation tags: - galactica - alpaca - opt inference: false --- # GALPACA 30B (large) GALACTICA 30B fine-tuned on the Alpaca dataset. The model card from the original Galactica repo can be found [here](https://github.com/paperswithcode/galai/blob/main/docs/model_card.md), and the original paper [here](https://galactica.org/paper.pdf). The dataset card for Alpaca can be found [here](https://huggingface.co/datasets/tatsu-lab/alpaca/blob/main/README.md), and the project homepage [here](https://crfm.stanford.edu/2023/03/13/alpaca.html). The Alpaca dataset was collected with a modified version of the [Self-Instruct Framework](https://github.com/yizhongw/self-instruct), and was built using OpenAI's `text-davinci-003` model. As such it is subject to OpenAI's terms of service. ## Model Details The GALACTICA models are trained on a large-scale scientific corpus and are designed to perform scientific tasks. The Alpaca dataset is a set of 52k instruct-response pairs designed to enhace the instruction following capabilites of pre-trained language models. ## Model Use The GALACTICA model card specifies that the primary indended users of the GALACTICA models are researchers studying language models applied to the scientific domain, and it cautions against production use of GALACTICA without safeguards due to the potential for the model to produce inaccurate information. The original GALACTICA models are available under a non-commercial CC BY-NC 4.0 license, and the GALPACA model is additionally subject to the [OpenAI Terms of Service](https://openai.com/policies/terms-of-use). <details> <summary> Example Outputs </summary> *__Full Prompt:__* ``` Below is an instruction that describes a task. As a scientist, write a response that appropriately completes the request, citing specific papers whenever possible. ### Instruction: Write out Maxwell's equations and explain each one. 
### Markdown and LaTeX Response: ``` *__Response:__* Maxwell's equations are a set of four equations that describe the behavior of electromagnetic fields. These equations are: $$\nabla \times \textbf{E} = -\frac{\partial \textbf{B}}{\partial t}$$ $$\nabla \times \textbf{B} = \mu_0 \epsilon_0 \frac{\partial \textbf{E}}{\partial t} + \mu_0 \textbf{J}$$ $$\nabla \bullet \textbf{E} = \frac{\rho}{\epsilon_0}$$ $$\nabla \bullet \textbf{B} = 0$$ The first two equations describe how the electric and magnetic fields, E and B, are related to each other. The third and fourth equations are Gauss's law for electric and magnetic fields, respectively. These equations describe the behavior of the electric and magnetic fields when there are charges and currents in the space. Each equation has a specific purpose and together they form a complete description of the behavior of electromagnetic fields. *__Full Prompt:__* ``` Below is an instruction that describes a task. Write a response that appropriately completes the request. ### Instruction: Write a python function implements the Sherman-Morrison matrix inversion lemma using numpy. ### Response: ```python ``` *__Response:__* ```python def sm_inv_lemma(A, u, v): """ Function implementing the Sherman-Morrison matrix inversion lemma using numpy. Parameters ---------- A : numpy array The original matrix. u : numpy array The first vector in the Sherman-Morrison lemma. v : numpy array The second vector in the Sherman-Morrison lemma. Returns ------- The inverse matrix of A - uv^T. """ A_inv = np.linalg.inv(A) alpha = 1 / (1 + v.T @ A_inv @ u) return A_inv - (alpha * A_inv @ u @ v.T @ A_inv) ``` </details> ## Training Data The GALPACA models are trained by fine-tuning pre-trained GALACTICA models on the Alpaca dataset. GALACTICA models were trained on 106 billion tokens of open-access scientific text and data, including papers, textbooks, scientific websites, encyclopedias, and more. 
Fine-tuning the base GALACTICA models on the 52k instruction-response pairs in the Alpaca dataset allows users to query the GALPACA models in an instruct-response fashion. ## How to Use The GALPACA weights are made available for use with the `transformers` library. <details> <summary> Click to expand </summary> ```python # pip install accelerate from transformers import AutoTokenizer, OPTForCausalLM tokenizer = AutoTokenizer.from_pretrained("GeorgiaTechResearchInstitute/galpaca-30b") model = OPTForCausalLM.from_pretrained("GeorgiaTechResearchInstitute/galpaca-30b", device_map="auto", torch_dtype=torch.float16) # see the original Alpaca repo for more information about the prompt templates no_input_prompt_template = ("Below is an instruction that describes a task. " "Write a response that appropriately completes the request.\n\n" "### Instruction:\n{instruction}\n\n### Response:") prompt = "Write out Maxwell's equations and explain the meaning of each one." formatted_prompt = no_input_prompt_template.format_map({'instruction': prompt}) tokenized_prompt = tokenizer(formatted_prompt, return_tensors="pt").input_ids.to(model.device) out_tokens = model.generate(tokenized_prompt) print(tokenizer.batch_decode(out_tokens, skip_special_tokens=False, clean_up_tokenization_spaces=False)) ``` </details> ## Training Resources GALPACA 30B was fine-tuned in about 6 hours using 16 A100 80GB GPUS, 16-bit mixed-precision, an effective batch-size of 1024, and with a maximum context window of 384 tokens. This model was trained using DeepSpeed ZeRO Stage 3 optimizations. ## Performance and Limitations Qualitative evaluation suggests that Galpaca frequently outperforms LLaMA-based Alpaca models on tasks related to technical knowledge and programming, while it underperforms on natural langauge tasks such as generating prose. More information about the performance and limitations of the GALACTICA family of models can be found on the original GALACTICA model card. 
## Works Cited ```bibtex @inproceedings{GALACTICA, title={GALACTICA: A Large Language Model for Science}, author={Ross Taylor and Marcin Kardas and Guillem Cucurull and Thomas Scialom and Anthony Hartshorn and Elvis Saravia and Andrew Poulton and Viktor Kerkez and Robert Stojnic}, year={2022} } ``` ```bibtex @misc{alpaca, author = {Rohan Taori and Ishaan Gulrajani and Tianyi Zhang and Yann Dubois and Xuechen Li and Carlos Guestrin and Percy Liang and Tatsunori B. Hashimoto }, title = {Stanford Alpaca: An Instruction-following LLaMA model}, year = {2023}, publisher = {GitHub}, journal = {GitHub repository}, howpublished = {\url{https://github.com/tatsu-lab/stanford_alpaca}}, } ```
6,779
[ [ -0.04351806640625, -0.0657958984375, 0.041290283203125, 0.026702880859375, 0.004734039306640625, -0.0160064697265625, -0.00153350830078125, -0.0297088623046875, 0.0246734619140625, 0.027069091796875, -0.056915283203125, -0.032379150390625, -0.0372314453125, 0.007793426513671875, -0.020233154296875, 0.0869140625, -0.01328277587890625, -0.0008392333984375, -0.000514984130859375, -0.006023406982421875, -0.00905609130859375, -0.01285552978515625, -0.050750732421875, -0.0233612060546875, 0.04541015625, 0.007686614990234375, 0.05084228515625, 0.056488037109375, 0.05780029296875, 0.0261077880859375, -0.026824951171875, 0.0168304443359375, -0.034393310546875, -0.0340576171875, 0.00109100341796875, -0.03314208984375, -0.043701171875, 0.0225372314453125, 0.0496826171875, 0.04498291015625, -0.0112152099609375, 0.0384521484375, -0.01503753662109375, 0.030853271484375, -0.032196044921875, 0.0215301513671875, -0.04156494140625, 0.00315093994140625, -0.0012865066528320312, -0.020263671875, -0.0115814208984375, -0.024749755859375, -0.00717926025390625, -0.07049560546875, 0.03204345703125, 0.007358551025390625, 0.09381103515625, 0.02239990234375, -0.033599853515625, -0.039764404296875, -0.056640625, 0.0478515625, -0.051422119140625, 0.014068603515625, 0.0235748291015625, 0.0176544189453125, -0.0240478515625, -0.05914306640625, -0.072998046875, -0.03363037109375, -0.00911712646484375, 0.0241241455078125, -0.02166748046875, -0.004093170166015625, 0.039947509765625, 0.031463623046875, -0.03863525390625, 0.011199951171875, -0.06768798828125, 0.0098876953125, 0.03985595703125, 0.022064208984375, 0.0106201171875, -0.01177215576171875, -0.033294677734375, -0.0274505615234375, -0.06951904296875, 0.013824462890625, 0.038177490234375, 0.00933074951171875, -0.0242156982421875, 0.049835205078125, -0.01444244384765625, 0.0328369140625, 0.01377105712890625, -0.003330230712890625, 0.03912353515625, -0.0234222412109375, -0.040802001953125, -0.03955078125, 0.0701904296875, 0.0250396728515625, 
-0.01177215576171875, -0.00971221923828125, -0.00714111328125, -0.003265380859375, 0.00970458984375, -0.06280517578125, 0.0022430419921875, 0.0150909423828125, -0.0423583984375, -0.0163116455078125, -0.004791259765625, -0.06976318359375, -0.02545166015625, -0.0185089111328125, 0.040496826171875, -0.002712249755859375, 0.01049041748046875, -0.001068115234375, 0.000051915645599365234, 0.02642822265625, -0.00960540771484375, -0.08868408203125, 0.021514892578125, 0.0262908935546875, 0.05364990234375, 0.02947998046875, -0.05072021484375, -0.0222320556640625, -0.00013136863708496094, -0.0128936767578125, 0.048095703125, -0.0191192626953125, -0.03271484375, -0.01554107666015625, 0.03289794921875, -0.02166748046875, -0.018585205078125, 0.0472412109375, -0.04296875, 0.0259857177734375, -0.0243377685546875, -0.033660888671875, -0.0266571044921875, -0.01512908935546875, -0.047088623046875, 0.047088623046875, 0.01007843017578125, -0.0294952392578125, 0.01494598388671875, -0.061309814453125, 0.0029754638671875, -0.0220947265625, 0.01360321044921875, -0.04345703125, -0.0082550048828125, 0.0227813720703125, 0.037506103515625, -0.03228759765625, -0.0004591941833496094, -0.03082275390625, -0.02728271484375, 0.01171112060546875, -0.015716552734375, 0.073486328125, 0.033843994140625, -0.0252685546875, 0.0029735565185546875, -0.06353759765625, -0.01561737060546875, 0.033294677734375, -0.02105712890625, 0.0152435302734375, -0.01200103759765625, -0.0218505859375, 0.0096893310546875, 0.0181427001953125, -0.03631591796875, 0.0264892578125, -0.0177154541015625, 0.036041259765625, 0.044830322265625, -0.006755828857421875, 0.048187255859375, -0.0128936767578125, 0.04095458984375, -0.007526397705078125, 0.0226898193359375, -0.0253753662109375, -0.044921875, -0.07275390625, -0.010894775390625, -0.00047278404235839844, 0.04388427734375, -0.036468505859375, 0.02740478515625, -0.0011768341064453125, -0.072265625, -0.041473388671875, 0.0092620849609375, 0.0283203125, 0.056793212890625, 
0.0576171875, 0.0015134811401367188, -0.04522705078125, -0.061981201171875, 0.018463134765625, -0.01629638671875, 0.00324249267578125, 0.037322998046875, 0.058746337890625, -0.03448486328125, 0.04766845703125, -0.06036376953125, -0.0228424072265625, 0.00787353515625, 0.0093231201171875, 0.0428466796875, 0.035980224609375, 0.05010986328125, -0.0367431640625, -0.0182342529296875, 0.0032749176025390625, -0.06695556640625, -0.0141448974609375, -0.0036411285400390625, -0.004398345947265625, 0.01428985595703125, 0.043243408203125, -0.07049560546875, 0.0200653076171875, 0.044677734375, -0.05084228515625, 0.035888671875, -0.0228424072265625, 0.004673004150390625, -0.07720947265625, 0.035614013671875, 0.00922393798828125, 0.007080078125, -0.051025390625, 0.0244598388671875, 0.01114654541015625, -0.0042266845703125, -0.034637451171875, 0.0513916015625, -0.044342041015625, 0.00016498565673828125, -0.01537322998046875, -0.0032024383544921875, 0.00867462158203125, 0.05523681640625, -0.0204315185546875, 0.06146240234375, 0.0273895263671875, -0.051849365234375, 0.0355224609375, 0.032379150390625, -0.01788330078125, 0.016021728515625, -0.07196044921875, 0.00606536865234375, -0.03497314453125, 0.03472900390625, -0.03704833984375, 0.0063018798828125, 0.03564453125, -0.0250396728515625, 0.00917816162109375, -0.0005736351013183594, -0.005615234375, -0.04791259765625, 0.0029163360595703125, 0.020111083984375, 0.0391845703125, -0.057403564453125, 0.036407470703125, 0.0234222412109375, -0.01268768310546875, -0.0694580078125, -0.0255584716796875, -0.00986480712890625, -0.02984619140625, -0.043914794921875, 0.033599853515625, -0.017547607421875, -0.0220794677734375, -0.0003199577331542969, 0.0246734619140625, 0.00391387939453125, 0.006175994873046875, 0.024261474609375, 0.047821044921875, -0.0215911865234375, 0.004528045654296875, -0.0016794204711914062, -0.0152740478515625, 0.006778717041015625, -0.002468109130859375, 0.05169677734375, -0.039825439453125, -0.0016031265258789062, -0.03125, 
0.0189208984375, 0.037261962890625, -0.013519287109375, 0.07110595703125, 0.0589599609375, -0.01904296875, 0.0128021240234375, -0.0284271240234375, -0.01084136962890625, -0.03741455078125, 0.032562255859375, -0.01971435546875, -0.035064697265625, 0.062286376953125, 0.032196044921875, 0.01078033447265625, 0.06787109375, 0.048370361328125, -0.01143646240234375, 0.061859130859375, 0.047576904296875, 0.00420379638671875, 0.03753662109375, -0.061309814453125, 0.0011568069458007812, -0.068603515625, -0.04388427734375, -0.05596923828125, -0.00667572021484375, -0.0281982421875, -0.033477783203125, 0.00807952880859375, 0.01209259033203125, -0.035675048828125, 0.050689697265625, -0.02325439453125, 0.051025390625, 0.027618408203125, -0.009521484375, 0.0016918182373046875, 0.0029010772705078125, 0.00518798828125, 0.0235443115234375, -0.041717529296875, -0.04156494140625, 0.089111328125, 0.04815673828125, 0.05908203125, 0.007373809814453125, 0.040008544921875, -0.011505126953125, 0.032135009765625, -0.04901123046875, 0.03680419921875, -0.00872039794921875, -0.03363037109375, -0.01324462890625, -0.044189453125, -0.0836181640625, 0.0269927978515625, -0.032012939453125, -0.05548095703125, 0.039520263671875, 0.0028972625732421875, -0.037933349609375, 0.01324462890625, -0.0677490234375, 0.058807373046875, 0.00739288330078125, -0.01554107666015625, -0.0039825439453125, -0.035919189453125, 0.0291748046875, 0.0080108642578125, 0.005279541015625, -0.0100555419921875, 0.0113983154296875, 0.055572509765625, -0.0418701171875, 0.053253173828125, -0.01348876953125, -0.0235137939453125, 0.0325927734375, -0.0268096923828125, 0.051422119140625, 0.011749267578125, 0.00281524658203125, 0.02069091796875, -0.00897979736328125, -0.0288238525390625, -0.038970947265625, 0.058349609375, -0.0699462890625, -0.04388427734375, -0.0162811279296875, -0.0531005859375, 0.0127105712890625, 0.0171966552734375, 0.037567138671875, 0.02764892578125, -0.01385498046875, 0.0034160614013671875, 0.035400390625, 
-0.02337646484375, 0.056793212890625, 0.043914794921875, -0.0222625732421875, -0.0266265869140625, 0.060882568359375, 0.0157012939453125, 0.01079559326171875, 0.01142120361328125, -0.002170562744140625, -0.031280517578125, -0.0251312255859375, -0.052642822265625, 0.03936767578125, -0.055450439453125, -0.010589599609375, -0.0552978515625, -0.01898193359375, -0.023681640625, -0.01222991943359375, -0.01386260986328125, -0.04254150390625, -0.0516357421875, 0.00010561943054199219, 0.016021728515625, 0.040496826171875, 0.003284454345703125, 0.04132080078125, -0.05413818359375, 0.0267181396484375, 0.0240936279296875, 0.005359649658203125, -0.00075531005859375, -0.0545654296875, -0.0209808349609375, 0.01218414306640625, -0.040283203125, -0.057403564453125, 0.043701171875, 0.011138916015625, 0.037384033203125, 0.0303955078125, -0.04278564453125, 0.054443359375, -0.041778564453125, 0.054412841796875, 0.018280029296875, -0.06683349609375, 0.0261077880859375, -0.031524658203125, 0.01336669921875, 0.01389312744140625, 0.02813720703125, -0.020355224609375, -0.017242431640625, -0.0584716796875, -0.0509033203125, 0.050506591796875, 0.0219268798828125, 0.01081085205078125, 0.006221771240234375, 0.020660400390625, -0.001407623291015625, -0.0003859996795654297, -0.0699462890625, -0.0301361083984375, -0.026641845703125, -0.005649566650390625, -0.0294036865234375, -0.00021409988403320312, -0.0307464599609375, -0.01331329345703125, 0.0638427734375, -0.01422882080078125, 0.0450439453125, 0.01313018798828125, -0.00469970703125, -0.01313018798828125, 0.00920867919921875, 0.0760498046875, 0.048187255859375, -0.0209503173828125, -0.0104217529296875, 0.02362060546875, -0.04095458984375, 0.013702392578125, 0.041534423828125, -0.02093505859375, -0.014495849609375, 0.0133514404296875, 0.07989501953125, -0.0219268798828125, -0.031402587890625, 0.0263671875, -0.0173797607421875, -0.01488494873046875, -0.018829345703125, 0.010406494140625, 0.0170440673828125, 0.0297088623046875, 0.034423828125, 
0.01233673095703125, 0.0024051666259765625, -0.04248046875, -0.0160369873046875, 0.0361328125, -0.00621795654296875, -0.0196075439453125, 0.06536865234375, 0.0107421875, -0.01580810546875, 0.030914306640625, -0.01837158203125, -0.01226806640625, 0.0911865234375, 0.058807373046875, 0.04583740234375, -0.01113128662109375, 0.0281982421875, 0.0428466796875, 0.031890869140625, -0.00457000732421875, 0.01361846923828125, 0.019317626953125, -0.046600341796875, -0.0111236572265625, -0.05072021484375, -0.0242462158203125, 0.016998291015625, -0.0379638671875, 0.0184478759765625, -0.04302978515625, -0.00583648681640625, 0.0185699462890625, 0.014373779296875, -0.06463623046875, 0.024505615234375, 0.0027713775634765625, 0.061859130859375, -0.0662841796875, 0.043792724609375, 0.039764404296875, -0.052642822265625, -0.07281494140625, -0.026702880859375, -0.0124359130859375, -0.06976318359375, 0.04302978515625, 0.012969970703125, -0.005039215087890625, 0.002010345458984375, -0.044647216796875, -0.09014892578125, 0.10882568359375, 0.0267333984375, -0.0322265625, 0.0125732421875, -0.005336761474609375, 0.052520751953125, -0.03765869140625, 0.039154052734375, 0.03955078125, 0.0304718017578125, 0.0006322860717773438, -0.05596923828125, 0.01360321044921875, -0.03375244140625, -0.015655517578125, 0.00887298583984375, -0.0841064453125, 0.084716796875, -0.040435791015625, -0.01348114013671875, 0.0253753662109375, 0.067138671875, 0.021514892578125, 0.036956787109375, 0.0209808349609375, 0.048614501953125, 0.06787109375, -0.00765228271484375, 0.0738525390625, -0.0152587890625, 0.04833984375, 0.048553466796875, -0.0083770751953125, 0.039031982421875, 0.035491943359375, -0.0280303955078125, 0.0428466796875, 0.03564453125, -0.04302978515625, 0.02252197265625, 0.0032405853271484375, -0.00946044921875, 0.026153564453125, -0.002582550048828125, -0.051910400390625, 0.039520263671875, 0.0261077880859375, -0.0584716796875, 0.004673004150390625, -0.0179901123046875, 0.0204315185546875, 
-0.026641845703125, -0.032958984375, 0.0226593017578125, 0.01287841796875, -0.063232421875, 0.0677490234375, 0.01348876953125, 0.038482666015625, -0.040618896484375, 0.010009765625, -0.0274658203125, 0.006824493408203125, -0.0302734375, -0.03155517578125, 0.01462554931640625, 0.004024505615234375, -0.0306854248046875, 0.0013456344604492188, 0.0305633544921875, -0.032196044921875, -0.046722412109375, 0.0086822509765625, 0.01232147216796875, 0.0166473388671875, 0.035003662109375, -0.0362548828125, -0.01385498046875, -0.01047515869140625, -0.040863037109375, 0.01143646240234375, -0.01372528076171875, 0.0166015625, 0.06878662109375, 0.060699462890625, -0.0002422332763671875, 0.0164337158203125, -0.01372528076171875, 0.0660400390625, -0.0496826171875, -0.033935546875, -0.052581787109375, 0.0183868408203125, -0.00441741943359375, -0.0509033203125, 0.051422119140625, 0.032958984375, 0.04351806640625, -0.029083251953125, 0.02716064453125, -0.011688232421875, 0.0100860595703125, -0.0626220703125, 0.06793212890625, -0.034271240234375, 0.019805908203125, -0.024688720703125, -0.09576416015625, -0.00716400146484375, 0.050262451171875, -0.0134124755859375, 0.01422882080078125, 0.061767578125, 0.08636474609375, 0.0009002685546875, 0.00801849365234375, 0.001506805419921875, 0.026458740234375, 0.036590576171875, 0.0465087890625, 0.052978515625, -0.05029296875, 0.046905517578125, -0.0241851806640625, -0.01068115234375, 0.00240325927734375, -0.060302734375, -0.05657958984375, -0.042144775390625, -0.0005869865417480469, -0.0272979736328125, -0.0173492431640625, 0.056182861328125, 0.043243408203125, -0.04425048828125, -0.01055908203125, -0.00891876220703125, 0.00295257568359375, -0.0173797607421875, -0.0171051025390625, 0.0423583984375, -0.009124755859375, -0.07080078125, 0.0504150390625, -0.0082244873046875, 0.031463623046875, -0.017730712890625, -0.0100860595703125, -0.00289154052734375, 0.0201568603515625, 0.032135009765625, 0.0537109375, -0.0322265625, -0.0023174285888671875, 
0.007904052734375, -0.0200958251953125, -0.01053619384765625, 0.03521728515625, -0.061798095703125, 0.00020611286163330078, 0.02630615234375, 0.0217742919921875, 0.045928955078125, -0.0127716064453125, 0.0259552001953125, -0.0244598388671875, 0.0214080810546875, -0.004108428955078125, 0.055511474609375, 0.01126861572265625, -0.037261962890625, 0.04827880859375, 0.013641357421875, -0.06536865234375, -0.060455322265625, 0.0015497207641601562, -0.08111572265625, -0.006748199462890625, 0.093994140625, -0.019073486328125, -0.0219268798828125, 0.0161895751953125, -0.016876220703125, 0.02099609375, -0.02301025390625, 0.07684326171875, 0.03338623046875, 0.0012884140014648438, -0.006023406982421875, -0.035125732421875, 0.02703857421875, 0.038818359375, -0.07354736328125, -0.01419830322265625, 0.0262298583984375, 0.0133209228515625, 0.02825927734375, 0.049102783203125, 0.00572967529296875, 0.02618408203125, 0.0083160400390625, 0.01216888427734375, -0.0262908935546875, -0.025360107421875, -0.0340576171875, 0.0115509033203125, -0.036407470703125, -0.006595611572265625 ] ]
OpenBuddy/openbuddy-openllama-3b-v10-bf16
2023-08-22T10:51:04.000Z
[ "transformers", "pytorch", "llama", "text-generation", "zh", "en", "fr", "de", "ja", "ko", "it", "ru", "license:apache-2.0", "has_space", "text-generation-inference", "region:us" ]
text-generation
OpenBuddy
null
null
OpenBuddy/openbuddy-openllama-3b-v10-bf16
6
5,835
transformers
2023-08-10T13:37:46
--- language: - zh - en - fr - de - ja - ko - it - ru pipeline_tag: text-generation inference: false library_name: transformers license: apache-2.0 --- # OpenBuddy - Open Multilingual Chatbot GitHub and Usage Guide: [https://github.com/OpenBuddy/OpenBuddy](https://github.com/OpenBuddy/OpenBuddy) Website and Demo: [https://openbuddy.ai](https://openbuddy.ai) ![Demo](https://raw.githubusercontent.com/OpenBuddy/OpenBuddy/main/media/demo.png) # Copyright Notice License: Apache 2.0. ## Disclaimer All OpenBuddy models have inherent limitations and may potentially produce outputs that are erroneous, harmful, offensive, or otherwise undesirable. Users should not use these models in critical or high-stakes situations that may lead to personal injury, property damage, or significant losses. Examples of such scenarios include, but are not limited to, the medical field, controlling software and hardware systems that may cause harm, and making important financial or legal decisions. OpenBuddy is provided "as-is" without any warranty of any kind, either express or implied, including, but not limited to, the implied warranties of merchantability, fitness for a particular purpose, and non-infringement. In no event shall the authors, contributors, or copyright holders be liable for any claim, damages, or other liabilities, whether in an action of contract, tort, or otherwise, arising from, out of, or in connection with the software or the use or other dealings in the software. By using OpenBuddy, you agree to these terms and conditions, and acknowledge that you understand the potential risks associated with its use. You also agree to indemnify and hold harmless the authors, contributors, and copyright holders from any claims, damages, or liabilities arising from your use of OpenBuddy. 
## 免责声明 所有OpenBuddy模型均存在固有的局限性,可能产生错误的、有害的、冒犯性的或其他不良的输出。用户在关键或高风险场景中应谨慎行事,不要使用这些模型,以免导致人身伤害、财产损失或重大损失。此类场景的例子包括但不限于医疗领域、可能导致伤害的软硬件系统的控制以及进行重要的财务或法律决策。 OpenBuddy按“原样”提供,不附带任何种类的明示或暗示的保证,包括但不限于适销性、特定目的的适用性和非侵权的暗示保证。在任何情况下,作者、贡献者或版权所有者均不对因软件或使用或其他软件交易而产生的任何索赔、损害赔偿或其他责任(无论是合同、侵权还是其他原因)承担责任。 使用OpenBuddy即表示您同意这些条款和条件,并承认您了解其使用可能带来的潜在风险。您还同意赔偿并使作者、贡献者和版权所有者免受因您使用OpenBuddy而产生的任何索赔、损害赔偿或责任的影响。
2,202
[ [ -0.02740478515625, -0.075439453125, 0.01169586181640625, 0.0380859375, -0.0212249755859375, -0.01006317138671875, -0.018585205078125, -0.0322265625, 0.010223388671875, 0.032257080078125, -0.0200653076171875, -0.042144775390625, -0.032135009765625, -0.019378662109375, -0.002353668212890625, 0.07489013671875, -0.016021728515625, -0.0048370361328125, -0.001819610595703125, -0.014373779296875, -0.048187255859375, -0.01593017578125, -0.036407470703125, -0.0056915283203125, 0.003818511962890625, 0.031890869140625, 0.06658935546875, -0.0005750656127929688, 0.04498291015625, 0.027862548828125, 0.00612640380859375, -0.0034923553466796875, -0.039764404296875, 0.01611328125, 0.004962921142578125, -0.03143310546875, -0.046478271484375, -0.0153045654296875, 0.01236724853515625, 0.0309600830078125, -0.0264892578125, 0.025634765625, 0.00521087646484375, 0.054412841796875, -0.059417724609375, 0.031982421875, -0.0103759765625, 0.004070281982421875, -0.0115966796875, -0.0225067138671875, -0.01617431640625, -0.058441162109375, -0.0102691650390625, -0.04638671875, -0.0099639892578125, 0.010101318359375, 0.0797119140625, 0.00264739990234375, -0.0254058837890625, -0.0148468017578125, -0.05938720703125, 0.044342041015625, -0.05731201171875, 0.0252227783203125, 0.02337646484375, 0.057586669921875, -0.0211334228515625, -0.0465087890625, -0.04052734375, -0.006999969482421875, -0.004364013671875, 0.0289764404296875, -0.0231170654296875, -0.006679534912109375, 0.01470184326171875, 0.0362548828125, -0.05596923828125, -0.005641937255859375, -0.04400634765625, -0.003620147705078125, 0.026641845703125, 0.0091705322265625, 0.0487060546875, -0.0205535888671875, -0.034942626953125, -0.00565338134765625, -0.0308990478515625, 0.0323486328125, 0.033782958984375, 0.0203094482421875, -0.0491943359375, 0.056243896484375, -0.019989013671875, 0.0309600830078125, -0.00820159912109375, -0.0268096923828125, 0.042236328125, -0.030914306640625, -0.0257720947265625, -0.0021610260009765625, 0.0794677734375, 
0.04510498046875, 0.02630615234375, 0.004985809326171875, -0.00405120849609375, -0.0087890625, 0.0103607177734375, -0.059844970703125, -0.0222625732421875, 0.050811767578125, -0.0501708984375, -0.0184478759765625, 0.01030731201171875, -0.07257080078125, -0.01299285888671875, 0.0015707015991210938, 0.0240325927734375, -0.047943115234375, -0.04998779296875, 0.013336181640625, -0.005931854248046875, -0.0025997161865234375, 0.017578125, -0.04022216796875, 0.01751708984375, 0.0176849365234375, 0.08087158203125, 0.022857666015625, -0.0153961181640625, -0.0043182373046875, 0.0230560302734375, -0.017364501953125, 0.041229248046875, -0.01055908203125, -0.040374755859375, 0.007373809814453125, 0.00785064697265625, 0.00643157958984375, -0.015655517578125, 0.0262603759765625, -0.0181884765625, 0.047393798828125, 0.0284576416015625, -0.01009368896484375, -0.030029296875, 0.002086639404296875, -0.042083740234375, 0.06842041015625, 0.007595062255859375, -0.06805419921875, 0.01175689697265625, -0.07562255859375, -0.0307159423828125, 0.0037841796875, -0.016815185546875, -0.032135009765625, -0.005634307861328125, 0.01062774658203125, 0.031402587890625, -0.015655517578125, 0.01204681396484375, -0.040283203125, -0.016082763671875, 0.0221710205078125, -0.0251312255859375, 0.10491943359375, 0.020111083984375, -0.01062774658203125, 0.03668212890625, -0.04547119140625, 0.01079559326171875, 0.037689208984375, -0.02838134765625, -0.031402587890625, -0.01218414306640625, 0.01091766357421875, 0.0164642333984375, 0.0258026123046875, -0.048980712890625, 0.016021728515625, -0.039794921875, 0.0360107421875, 0.055511474609375, 0.004550933837890625, 0.0289764404296875, -0.035858154296875, 0.053955078125, 0.004848480224609375, 0.039459228515625, -0.0311737060546875, -0.059295654296875, -0.040435791015625, -0.043182373046875, 0.00382232666015625, 0.05987548828125, -0.045867919921875, 0.049072265625, -0.0160064697265625, -0.054046630859375, -0.055755615234375, -0.0018014907836914062, 
0.030914306640625, 0.017364501953125, 0.02734375, -0.010986328125, -0.0286407470703125, -0.03851318359375, -0.00725555419921875, -0.0267486572265625, -0.0099029541015625, 0.032928466796875, 0.04559326171875, -0.00907135009765625, 0.0633544921875, -0.05596923828125, -0.034423828125, 0.0061492919921875, 0.0008440017700195312, 0.02410888671875, 0.053436279296875, 0.06536865234375, -0.056671142578125, -0.05279541015625, 0.006317138671875, -0.06494140625, 0.01678466796875, -0.001556396484375, -0.025054931640625, 0.029693603515625, 0.02044677734375, -0.057952880859375, 0.0693359375, 0.050384521484375, -0.034576416015625, 0.055145263671875, -0.0293121337890625, 0.0174102783203125, -0.10235595703125, 0.0168914794921875, -0.01500701904296875, -0.013275146484375, -0.033172607421875, 0.0218353271484375, 0.00762939453125, -0.0175628662109375, -0.042236328125, 0.0455322265625, -0.02435302734375, 0.024505615234375, -0.000014722347259521484, 0.0148468017578125, -0.01432037353515625, 0.0364990234375, -0.017181396484375, 0.049102783203125, 0.04168701171875, -0.031707763671875, 0.045806884765625, 0.02838134765625, -0.028472900390625, 0.040924072265625, -0.0718994140625, -0.0107269287109375, -0.00502777099609375, 0.01800537109375, -0.08807373046875, -0.0255889892578125, 0.0538330078125, -0.07220458984375, 0.01477813720703125, -0.006572723388671875, -0.045867919921875, -0.03338623046875, -0.031402587890625, 0.0093536376953125, 0.046478271484375, -0.0272064208984375, 0.03033447265625, 0.0218658447265625, -0.01806640625, -0.04486083984375, -0.0526123046875, -0.01995849609375, -0.01354217529296875, -0.06890869140625, 0.0111846923828125, -0.0129547119140625, -0.0018110275268554688, 0.0083465576171875, 0.0111846923828125, -0.0163726806640625, -0.0018310546875, 0.045684814453125, 0.0268402099609375, -0.01088714599609375, 0.007427215576171875, 0.0032711029052734375, -0.0091705322265625, -0.01061248779296875, 0.0040740966796875, 0.044921875, -0.0188446044921875, -0.039520263671875, 
-0.0272979736328125, 0.040618896484375, 0.042266845703125, -0.015716552734375, 0.0609130859375, 0.054595947265625, -0.036041259765625, 0.008697509765625, -0.03167724609375, -0.0012769699096679688, -0.037811279296875, 0.01457977294921875, -0.032073974609375, -0.0625, 0.050628662109375, 0.00882720947265625, 0.0295562744140625, 0.018707275390625, 0.055999755859375, -0.007152557373046875, 0.07476806640625, 0.04864501953125, 0.01351165771484375, 0.0294189453125, -0.011260986328125, 0.02490234375, -0.045684814453125, -0.0294189453125, -0.051544189453125, -0.01239776611328125, -0.05810546875, -0.023590087890625, 0.023834228515625, 0.0251312255859375, -0.0401611328125, 0.0212554931640625, -0.05426025390625, 0.029693603515625, 0.06182861328125, 0.0159912109375, 0.0158538818359375, -0.007843017578125, -0.019622802734375, 0.0140228271484375, -0.037628173828125, -0.04046630859375, 0.0733642578125, 0.0216217041015625, 0.067138671875, 0.0347900390625, 0.048095703125, -0.009552001953125, 0.006988525390625, -0.053253173828125, 0.032867431640625, 0.0162506103515625, -0.07464599609375, -0.03875732421875, -0.020599365234375, -0.10076904296875, 0.01959228515625, -0.000606536865234375, -0.0782470703125, 0.007354736328125, 0.005786895751953125, -0.0150299072265625, 0.035430908203125, -0.060394287109375, 0.0665283203125, -0.01263427734375, -0.02227783203125, -0.008758544921875, -0.04803466796875, 0.034912109375, -0.0003540515899658203, 0.0340576171875, -0.0222930908203125, -0.0081024169921875, 0.029693603515625, -0.04754638671875, 0.06884765625, -0.0181884765625, 0.0093231201171875, 0.02728271484375, 0.028900146484375, 0.01534271240234375, 0.0169525146484375, 0.0275726318359375, 0.04193115234375, 0.0196075439453125, -0.041107177734375, -0.0281219482421875, 0.05767822265625, -0.07220458984375, -0.039337158203125, -0.03912353515625, -0.021514892578125, 0.0048675537109375, 0.034637451171875, 0.017578125, 0.0121612548828125, -0.01099395751953125, 0.02545166015625, 0.0046539306640625, 
-0.051727294921875, 0.031829833984375, 0.04498291015625, -0.0367431640625, -0.0382080078125, 0.062347412109375, -0.000766754150390625, 0.01019287109375, 0.00925445556640625, 0.0159454345703125, -0.011505126953125, -0.029754638671875, -0.033782958984375, 0.017333984375, -0.0460205078125, -0.022674560546875, -0.03021240234375, 0.00576019287109375, -0.0584716796875, -0.0180816650390625, -0.0143280029296875, -0.034210205078125, -0.0089111328125, -0.001773834228515625, 0.04510498046875, 0.01459503173828125, -0.0223388671875, 0.01043701171875, -0.07928466796875, 0.039093017578125, -0.0034351348876953125, 0.055572509765625, -0.0019283294677734375, -0.0140228271484375, -0.026580810546875, 0.012969970703125, -0.0360107421875, -0.0758056640625, 0.032501220703125, -0.017974853515625, 0.049896240234375, 0.044769287109375, 0.0302276611328125, 0.047698974609375, -0.0298614501953125, 0.06494140625, 0.0609130859375, -0.049102783203125, 0.05999755859375, -0.043182373046875, 0.0261688232421875, 0.0302276611328125, 0.0592041015625, -0.0418701171875, -0.028961181640625, -0.04315185546875, -0.065185546875, 0.06689453125, 0.0274810791015625, 0.01270294189453125, 0.0009393692016601562, -0.0158233642578125, 0.003570556640625, 0.021514892578125, -0.055908203125, -0.031585693359375, -0.03936767578125, -0.01104736328125, 0.0140838623046875, 0.00033783912658691406, -0.0175323486328125, -0.0090179443359375, 0.051025390625, 0.01204681396484375, 0.035797119140625, 0.0011615753173828125, 0.002483367919921875, -0.026123046875, 0.02203369140625, 0.042236328125, 0.05279541015625, -0.039642333984375, -0.020904541015625, -0.014373779296875, -0.035064697265625, -0.00289154052734375, 0.01194000244140625, -0.0197601318359375, 0.0038356781005859375, 0.00946807861328125, 0.05401611328125, 0.01335906982421875, -0.056243896484375, 0.051025390625, -0.00911712646484375, 0.01023101806640625, -0.0460205078125, -0.0018987655639648438, 0.01195526123046875, 0.021697998046875, 0.004116058349609375, 
0.00988006591796875, 0.006046295166015625, -0.038238525390625, -0.01617431640625, 0.019195556640625, -0.037384033203125, -0.01247406005859375, 0.059295654296875, 0.0287933349609375, -0.045867919921875, 0.04571533203125, -0.002040863037109375, -0.01108551025390625, 0.04541015625, 0.02398681640625, 0.0721435546875, -0.0406494140625, 0.01079559326171875, 0.047393798828125, 0.0291748046875, 0.0189971923828125, 0.051483154296875, 0.006542205810546875, -0.046630859375, -0.03131103515625, -0.020660400390625, -0.037872314453125, 0.0139007568359375, -0.055755615234375, 0.034271240234375, -0.037506103515625, -0.0276031494140625, 0.007549285888671875, -0.0258026123046875, -0.044219970703125, -0.00965118408203125, -0.005420684814453125, 0.0665283203125, -0.033660888671875, 0.045440673828125, 0.06787109375, -0.06781005859375, -0.042266845703125, -0.0166015625, 0.006450653076171875, -0.053558349609375, 0.0272674560546875, 0.01471710205078125, 0.007030487060546875, -0.02691650390625, -0.035400390625, -0.053009033203125, 0.075927734375, 0.0093231201171875, -0.0161285400390625, -0.0124664306640625, 0.0003352165222167969, 0.0209808349609375, 0.0015249252319335938, 0.04638671875, -0.007724761962890625, 0.040496826171875, -0.00962066650390625, -0.10491943359375, 0.0262603759765625, -0.02850341796875, -0.0107574462890625, 0.014129638671875, -0.0654296875, 0.07220458984375, -0.03509521484375, -0.01093292236328125, 0.01074981689453125, 0.03338623046875, 0.0203094482421875, 0.0287933349609375, 0.030548095703125, 0.021820068359375, 0.0367431640625, -0.015777587890625, 0.0703125, -0.0352783203125, 0.03515625, 0.06842041015625, 0.0084381103515625, 0.05938720703125, 0.017578125, -0.03363037109375, 0.050018310546875, 0.03668212890625, -0.0015230178833007812, 0.0203094482421875, 0.0006146430969238281, -0.004985809326171875, -0.0021953582763671875, 0.0086517333984375, -0.046844482421875, 0.0241851806640625, 0.0306854248046875, -0.0170440673828125, -0.0174407958984375, 0.0149383544921875, 
0.006725311279296875, -0.0070037841796875, -0.003040313720703125, 0.056182861328125, -0.003108978271484375, -0.0297393798828125, 0.0533447265625, 0.00797271728515625, 0.042724609375, -0.06103515625, -0.0020961761474609375, -0.0075225830078125, 0.017303466796875, -0.024169921875, -0.060333251953125, 0.006824493408203125, -0.003662109375, 0.00140380859375, -0.0021228790283203125, 0.057647705078125, -0.00379180908203125, -0.01629638671875, 0.02545166015625, 0.044219970703125, 0.0216217041015625, -0.0011959075927734375, -0.0643310546875, -0.0014448165893554688, -0.0038471221923828125, -0.041168212890625, 0.018646240234375, 0.038482666015625, 0.00450897216796875, 0.0675048828125, 0.0565185546875, 0.00621795654296875, -0.002582550048828125, -0.004669189453125, 0.0675048828125, -0.048553466796875, -0.059661865234375, -0.04290771484375, 0.06195068359375, -0.00626373291015625, -0.0279388427734375, 0.06561279296875, 0.05316162109375, 0.07330322265625, -0.017333984375, 0.07049560546875, -0.0160064697265625, 0.050262451171875, -0.0182647705078125, 0.05804443359375, -0.050445556640625, -0.0257415771484375, -0.031829833984375, -0.046295166015625, -0.0150299072265625, 0.06103515625, -0.01439666748046875, 0.013031005859375, 0.046783447265625, 0.047027587890625, 0.0009598731994628906, 0.01311492919921875, 0.0209197998046875, 0.0280914306640625, 0.0174560546875, 0.0404052734375, 0.049468994140625, -0.0272979736328125, 0.076171875, -0.0256500244140625, -0.036468505859375, -0.035125732421875, -0.033447265625, -0.08258056640625, -0.03668212890625, -0.0335693359375, -0.042266845703125, -0.006153106689453125, 0.06494140625, 0.057220458984375, -0.066162109375, -0.032012939453125, 0.0149383544921875, 0.01113128662109375, -0.03045654296875, -0.025360107421875, 0.0244293212890625, -0.006496429443359375, -0.07196044921875, 0.0009322166442871094, 0.0120086669921875, 0.0149078369140625, -0.025054931640625, 0.00005537271499633789, -0.0070343017578125, 0.0036449432373046875, 0.04217529296875, 
0.028533935546875, -0.05633544921875, -0.01861572265625, -0.01041412353515625, 0.0011949539184570312, 0.0030498504638671875, 0.0309600830078125, -0.0419921875, 0.049072265625, 0.0533447265625, 0.004302978515625, 0.0268096923828125, -0.018951416015625, 0.0181884765625, -0.039459228515625, 0.0297088623046875, 0.0111083984375, 0.036376953125, -0.0007195472717285156, -0.0201416015625, 0.049530029296875, 0.00971221923828125, -0.03814697265625, -0.067138671875, 0.00989532470703125, -0.07379150390625, -0.0399169921875, 0.0828857421875, -0.0242767333984375, 0.0013675689697265625, -0.01142120361328125, -0.037841796875, 0.03546142578125, -0.058807373046875, 0.0550537109375, 0.043243408203125, -0.0149383544921875, -0.00008958578109741211, -0.05950927734375, 0.0070953369140625, -0.006183624267578125, -0.058624267578125, -0.00940704345703125, 0.04638671875, 0.0142364501953125, 0.02716064453125, 0.056884765625, -0.0183563232421875, 0.0311279296875, 0.0026683807373046875, 0.031463623046875, -0.0283050537109375, -0.0008244514465332031, -0.00548553466796875, 0.020965576171875, -0.024566650390625, -0.040802001953125 ] ]
Mikael110/llama-2-13b-guanaco-fp16
2023-07-20T00:16:26.000Z
[ "transformers", "pytorch", "llama", "text-generation", "llama-2", "text-classification", "en", "endpoints_compatible", "has_space", "text-generation-inference", "region:us" ]
text-classification
Mikael110
null
null
Mikael110/llama-2-13b-guanaco-fp16
11
5,834
transformers
2023-07-19T21:47:49
--- language: - en pipeline_tag: text-classification tags: - llama-2 --- This is a Llama-2 version of [Guanaco](https://huggingface.co/timdettmers/guanaco-13b). It was finetuned from the base [Llama-13b](https://huggingface.co/meta-llama/Llama-2-13b-hf) model using the official training scripts found in the [QLoRA repo](https://github.com/artidoro/qlora). I wanted it to be as faithful as possible and therefore changed nothing in the training script beyond the model it was pointing to. The model prompt is therefore also the same as the original Guanaco model. This repo contains the merged f16 model. The QLoRA adaptor can be found [here](https://huggingface.co/Mikael110/llama-2-13b-guanaco-qlora). A 7b version of the model can be found [here](https://huggingface.co/Mikael110/llama-2-7b-guanaco-fp16). **Legal Disclaimer: This model is bound by the usage restrictions of the original Llama-2 model. And comes with no warranty or gurantees of any kind.**
964
[ [ 0.0009417533874511719, -0.0205841064453125, 0.040557861328125, 0.036895751953125, -0.05572509765625, -0.0069732666015625, 0.012542724609375, -0.052490234375, 0.02056884765625, 0.0295562744140625, -0.054046630859375, -0.0254058837890625, -0.0313720703125, 0.004146575927734375, -0.01424407958984375, 0.0908203125, 0.003566741943359375, 0.006275177001953125, -0.00453948974609375, -0.0241241455078125, -0.041046142578125, -0.0211334228515625, -0.045562744140625, -0.0399169921875, 0.06591796875, 0.012359619140625, 0.07379150390625, 0.04742431640625, 0.0225982666015625, 0.00983428955078125, -0.0214385986328125, 0.0136566162109375, -0.03936767578125, -0.0274810791015625, -0.013641357421875, -0.03314208984375, -0.058837890625, 0.001827239990234375, 0.0357666015625, -0.0139007568359375, -0.035858154296875, 0.015472412109375, -0.0128936767578125, 0.025665283203125, -0.029144287109375, 0.01105499267578125, -0.051239013671875, -0.00997161865234375, 0.004604339599609375, 0.003780364990234375, -0.0212249755859375, -0.0284576416015625, 0.0013523101806640625, -0.06231689453125, -0.00147247314453125, -0.01064300537109375, 0.09051513671875, 0.03143310546875, -0.038665771484375, -0.018646240234375, -0.038421630859375, 0.037322998046875, -0.0531005859375, 0.0064697265625, 0.0255584716796875, 0.061431884765625, -0.018768310546875, -0.06878662109375, -0.044342041015625, -0.0153961181640625, -0.007022857666015625, -0.0179595947265625, -0.029266357421875, -0.0030918121337890625, 0.00616455078125, 0.0230255126953125, -0.042327880859375, 0.0160064697265625, -0.057373046875, -0.0095367431640625, 0.04541015625, -0.00461578369140625, 0.0174713134765625, -0.00554656982421875, -0.053253173828125, -0.025299072265625, -0.07684326171875, -0.004688262939453125, 0.0269927978515625, 0.00418853759765625, -0.038787841796875, 0.02960205078125, 0.0015249252319335938, 0.039825439453125, 0.01160430908203125, -0.022308349609375, 0.042266845703125, -0.0059814453125, -0.024871826171875, 
-0.007595062255859375, 0.035919189453125, 0.05126953125, 0.0225372314453125, -0.0022602081298828125, -0.01474761962890625, -0.01201629638671875, 0.023956298828125, -0.048004150390625, -0.0202178955078125, 0.01467132568359375, -0.0276641845703125, -0.036376953125, 0.01617431640625, -0.0212554931640625, -0.0203399658203125, -0.0036487579345703125, 0.0201873779296875, -0.0031528472900390625, -0.03045654296875, -0.002613067626953125, -0.0002294778823852539, 0.0577392578125, 0.03460693359375, -0.0540771484375, -0.0013093948364257812, 0.04791259765625, 0.06976318359375, 0.01800537109375, -0.0207061767578125, -0.0244140625, 0.017547607421875, -0.00856781005859375, 0.07421875, -0.0156707763671875, -0.040130615234375, 0.009033203125, 0.0323486328125, -0.006534576416015625, -0.051513671875, 0.045318603515625, -0.05364990234375, -0.0003230571746826172, -0.0264892578125, -0.005985260009765625, -0.0438232421875, 0.0146942138671875, -0.05853271484375, 0.07293701171875, 0.041351318359375, -0.04022216796875, 0.007080078125, -0.03875732421875, 0.0040435791015625, -0.017181396484375, -0.00013005733489990234, -0.0379638671875, -0.0114593505859375, -0.00778961181640625, -0.005435943603515625, -0.030059814453125, 0.013916015625, -0.0501708984375, -0.0400390625, 0.0105133056640625, 0.0004513263702392578, 0.07513427734375, 0.01422882080078125, -0.015960693359375, 0.0186920166015625, -0.06298828125, -0.0078887939453125, 0.0231475830078125, -0.0249176025390625, -0.00832366943359375, -0.0279998779296875, 0.00019693374633789062, 0.040130615234375, 0.04144287109375, -0.029510498046875, 0.033111572265625, -0.007091522216796875, 0.031494140625, 0.0517578125, 0.0016870498657226562, 0.0232086181640625, -0.055694580078125, 0.049591064453125, -0.006114959716796875, 0.045928955078125, 0.0031871795654296875, -0.049224853515625, -0.05731201171875, -0.032440185546875, 0.0002758502960205078, 0.0280609130859375, -0.0211334228515625, 0.019073486328125, 0.0014352798461914062, -0.0635986328125, 
-0.033294677734375, 0.0031642913818359375, 0.030181884765625, 0.0133819580078125, 0.032135009765625, -0.0183563232421875, -0.054107666015625, -0.0675048828125, 0.005886077880859375, -0.03131103515625, -0.0008606910705566406, 0.0024623870849609375, 0.038177490234375, -0.03668212890625, 0.0504150390625, -0.028289794921875, -0.019683837890625, -0.004734039306640625, -0.0225830078125, 0.029632568359375, 0.05206298828125, 0.08990478515625, -0.0247039794921875, -0.027618408203125, 0.00994873046875, -0.056915283203125, -0.0122833251953125, -0.00966644287109375, -0.042388916015625, -0.006175994873046875, 0.00165557861328125, -0.06591796875, 0.0526123046875, 0.042816162109375, -0.023345947265625, 0.040374755859375, -0.01384735107421875, -0.01007843017578125, -0.07391357421875, 0.0185089111328125, -0.0173797607421875, -0.0156097412109375, -0.0308990478515625, 0.0187835693359375, 0.0207977294921875, 0.02996826171875, -0.047332763671875, 0.049835205078125, -0.021881103515625, -0.0164794921875, -0.047882080078125, -0.03497314453125, 0.01351165771484375, 0.0360107421875, -0.0148162841796875, 0.053802490234375, 0.0219573974609375, -0.0240478515625, 0.0294189453125, 0.03680419921875, -0.0006537437438964844, 0.02911376953125, -0.08941650390625, 0.043182373046875, -0.0086212158203125, 0.044464111328125, -0.0577392578125, -0.028778076171875, 0.053741455078125, -0.0184326171875, -0.0157928466796875, -0.0308685302734375, -0.026519775390625, -0.0258636474609375, -0.0309295654296875, 0.0439453125, 0.059844970703125, -0.0762939453125, 0.028839111328125, 0.0012655258178710938, 0.0141754150390625, -0.0306854248046875, -0.045257568359375, -0.01496124267578125, -0.037872314453125, -0.03546142578125, 0.031463623046875, -0.01434326171875, -0.01377105712890625, -0.018402099609375, -0.01556396484375, -0.037506103515625, -0.0022373199462890625, 0.04608154296875, 0.04071044921875, -0.0197906494140625, -0.0202484130859375, 0.0284576416015625, 0.01013946533203125, 0.00921630859375, 
0.003620147705078125, 0.039581298828125, 0.0007052421569824219, -0.007904052734375, -0.05291748046875, -0.0008916854858398438, 0.048736572265625, -0.0062713623046875, 0.0538330078125, 0.03326416015625, -0.04107666015625, 0.0011491775512695312, -0.037750244140625, -0.005245208740234375, -0.03759765625, -0.0105133056640625, -0.020538330078125, -0.03131103515625, 0.050079345703125, 0.0235443115234375, 0.003444671630859375, 0.0430908203125, 0.03521728515625, -0.005931854248046875, 0.04632568359375, 0.050384521484375, 0.00788116455078125, 0.05706787109375, -0.0234527587890625, -0.0172119140625, -0.06884765625, -0.05584716796875, -0.03619384765625, -0.01280975341796875, -0.0139312744140625, -0.029205322265625, 0.014251708984375, 0.0178375244140625, -0.043182373046875, 0.06658935546875, -0.0212554931640625, 0.0264892578125, 0.042388916015625, 0.031524658203125, 0.041168212890625, 0.0096588134765625, 0.0125579833984375, 0.01268768310546875, -0.044921875, -0.050750732421875, 0.078369140625, 0.0286865234375, 0.045074462890625, 0.0193939208984375, 0.04022216796875, 0.0231475830078125, 0.039794921875, -0.036224365234375, 0.0152130126953125, 0.00740814208984375, -0.049407958984375, 0.0243988037109375, 0.004978179931640625, -0.06768798828125, 0.029510498046875, 0.005077362060546875, -0.04046630859375, 0.031097412109375, 0.021820068359375, -0.02716064453125, -0.0009908676147460938, -0.035400390625, 0.062255859375, -0.0091400146484375, -0.0010395050048828125, -0.02728271484375, -0.03533935546875, 0.049407958984375, -0.0030612945556640625, -0.0025844573974609375, -0.0152130126953125, 0.00732421875, 0.038665771484375, -0.06884765625, 0.060821533203125, -0.018707275390625, -0.038909912109375, 0.053070068359375, -0.01287078857421875, 0.045013427734375, 0.0250701904296875, -0.0163421630859375, 0.0078277587890625, 0.0022144317626953125, -0.046783447265625, -0.04833984375, 0.047393798828125, -0.07464599609375, -0.035430908203125, -0.035919189453125, -0.01519012451171875, 
0.0100555419921875, 0.00142669677734375, 0.015380859375, 0.005123138427734375, -0.0106353759765625, -0.003875732421875, 0.0164031982421875, 0.01141357421875, 0.0231170654296875, 0.044891357421875, -0.01171875, -0.052154541015625, 0.026763916015625, -0.01184844970703125, 0.0133209228515625, 0.006664276123046875, 0.01151275634765625, -0.02899169921875, -0.029083251953125, -0.049163818359375, 0.04345703125, -0.043304443359375, -0.0295257568359375, -0.014434814453125, -0.0185699462890625, -0.01253509521484375, -0.005321502685546875, -0.0178375244140625, -0.028778076171875, -0.049224853515625, -0.023223876953125, 0.0609130859375, 0.060638427734375, -0.011627197265625, 0.063720703125, -0.055999755859375, 0.0078277587890625, 0.045989990234375, -0.012298583984375, -0.0016355514526367188, -0.08453369140625, -0.0109405517578125, 0.017486572265625, -0.028228759765625, -0.06591796875, 0.0226593017578125, 0.00733184814453125, 0.039794921875, 0.034759521484375, -0.004974365234375, 0.054443359375, -0.0195465087890625, 0.04473876953125, 0.0241546630859375, -0.03887939453125, 0.040252685546875, -0.0472412109375, 0.00733184814453125, 0.03692626953125, 0.0193634033203125, -0.00928497314453125, 0.0035457611083984375, -0.052825927734375, -0.042755126953125, 0.03485107421875, 0.008636474609375, 0.006320953369140625, 0.0151519775390625, 0.049468994140625, 0.0225372314453125, 0.023834228515625, -0.0670166015625, -0.00872039794921875, -0.03936767578125, -0.0121002197265625, 0.0111236572265625, -0.0224761962890625, -0.02618408203125, -0.00916290283203125, 0.044647216796875, -0.01445770263671875, 0.00920867919921875, 0.007534027099609375, -0.022430419921875, -0.0121002197265625, -0.02349853515625, 0.05902099609375, 0.04608154296875, -0.0285491943359375, -0.0081939697265625, 0.008880615234375, -0.036376953125, 0.017730712890625, -0.00518798828125, -0.005878448486328125, 0.008453369140625, 0.0247344970703125, 0.07513427734375, 0.029144287109375, -0.04718017578125, 0.0316162109375, 
0.00777435302734375, -0.00417327880859375, -0.028778076171875, 0.033233642578125, 0.005985260009765625, 0.04827880859375, 0.027801513671875, 0.010406494140625, -0.00225067138671875, -0.036163330078125, 0.005191802978515625, 0.015960693359375, -0.00490570068359375, -0.053314208984375, 0.05462646484375, 0.01473236083984375, -0.0217437744140625, 0.037872314453125, -0.01312255859375, -0.01629638671875, 0.07281494140625, 0.057159423828125, 0.0526123046875, -0.0195465087890625, 0.0242919921875, 0.04827880859375, 0.022247314453125, -0.0283203125, 0.0310211181640625, -0.0009374618530273438, -0.02996826171875, 0.005016326904296875, -0.0223236083984375, -0.031890869140625, 0.0012979507446289062, -0.06927490234375, 0.031463623046875, -0.0654296875, -0.01263427734375, -0.03704833984375, -0.0048065185546875, -0.04608154296875, 0.0225372314453125, -0.0003886222839355469, 0.07745361328125, -0.0545654296875, 0.0927734375, 0.054290771484375, -0.042633056640625, -0.0723876953125, -0.017333984375, -0.0016031265258789062, -0.0986328125, 0.0155487060546875, 0.00652313232421875, -0.00037217140197753906, -0.013580322265625, -0.040313720703125, -0.0694580078125, 0.12225341796875, 0.043792724609375, -0.039794921875, -0.00966644287109375, -0.013153076171875, 0.041656494140625, -0.03509521484375, 0.025177001953125, 0.038421630859375, 0.03564453125, 0.018768310546875, -0.08074951171875, 0.0173797607421875, -0.0154876708984375, 0.015838623046875, -0.042266845703125, -0.09124755859375, 0.0738525390625, -0.032928466796875, 0.00463104248046875, 0.058685302734375, 0.0638427734375, 0.061065673828125, 0.0187835693359375, 0.041656494140625, 0.03021240234375, 0.06005859375, 0.01617431640625, 0.062744140625, -0.0036773681640625, 0.044464111328125, 0.087158203125, -0.02685546875, 0.0638427734375, 0.0430908203125, -0.0204620361328125, 0.07745361328125, 0.06768798828125, -0.0104522705078125, 0.05352783203125, 0.004718780517578125, -0.022247314453125, 0.00643157958984375, -0.016265869140625, 
-0.06195068359375, 0.02337646484375, 0.0019121170043945312, -0.030181884765625, -0.01953125, -0.0321044921875, 0.01226806640625, -0.01213836669921875, -0.0184326171875, 0.041351318359375, 0.0095672607421875, -0.035003662109375, 0.0709228515625, -0.00688934326171875, 0.047210693359375, -0.048858642578125, -0.0028934478759765625, -0.0428466796875, -0.0109100341796875, -0.033843994140625, -0.0322265625, 0.0194549560546875, 0.01468658447265625, -0.0170440673828125, 0.012420654296875, 0.043243408203125, -0.0208282470703125, -0.021026611328125, 0.0258331298828125, 0.02288818359375, 0.0232696533203125, 0.0028247833251953125, -0.053314208984375, 0.03839111328125, 0.0027751922607421875, -0.005496978759765625, 0.03302001953125, -0.006961822509765625, -0.008209228515625, 0.046783447265625, 0.041015625, -0.0308380126953125, 0.01229095458984375, 0.007335662841796875, 0.069091796875, -0.0361328125, -0.03692626953125, -0.0396728515625, 0.03656005859375, 0.0017385482788085938, -0.054229736328125, 0.030609130859375, 0.0261688232421875, 0.054656982421875, -0.035888671875, 0.040924072265625, 0.004718780517578125, 0.0025463104248046875, -0.044464111328125, 0.0474853515625, -0.04608154296875, 0.00409698486328125, -0.00598907470703125, -0.07177734375, 0.003559112548828125, 0.09234619140625, 0.013153076171875, 0.0068511962890625, 0.049468994140625, 0.06878662109375, -0.00033736228942871094, -0.00525665283203125, 0.004520416259765625, 0.0082244873046875, 0.0222015380859375, 0.045562744140625, 0.0653076171875, -0.05810546875, 0.048095703125, -0.0185394287109375, -0.008575439453125, -0.028472900390625, -0.0650634765625, -0.06036376953125, -0.01450347900390625, -0.0291595458984375, -0.040557861328125, 0.01495361328125, 0.07281494140625, 0.059112548828125, -0.04180908203125, -0.0247955322265625, -0.00223541259765625, 0.0136871337890625, -0.00690460205078125, -0.00760650634765625, 0.002166748046875, 0.0258941650390625, -0.05316162109375, 0.041961669921875, -0.00910186767578125, 
0.047882080078125, -0.01446533203125, -0.0228729248046875, -0.0023670196533203125, -0.01097869873046875, 0.0211334228515625, 0.048065185546875, -0.05108642578125, -0.043487548828125, -0.01036834716796875, -0.016021728515625, 0.0207061767578125, 0.032501220703125, -0.050506591796875, -0.023712158203125, 0.0209197998046875, 0.0226287841796875, 0.036651611328125, -0.006183624267578125, 0.040985107421875, -0.0197906494140625, 0.037750244140625, -0.016815185546875, 0.038238525390625, 0.0204620361328125, -0.0156097412109375, 0.046722412109375, 0.015899658203125, -0.028839111328125, -0.0673828125, 0.01219940185546875, -0.105712890625, 0.007904052734375, 0.0938720703125, -0.025299072265625, -0.03216552734375, 0.0299224853515625, -0.06005859375, 0.0286865234375, -0.03900146484375, 0.0577392578125, 0.0203399658203125, 0.010040283203125, -0.005977630615234375, -0.019927978515625, 0.01629638671875, 0.01629638671875, -0.05792236328125, -0.0345458984375, 0.0271759033203125, 0.048309326171875, -0.00616455078125, 0.040435791015625, -0.01031494140625, 0.05023193359375, -0.01312255859375, 0.0121002197265625, -0.0146636962890625, -0.031494140625, -0.039215087890625, -0.0100555419921875, 0.0261688232421875, -0.0263671875 ] ]
pankajmathur/model_51
2023-08-25T23:45:57.000Z
[ "transformers", "pytorch", "llama", "text-generation", "en", "dataset:ehartford/dolphin", "dataset:psmathur/orca_mini_v1_dataset", "arxiv:2306.02707", "license:llama2", "endpoints_compatible", "text-generation-inference", "region:us" ]
text-generation
pankajmathur
null
null
pankajmathur/model_51
1
5,832
transformers
2023-08-03T15:08:32
--- language: - en library_name: transformers license: llama2 datasets: - ehartford/dolphin - psmathur/orca_mini_v1_dataset --- # model_51 A Llama2-70b model trained on Orca Style datasets. **P.S. If you're interested to collaborate, please connect with me at www.linkedin.com/in/pankajam** ## Evaluation We evaluated model_51 on a wide range of tasks using [Language Model Evaluation Harness](https://github.com/EleutherAI/lm-evaluation-harness) from EleutherAI. Here are the results on metrics used by [HuggingFaceH4 Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard) ||||| |:------:|:--------:|:-------:|:--------:| |**Task**|**Metric**|**Value**|**Stderr**| |*arc_challenge*|acc_norm|0.6843|0.0141| |*hellaswag*|acc_norm|0.8671|0.0038| |*mmlu*|acc_norm|0.6931|0.0351| |*truthfulqa_mc*|mc2|0.5718|0.0157| |**Total Average**|-|**0.7041**|| ## Example Usage Here is the prompt format ``` ### System: You are an AI assistant that follows instruction extremely well. Help as much as you can. ### User: Tell me about Orcas. ### Assistant: ``` Below shows a code example on how to use this model ```python import torch from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline tokenizer = AutoTokenizer.from_pretrained("psmathur/model_51") model = AutoModelForCausalLM.from_pretrained( "psmathur/model_51", torch_dtype=torch.float16, load_in_8bit=True, low_cpu_mem_usage=True, device_map="auto" ) system_prompt = "### System:\nYou are an AI assistant that follows instruction extremely well. Help as much as you can.\n\n" #generate text steps instruction = "Tell me about Orcas." 
prompt = f"{system_prompt}### User: {instruction}\n\n### Assistant:\n" inputs = tokenizer(prompt, return_tensors="pt").to("cuda") output = model.generate(**inputs, do_sample=True, top_p=0.95, top_k=0, max_new_tokens=4096) print(tokenizer.decode(output[0], skip_special_tokens=True)) ``` #### Limitations & Biases: While this model aims for accuracy, it can occasionally produce inaccurate or misleading results. Despite diligent efforts in refining the pretraining data, there remains a possibility for the generation of inappropriate, biased, or offensive content. Exercise caution and cross-check information when necessary. ### Citiation: Please kindly cite using the following BibTeX: ``` @misc{model_51, author = {Pankaj Mathur}, title = {model_51: An Orca Style Llama2-70b model}, year = {2023}, publisher = {HuggingFace}, journal = {HuggingFace repository}, howpublished = {\url{https://https://huggingface.co/psmathur/model_51}, } ``` ``` @misc{mukherjee2023orca, title={Orca: Progressive Learning from Complex Explanation Traces of GPT-4}, author={Subhabrata Mukherjee and Arindam Mitra and Ganesh Jawahar and Sahaj Agarwal and Hamid Palangi and Ahmed Awadallah}, year={2023}, eprint={2306.02707}, archivePrefix={arXiv}, primaryClass={cs.CL} } ``` ``` @software{touvron2023llama2, title={Llama 2: Open Foundation and Fine-Tuned Chat Models}, author={Hugo Touvron, Louis Martin, Kevin Stone, Peter Albert, Amjad Almahairi, Yasmine Babaei, Nikolay Bashlykov, Soumya Batra, Prajjwal Bhargava, Shruti Bhosale, Dan Bikel, Lukas Blecher, Cristian Canton Ferrer, Moya Chen, Guillem Cucurull, David Esiobu, Jude Fernandes, Jeremy Fu, Wenyin Fu, Brian Fuller, Cynthia Gao, Vedanuj Goswami, Naman Goyal, Anthony Hartshorn, Saghar Hosseini, Rui Hou, Hakan Inan, Marcin Kardas, Viktor Kerkez Madian Khabsa, Isabel Kloumann, Artem Korenev, Punit Singh Koura, Marie-Anne Lachaux, Thibaut Lavril, Jenya Lee, Diana Liskovich, Yinghai Lu, Yuning Mao, Xavier Martinet, Todor Mihaylov, Pushkar Mishra, Igor 
Molybog, Yixin Nie, Andrew Poulton, Jeremy Reizenstein, Rashi Rungta, Kalyan Saladi, Alan Schelten, Ruan Silva, Eric Michael Smith, Ranjan Subramanian, Xiaoqing Ellen Tan, Binh Tang, Ross Taylor, Adina Williams, Jian Xiang Kuan, Puxin Xu , Zheng Yan, Iliyan Zarov, Yuchen Zhang, Angela Fan, Melanie Kambadur, Sharan Narang, Aurelien Rodriguez, Robert Stojnic, Sergey Edunov, Thomas Scialom}, year={2023} } ```
4,134
[ [ -0.019256591796875, -0.05950927734375, 0.017120361328125, 0.01116180419921875, -0.015167236328125, 0.00400543212890625, -0.0099029541015625, -0.042938232421875, 0.0126495361328125, 0.0186920166015625, -0.053070068359375, -0.052154541015625, -0.04486083984375, -0.0086212158203125, -0.0228729248046875, 0.07598876953125, -0.00592803955078125, -0.0080108642578125, 0.007251739501953125, -0.0008559226989746094, -0.036163330078125, -0.0266571044921875, -0.064697265625, -0.0321044921875, 0.01293182373046875, 0.0204620361328125, 0.038665771484375, 0.049896240234375, 0.0350341796875, 0.0265960693359375, -0.019317626953125, 0.0185394287109375, -0.038177490234375, -0.0109710693359375, 0.0166778564453125, -0.048431396484375, -0.07647705078125, 0.0001951456069946289, 0.0307464599609375, 0.0222625732421875, -0.01200103759765625, 0.030670166015625, 0.00628662109375, 0.0292510986328125, -0.028106689453125, 0.031219482421875, -0.0246429443359375, 0.0018672943115234375, -0.031982421875, -0.00199127197265625, -0.0078125, -0.0202484130859375, 0.007720947265625, -0.049591064453125, 0.005950927734375, -0.006267547607421875, 0.08746337890625, 0.0279083251953125, -0.0220794677734375, -0.020355224609375, -0.03216552734375, 0.056121826171875, -0.0814208984375, 0.016143798828125, 0.0160064697265625, 0.0198516845703125, -0.019775390625, -0.06640625, -0.054107666015625, -0.01070404052734375, -0.0029506683349609375, 0.017181396484375, -0.01593017578125, -0.00359344482421875, 0.025238037109375, 0.029571533203125, -0.036468505859375, 0.016937255859375, -0.0391845703125, -0.017822265625, 0.047607421875, 0.032745361328125, 0.0202789306640625, -0.0071868896484375, -0.0249481201171875, -0.019622802734375, -0.04742431640625, 0.0306396484375, 0.037872314453125, 0.01113128662109375, -0.04376220703125, 0.049102783203125, -0.0161590576171875, 0.051055908203125, 0.0093231201171875, -0.0278167724609375, 0.042724609375, -0.03045654296875, -0.028900146484375, -0.00998687744140625, 0.0638427734375, 
0.020050048828125, 0.01027679443359375, 0.0161895751953125, -0.00626373291015625, 0.01346588134765625, -0.0033168792724609375, -0.05908203125, -0.0209808349609375, 0.0197296142578125, -0.03289794921875, -0.030670166015625, -0.0013017654418945312, -0.059295654296875, -0.0165863037109375, -0.01409149169921875, 0.0236663818359375, -0.02838134765625, -0.0312042236328125, 0.0223236083984375, 0.01287841796875, 0.042236328125, 0.00890350341796875, -0.064208984375, 0.024749755859375, 0.028533935546875, 0.06292724609375, -0.006145477294921875, -0.01910400390625, -0.00872802734375, -0.0033111572265625, -0.01568603515625, 0.047607421875, -0.0279083251953125, -0.02581787109375, -0.0234832763671875, 0.0016078948974609375, -0.00875091552734375, -0.025146484375, 0.042816162109375, -0.0243682861328125, 0.026824951171875, -0.0238037109375, -0.02606201171875, -0.0294647216796875, 0.0203399658203125, -0.0372314453125, 0.09375, -0.004650115966796875, -0.0592041015625, 0.0152130126953125, -0.0645751953125, -0.0098114013671875, -0.0237579345703125, -0.01230621337890625, -0.05389404296875, -0.0206298828125, 0.027191162109375, 0.03558349609375, -0.024627685546875, 0.0179443359375, -0.0264892578125, -0.029754638671875, 0.00890350341796875, -0.0233306884765625, 0.07733154296875, 0.01442718505859375, -0.06146240234375, 0.02130126953125, -0.0606689453125, -0.00843048095703125, 0.028076171875, -0.0270233154296875, 0.00836944580078125, -0.01275634765625, -0.0156097412109375, 0.0157623291015625, 0.02923583984375, -0.038330078125, 0.020660400390625, -0.02728271484375, 0.04248046875, 0.05963134765625, -0.012908935546875, 0.019317626953125, -0.0253753662109375, 0.039764404296875, -0.0003085136413574219, 0.0149078369140625, 0.0028133392333984375, -0.046966552734375, -0.0814208984375, -0.025604248046875, 0.0157928466796875, 0.039093017578125, -0.032440185546875, 0.045013427734375, -0.0080413818359375, -0.055023193359375, -0.0310516357421875, -0.0008416175842285156, 0.0308990478515625, 
0.05255126953125, 0.0285797119140625, -0.03143310546875, -0.049530029296875, -0.05877685546875, 0.004680633544921875, -0.009490966796875, -0.00293731689453125, 0.030426025390625, 0.03863525390625, -0.0221710205078125, 0.0684814453125, -0.037689208984375, -0.02642822265625, -0.0215606689453125, 0.0036869049072265625, 0.032958984375, 0.044464111328125, 0.051177978515625, -0.035888671875, -0.0214996337890625, -0.017822265625, -0.07049560546875, -0.00702667236328125, 0.00542449951171875, -0.0287933349609375, 0.01198577880859375, 0.0301666259765625, -0.058197021484375, 0.04583740234375, 0.042205810546875, -0.035736083984375, 0.055419921875, -0.00559234619140625, 0.00026345252990722656, -0.075927734375, 0.0236968994140625, -0.0020771026611328125, -0.0085601806640625, -0.04144287109375, -0.00786590576171875, -0.00849151611328125, 0.0007925033569335938, -0.03143310546875, 0.051025390625, -0.031646728515625, 0.0011758804321289062, -0.0110015869140625, 0.0036563873291015625, -0.01012420654296875, 0.054718017578125, -0.003185272216796875, 0.04583740234375, 0.057464599609375, -0.0355224609375, 0.0300140380859375, 0.0245819091796875, -0.032928466796875, 0.0288543701171875, -0.06787109375, 0.024505615234375, 0.0069122314453125, 0.033721923828125, -0.09295654296875, -0.020416259765625, 0.04022216796875, -0.039459228515625, 0.034423828125, 0.00914764404296875, -0.0272064208984375, -0.03680419921875, -0.02874755859375, 0.030487060546875, 0.0297088623046875, -0.031524658203125, 0.04486083984375, 0.02545166015625, -0.00118255615234375, -0.0565185546875, -0.055206298828125, -0.01087188720703125, -0.0305328369140625, -0.053863525390625, 0.02386474609375, -0.0178375244140625, 0.0019121170043945312, -0.00826263427734375, -0.017364501953125, 0.0091705322265625, 0.0060882568359375, 0.0251007080078125, 0.042266845703125, -0.01360321044921875, -0.0182342529296875, -0.00606536865234375, -0.00738525390625, 0.01092529296875, -0.0047607421875, 0.06201171875, -0.0268096923828125, 
-0.0264892578125, -0.051513671875, -0.0082244873046875, 0.03253173828125, -0.0236663818359375, 0.05426025390625, 0.056884765625, -0.0273895263671875, 0.015716552734375, -0.043487548828125, -0.0197296142578125, -0.03839111328125, 0.033599853515625, -0.033782958984375, -0.05682373046875, 0.0628662109375, 0.00922393798828125, 0.0203857421875, 0.051727294921875, 0.05828857421875, 0.0035648345947265625, 0.0694580078125, 0.047943115234375, -0.0008044242858886719, 0.044403076171875, -0.052154541015625, 0.0026645660400390625, -0.07000732421875, -0.0482177734375, -0.031768798828125, -0.0382080078125, -0.032958984375, -0.023712158203125, 0.0182037353515625, 0.0108642578125, -0.049224853515625, 0.0275115966796875, -0.043975830078125, 0.01218414306640625, 0.03790283203125, 0.0268096923828125, 0.01102447509765625, -0.006061553955078125, -0.012481689453125, 0.007198333740234375, -0.050201416015625, -0.042572021484375, 0.0980224609375, 0.036163330078125, 0.05718994140625, 0.00566864013671875, 0.044403076171875, 0.00011610984802246094, 0.02117919921875, -0.039398193359375, 0.041839599609375, 0.011749267578125, -0.059722900390625, -0.0204315185546875, -0.022979736328125, -0.07501220703125, 0.007213592529296875, -0.00927734375, -0.063720703125, 0.01861572265625, 0.004436492919921875, -0.036376953125, 0.024169921875, -0.041015625, 0.06549072265625, -0.0211334228515625, -0.003353118896484375, -0.004085540771484375, -0.057281494140625, 0.04473876953125, 0.004055023193359375, 0.002552032470703125, -0.01629638671875, -0.0060882568359375, 0.07769775390625, -0.032012939453125, 0.07489013671875, -0.006839752197265625, -0.004459381103515625, 0.0307464599609375, -0.004215240478515625, 0.046142578125, 0.00397491455078125, -0.0120086669921875, 0.0281524658203125, -0.01491546630859375, -0.031158447265625, -0.01995849609375, 0.043853759765625, -0.08819580078125, -0.04132080078125, -0.0266876220703125, -0.0318603515625, 0.0087890625, 0.0203857421875, 0.0301666259765625, 0.0278167724609375, 
0.01511383056640625, 0.00893402099609375, 0.040557861328125, -0.019622802734375, 0.036102294921875, 0.0325927734375, -0.0021038055419921875, -0.0280609130859375, 0.049346923828125, 0.00884246826171875, 0.019927978515625, 0.00872802734375, 0.003040313720703125, -0.0307464599609375, -0.034210205078125, -0.0243682861328125, 0.0361328125, -0.0496826171875, -0.0259552001953125, -0.047271728515625, -0.0188751220703125, -0.027862548828125, 0.00238037109375, -0.038604736328125, -0.0238189697265625, -0.046661376953125, -0.01555633544921875, 0.033233642578125, 0.0419921875, 0.0000959634780883789, 0.0277099609375, -0.0217132568359375, 0.01324462890625, 0.0236663818359375, 0.014984130859375, 0.01271820068359375, -0.0677490234375, -0.0075836181640625, 0.0112152099609375, -0.0438232421875, -0.05438232421875, 0.03399658203125, 0.00968170166015625, 0.0546875, 0.019683837890625, -0.00479888916015625, 0.0721435546875, -0.01568603515625, 0.06915283203125, 0.0218505859375, -0.0736083984375, 0.0390625, -0.0222015380859375, 0.0188751220703125, 0.0179443359375, 0.026336669921875, -0.01910400390625, -0.0299835205078125, -0.064453125, -0.07061767578125, 0.06597900390625, 0.025390625, 0.00836944580078125, 0.01403045654296875, 0.036163330078125, 0.002964019775390625, 0.01052093505859375, -0.07012939453125, -0.04443359375, -0.029815673828125, -0.007541656494140625, 0.0032825469970703125, -0.017730712890625, -0.006717681884765625, -0.018524169921875, 0.0560302734375, 0.0036602020263671875, 0.042266845703125, 0.009735107421875, 0.00614166259765625, -0.006732940673828125, -0.005741119384765625, 0.057373046875, 0.055633544921875, -0.0257415771484375, -0.0036602020263671875, 0.0310516357421875, -0.04345703125, -0.0031986236572265625, 0.01010894775390625, 0.0020503997802734375, -0.00989532470703125, 0.0308990478515625, 0.0645751953125, -0.0026226043701171875, -0.030853271484375, 0.022308349609375, -0.0051116943359375, -0.00966644287109375, -0.032379150390625, 0.011566162109375, 0.0155792236328125, 
0.03851318359375, 0.0293426513671875, 0.0100555419921875, -0.005153656005859375, -0.0390625, -0.0015497207641601562, 0.0261993408203125, -0.00788116455078125, -0.032928466796875, 0.07696533203125, 0.005157470703125, -0.0214080810546875, 0.048797607421875, 0.0048980712890625, -0.03985595703125, 0.0599365234375, 0.0272369384765625, 0.049896240234375, -0.0178680419921875, -0.0052337646484375, 0.041717529296875, 0.01338958740234375, -0.00902557373046875, 0.0281524658203125, 0.0097198486328125, -0.042816162109375, -0.029815673828125, -0.0418701171875, -0.0210723876953125, 0.02935791015625, -0.04486083984375, 0.045867919921875, -0.0302886962890625, -0.0235595703125, -0.00374603271484375, 0.0213470458984375, -0.0645751953125, 0.01922607421875, 0.016326904296875, 0.06781005859375, -0.052734375, 0.069091796875, 0.04400634765625, -0.04217529296875, -0.0870361328125, -0.022491455078125, 0.002338409423828125, -0.0736083984375, 0.04095458984375, 0.0103912353515625, -0.00200653076171875, 0.010955810546875, -0.051422119140625, -0.075927734375, 0.112060546875, 0.0369873046875, -0.03594970703125, -0.008392333984375, 0.0022296905517578125, 0.04071044921875, -0.01861572265625, 0.053863525390625, 0.0496826171875, 0.0325927734375, 0.01277923583984375, -0.08221435546875, 0.024566650390625, -0.0300140380859375, -0.01119232177734375, -0.0084991455078125, -0.074462890625, 0.08819580078125, -0.0225677490234375, -0.003673553466796875, 0.021636962890625, 0.051605224609375, 0.047271728515625, 0.01190185546875, 0.028594970703125, 0.046783447265625, 0.055145263671875, -0.0106048583984375, 0.0728759765625, -0.017242431640625, 0.045989990234375, 0.06268310546875, 0.01406097412109375, 0.04925537109375, 0.01175689697265625, -0.0309600830078125, 0.05126953125, 0.07366943359375, -0.0037593841552734375, 0.04119873046875, 0.0117950439453125, 0.00443267822265625, -0.010589599609375, 0.01250457763671875, -0.0545654296875, 0.03326416015625, 0.0270843505859375, -0.024749755859375, -0.01503753662109375, 
-0.0180511474609375, 0.02178955078125, -0.02838134765625, -0.0054168701171875, 0.042144775390625, 0.00962066650390625, -0.0274658203125, 0.078369140625, 0.004589080810546875, 0.06683349609375, -0.054290771484375, 0.004364013671875, -0.0291595458984375, 0.01324462890625, -0.0288848876953125, -0.047088623046875, 0.006954193115234375, 0.0012493133544921875, 0.00431060791015625, 0.0029430389404296875, 0.039825439453125, -0.020416259765625, -0.035430908203125, 0.013214111328125, 0.0190582275390625, 0.0286407470703125, 0.0123748779296875, -0.07147216796875, 0.0220794677734375, 0.009307861328125, -0.05645751953125, 0.01568603515625, 0.029449462890625, -0.00005173683166503906, 0.05511474609375, 0.050201416015625, -0.001178741455078125, 0.0172119140625, -0.0128173828125, 0.08343505859375, -0.037750244140625, -0.0306396484375, -0.07611083984375, 0.047393798828125, 0.0017538070678710938, -0.04168701171875, 0.058319091796875, 0.038726806640625, 0.06097412109375, 0.00543212890625, 0.046875, -0.024993896484375, 0.0214996337890625, -0.028839111328125, 0.052520751953125, -0.044403076171875, 0.031585693359375, -0.0216217041015625, -0.0731201171875, -0.0185546875, 0.068115234375, -0.0292816162109375, 0.00778961181640625, 0.035064697265625, 0.06536865234375, -0.0027484893798828125, -0.010284423828125, -0.0005717277526855469, 0.02587890625, 0.041961669921875, 0.056549072265625, 0.042694091796875, -0.04412841796875, 0.05645751953125, -0.028472900390625, -0.0232696533203125, -0.015869140625, -0.06268310546875, -0.076171875, -0.0276641845703125, -0.02569580078125, -0.033111572265625, -0.007442474365234375, 0.06207275390625, 0.061920166015625, -0.057281494140625, -0.032470703125, -0.0075836181640625, -0.00319671630859375, -0.02154541015625, -0.01294708251953125, 0.049591064453125, -0.0037288665771484375, -0.066650390625, 0.01000213623046875, -0.0094146728515625, 0.022430419921875, -0.0238800048828125, -0.0187225341796875, -0.02392578125, -0.0043182373046875, 0.0228271484375, 
0.02764892578125, -0.057373046875, -0.0172271728515625, 0.00519561767578125, -0.0147857666015625, 0.0107879638671875, 0.020660400390625, -0.054718017578125, 0.0318603515625, 0.0238037109375, 0.02130126953125, 0.0645751953125, -0.005779266357421875, 0.0236358642578125, -0.039306640625, 0.0200653076171875, 0.0019550323486328125, 0.024627685546875, 0.01702880859375, -0.0255889892578125, 0.05072021484375, 0.018524169921875, -0.037994384765625, -0.07073974609375, -0.00597381591796875, -0.0894775390625, 0.0034961700439453125, 0.08648681640625, -0.0296478271484375, -0.0191192626953125, 0.0110015869140625, -0.02728271484375, 0.04425048828125, -0.03399658203125, 0.0726318359375, 0.042816162109375, -0.023101806640625, -0.01171875, -0.042266845703125, 0.033203125, 0.0204315185546875, -0.065673828125, -0.0197296142578125, 0.00875091552734375, 0.038330078125, 0.0163726806640625, 0.048370361328125, -0.00746917724609375, 0.014801025390625, -0.0023651123046875, 0.01212310791015625, -0.029205322265625, -0.0009202957153320312, -0.0096282958984375, -0.01078033447265625, -0.01108551025390625, -0.029754638671875 ] ]
TheBloke/Wizard-Vicuna-30B-Uncensored-fp16
2023-06-05T00:10:35.000Z
[ "transformers", "pytorch", "llama", "text-generation", "uncensored", "en", "dataset:ehartford/wizard_vicuna_70k_unfiltered", "license:other", "endpoints_compatible", "has_space", "text-generation-inference", "region:us" ]
text-generation
TheBloke
null
null
TheBloke/Wizard-Vicuna-30B-Uncensored-fp16
17
5,831
transformers
2023-05-30T09:50:28
--- license: other datasets: - ehartford/wizard_vicuna_70k_unfiltered language: - en tags: - uncensored --- <!-- header start --> <div style="width: 100%;"> <img src="https://i.imgur.com/EBdldam.jpg" alt="TheBlokeAI" style="width: 100%; min-width: 400px; display: block; margin: auto;"> </div> <div style="display: flex; justify-content: space-between; width: 100%;"> <div style="display: flex; flex-direction: column; align-items: flex-start;"> <p><a href="https://discord.gg/Jq4vkcDakD">Chat & support: my new Discord server</a></p> </div> <div style="display: flex; flex-direction: column; align-items: flex-end;"> <p><a href="https://www.patreon.com/TheBlokeAI">Want to contribute? TheBloke's Patreon page</a></p> </div> </div> <!-- header end --> # Eric Hartford's Wizard-Vicuna-30B-Uncensored GPTQ This is an fp16 models of [Eric Hartford's Wizard-Vicuna 30B](https://huggingface.co/ehartford/Wizard-Vicuna-30B-Uncensored). It is the result of converting Eric's original fp32 upload to fp16. ## Repositories available * [4bit GPTQ models for GPU inference](https://huggingface.co/TheBloke/Wizard-Vicuna-30B-Uncensored-GPTQ). * [4bit and 5bit GGML models for CPU inference](https://huggingface.co/TheBloke/Wizard-Vicuna-30B-Uncensored-GGML). * [float16 HF format model for GPU inference and further conversions](https://huggingface.co/TheBloke/Wizard-Vicuna-30B-Uncensored-fp16). <!-- footer start --> ## Discord For further support, and discussions on these models and AI in general, join us at: [TheBloke AI's Discord server](https://discord.gg/Jq4vkcDakD) ## Thanks, and how to contribute. Thanks to the [chirper.ai](https://chirper.ai) team! I've had a lot of people ask if they can contribute. I enjoy providing models and helping people, and would love to be able to spend even more time doing it, as well as expanding into new projects like fine tuning/training. 
If you're able and willing to contribute it will be most gratefully received and will help me to keep providing more models, and to start work on new AI projects. Donaters will get priority support on any and all AI/LLM/model questions and requests, access to a private Discord room, plus other benefits. * Patreon: https://patreon.com/TheBlokeAI * Ko-Fi: https://ko-fi.com/TheBlokeAI **Patreon special mentions**: Aemon Algiz, Dmitriy Samsonov, Nathan LeClaire, Trenton Dambrowitz, Mano Prime, David Flickinger, vamX, Nikolai Manek, senxiiz, Khalefa Al-Ahmad, Illia Dulskyi, Jonathan Leane, Talal Aujan, V. Lukas, Joseph William Delisle, Pyrater, Oscar Rangel, Lone Striker, Luke Pendergrass, Eugene Pentland, Sebastain Graf, Johann-Peter Hartman. Thank you to all my generous patrons and donaters! <!-- footer end --> # Original model card This is [wizard-vicuna-13b](https://huggingface.co/junelee/wizard-vicuna-13b) trained with a subset of the dataset - responses that contained alignment / moralizing were removed. The intent is to train a WizardLM that doesn't have alignment built-in, so that alignment (of any sort) can be added separately with for example with a RLHF LoRA. Shout out to the open source AI/ML community, and everyone who helped me out. Note: An uncensored model has no guardrails. You are responsible for anything you do with the model, just as you are responsible for anything you do with any dangerous object such as a knife, gun, lighter, or car. Publishing anything this model generates is the same as publishing it yourself. You are responsible for the content you publish, and you cannot blame the model any more than you can blame the knife, gun, lighter, or car for what you do with it.
3,657
[ [ -0.0394287109375, -0.049713134765625, -0.002197265625, 0.0018205642700195312, -0.0158233642578125, -0.0236663818359375, 0.00986480712890625, -0.041839599609375, 0.026397705078125, 0.031585693359375, -0.05450439453125, -0.021240234375, -0.017547607421875, -0.006374359130859375, -0.0232086181640625, 0.06744384765625, 0.033935546875, 0.0042572021484375, -0.0022144317626953125, 0.00833892822265625, -0.05511474609375, -0.0240325927734375, -0.0594482421875, -0.042572021484375, 0.0406494140625, 0.003559112548828125, 0.06427001953125, 0.042633056640625, 0.02056884765625, 0.031463623046875, 0.0090484619140625, 0.015838623046875, -0.049774169921875, -0.01558685302734375, -0.00443267822265625, -0.01337432861328125, -0.057464599609375, 0.00316619873046875, 0.02679443359375, 0.0221710205078125, -0.0204925537109375, 0.00868988037109375, 0.00591278076171875, 0.047332763671875, -0.03765869140625, 0.018707275390625, -0.03680419921875, -0.0025005340576171875, -0.0045166015625, 0.0021381378173828125, -0.020904541015625, -0.021453857421875, -0.0187225341796875, -0.09320068359375, 0.0059967041015625, 0.01219940185546875, 0.0821533203125, 0.016998291015625, -0.0137939453125, 0.0186004638671875, -0.06085205078125, 0.0369873046875, -0.05126953125, 0.03375244140625, 0.02166748046875, 0.03924560546875, -0.01099395751953125, -0.06005859375, -0.050872802734375, 0.00029730796813964844, 0.005615234375, 0.032135009765625, -0.041748046875, -0.00308990478515625, -0.005367279052734375, 0.0386962890625, -0.044891357421875, 0.002941131591796875, -0.04205322265625, -0.01107025146484375, 0.06390380859375, 0.005817413330078125, 0.034912109375, 0.00969696044921875, -0.0176239013671875, -0.0318603515625, -0.036956787109375, 0.00638580322265625, 0.033477783203125, 0.0222930908203125, -0.051544189453125, 0.062744140625, 0.00896453857421875, 0.044647216796875, 0.0243682861328125, 0.00478363037109375, -0.000005662441253662109, -0.035858154296875, -0.04205322265625, -0.0174713134765625, 0.0789794921875, 
0.041168212890625, 0.007770538330078125, 0.0030422210693359375, 0.0119476318359375, -0.007297515869140625, 0.0239105224609375, -0.054931640625, -0.036407470703125, 0.03302001953125, -0.04901123046875, -0.0199432373046875, -0.00001817941665649414, -0.05328369140625, -0.046539306640625, -0.0126190185546875, 0.030853271484375, -0.04095458984375, -0.04742431640625, 0.021759033203125, -0.038909912109375, 0.03607177734375, 0.04852294921875, -0.056884765625, 0.007343292236328125, 0.048858642578125, 0.035308837890625, 0.032867431640625, -0.0149688720703125, -0.0224456787109375, 0.0133209228515625, -0.029052734375, 0.0413818359375, -0.02069091796875, -0.03765869140625, -0.01029205322265625, 0.00933074951171875, 0.006160736083984375, -0.0255584716796875, 0.0235595703125, -0.0246429443359375, 0.01123809814453125, -0.01139068603515625, -0.04608154296875, -0.0029087066650390625, 0.01024627685546875, -0.0572509765625, 0.048095703125, 0.011444091796875, -0.058868408203125, 0.0061187744140625, -0.049896240234375, 0.00954437255859375, 0.016510009765625, -0.006023406982421875, -0.01947021484375, 0.0015773773193359375, -0.0107574462890625, 0.007335662841796875, -0.0309295654296875, 0.003509521484375, -0.0494384765625, -0.01568603515625, 0.027557373046875, -0.055023193359375, 0.0933837890625, 0.007503509521484375, -0.016143798828125, -0.010467529296875, -0.07183837890625, -0.006175994873046875, 0.03179931640625, -0.01544952392578125, -0.0008606910705566406, -0.0131683349609375, 0.0146331787109375, 0.0024013519287109375, 0.02655029296875, -0.03863525390625, 0.0236663818359375, -0.0183258056640625, 0.006744384765625, 0.061309814453125, -0.003574371337890625, 0.03021240234375, -0.0411376953125, 0.0321044921875, -0.0184478759765625, 0.054290771484375, 0.0243682861328125, -0.05267333984375, -0.047119140625, -0.030181884765625, 0.0144195556640625, 0.041290283203125, -0.05645751953125, 0.0589599609375, -0.0017175674438476562, -0.07244873046875, -0.056427001953125, -0.006900787353515625, 
0.020660400390625, 0.034454345703125, 0.032928466796875, -0.02716064453125, -0.0275115966796875, -0.06610107421875, -0.002666473388671875, -0.043243408203125, -0.01800537109375, 0.0290679931640625, 0.0186767578125, -0.008026123046875, 0.050994873046875, -0.027313232421875, -0.028900146484375, -0.00920867919921875, -0.01739501953125, 0.01474761962890625, 0.0640869140625, 0.050994873046875, -0.058563232421875, -0.036651611328125, 0.01971435546875, -0.06011962890625, -0.003986358642578125, -0.00504302978515625, -0.037445068359375, 0.0025310516357421875, 0.00829315185546875, -0.0772705078125, 0.056182861328125, 0.0306549072265625, -0.051300048828125, 0.039276123046875, -0.0283660888671875, 0.0079498291015625, -0.07452392578125, 0.01148223876953125, 0.004833221435546875, -0.0102081298828125, -0.047760009765625, -0.00455474853515625, -0.03460693359375, -0.0110015869140625, -0.03363037109375, 0.06304931640625, -0.034698486328125, 0.0218658447265625, -0.01250457763671875, -0.00751495361328125, 0.0257110595703125, 0.0275115966796875, -0.0017843246459960938, 0.0282135009765625, 0.050262451171875, -0.03424072265625, 0.045196533203125, 0.03509521484375, -0.00615692138671875, 0.04156494140625, -0.0723876953125, -0.00705718994140625, -0.01343536376953125, 0.0240936279296875, -0.06268310546875, -0.009429931640625, 0.0599365234375, -0.05950927734375, 0.048370361328125, -0.0221405029296875, -0.03057861328125, -0.029144287109375, -0.0248565673828125, 0.0152587890625, 0.052947998046875, -0.03509521484375, 0.057037353515625, 0.036285400390625, 0.0311279296875, -0.057769775390625, -0.056121826171875, -0.0230560302734375, -0.0223236083984375, -0.03857421875, 0.02880859375, -0.020263671875, -0.0252532958984375, 0.0020599365234375, 0.00533294677734375, -0.007373809814453125, 0.0028533935546875, 0.0286712646484375, 0.0313720703125, -0.011199951171875, -0.021453857421875, -0.01285552978515625, 0.006992340087890625, -0.00013756752014160156, -0.020416259765625, 0.047119140625, 
-0.022918701171875, -0.0230865478515625, -0.0751953125, 0.0293426513671875, 0.041534423828125, -0.0139617919921875, 0.06329345703125, 0.046173095703125, -0.029296875, -0.004764556884765625, -0.038116455078125, -0.0160064697265625, -0.042938232421875, 0.00537872314453125, -0.00725555419921875, -0.04705810546875, 0.04022216796875, 0.04400634765625, 0.026336669921875, 0.045196533203125, 0.037200927734375, -0.01444244384765625, 0.062744140625, 0.05517578125, -0.006427764892578125, 0.044708251953125, -0.047149658203125, 0.0120697021484375, -0.035675048828125, -0.0345458984375, -0.03887939453125, -0.024261474609375, -0.0633544921875, -0.0401611328125, 0.0161590576171875, 0.009246826171875, -0.05206298828125, 0.0258026123046875, -0.051910400390625, 0.0188751220703125, 0.0293731689453125, 0.02008056640625, 0.01354217529296875, -0.00147247314453125, 0.0172576904296875, 0.00936126708984375, -0.06256103515625, -0.0207061767578125, 0.05914306640625, 0.027984619140625, 0.05731201171875, 0.0204010009765625, 0.048736572265625, 0.0246429443359375, 0.02239990234375, -0.0379638671875, 0.038726806640625, -0.00600433349609375, -0.07891845703125, -0.033935546875, -0.0186004638671875, -0.076904296875, 0.0169677734375, -0.02978515625, -0.0614013671875, 0.047119140625, 0.0198211669921875, -0.026336669921875, 0.042816162109375, -0.032135009765625, 0.078857421875, -0.0053863525390625, -0.042205810546875, -0.004917144775390625, -0.04608154296875, 0.0235595703125, 0.023345947265625, 0.01053619384765625, -0.01447296142578125, 0.00536346435546875, 0.0355224609375, -0.07440185546875, 0.09344482421875, -0.0134124755859375, -0.008819580078125, 0.056121826171875, 0.005916595458984375, 0.02056884765625, 0.0176544189453125, -0.003604888916015625, 0.01523590087890625, 0.0185546875, -0.033050537109375, -0.0286712646484375, 0.037994384765625, -0.09112548828125, -0.0439453125, -0.0261383056640625, -0.0291900634765625, 0.016632080078125, 0.0247955322265625, 0.038116455078125, 0.03338623046875, 
-0.0261077880859375, 0.0198974609375, 0.03265380859375, -0.0120849609375, 0.03399658203125, 0.02215576171875, -0.013946533203125, -0.031829833984375, 0.07464599609375, 0.002880096435546875, -0.0010051727294921875, 0.0255126953125, 0.0166015625, -0.0260162353515625, -0.01287078857421875, -0.033111572265625, 0.042755126953125, -0.044281005859375, -0.0303192138671875, -0.0311279296875, -0.02789306640625, -0.0433349609375, -0.0070343017578125, -0.0557861328125, -0.0369873046875, -0.051116943359375, 0.01430511474609375, 0.052520751953125, 0.045013427734375, -0.0286865234375, 0.0214080810546875, -0.04931640625, 0.00412750244140625, 0.0193634033203125, 0.00959014892578125, 0.0106201171875, -0.039337158203125, -0.00572967529296875, 0.00542449951171875, -0.037750244140625, -0.056121826171875, 0.047637939453125, 0.00691986083984375, 0.053863525390625, 0.034515380859375, 0.01806640625, 0.055877685546875, -0.03924560546875, 0.0640869140625, 0.037109375, -0.048583984375, 0.0178375244140625, -0.040008544921875, 0.0091552734375, 0.04168701171875, 0.0367431640625, -0.01806640625, -0.03778076171875, -0.0628662109375, -0.036956787109375, 0.028167724609375, 0.0193328857421875, 0.0187225341796875, 0.0025501251220703125, 0.037261962890625, -0.0009403228759765625, 0.004085540771484375, -0.065185546875, -0.04718017578125, -0.04241943359375, 0.003231048583984375, 0.0164031982421875, 0.0171661376953125, -0.0196685791015625, -0.05023193359375, 0.0821533203125, -0.014068603515625, 0.050201416015625, 0.018798828125, 0.032318115234375, -0.006687164306640625, -0.00589752197265625, 0.018768310546875, 0.054351806640625, -0.0102081298828125, -0.0211334228515625, -0.02288818359375, -0.027923583984375, -0.003936767578125, 0.02105712890625, -0.01470184326171875, -0.0034427642822265625, 0.01605224609375, 0.0645751953125, -0.0164794921875, -0.0277557373046875, 0.03631591796875, -0.019317626953125, -0.01361083984375, -0.03448486328125, 0.0238037109375, 0.021240234375, 0.04840087890625, 
0.01413726806640625, -0.004207611083984375, 0.0198211669921875, -0.0338134765625, 0.0026149749755859375, 0.0537109375, -0.02587890625, -0.0291748046875, 0.07598876953125, -0.002223968505859375, -0.03497314453125, 0.046234130859375, 0.00583648681640625, -0.01213836669921875, 0.062744140625, 0.046539306640625, 0.058074951171875, -0.01203155517578125, 0.028350830078125, 0.034942626953125, 0.0156707763671875, 0.0121002197265625, 0.011749267578125, -0.0009946823120117188, -0.049285888671875, -0.0031261444091796875, -0.03399658203125, -0.035003662109375, 0.0234527587890625, -0.05780029296875, 0.04010009765625, -0.053680419921875, -0.021942138671875, 0.0074005126953125, 0.006809234619140625, -0.046844482421875, 0.0167388916015625, 0.0235595703125, 0.08587646484375, -0.050079345703125, 0.068115234375, 0.029144287109375, -0.045684814453125, -0.06793212890625, -0.0238800048828125, 0.0084075927734375, -0.052459716796875, 0.0035114288330078125, -0.00490570068359375, 0.0033397674560546875, 0.0051422119140625, -0.06719970703125, -0.051849365234375, 0.10113525390625, 0.01483154296875, -0.0310821533203125, -0.0209503173828125, -0.004726409912109375, 0.043243408203125, -0.031524658203125, 0.02899169921875, 0.0246429443359375, 0.0291900634765625, 0.01019287109375, -0.0667724609375, -0.004924774169921875, -0.040008544921875, 0.003387451171875, -0.01233673095703125, -0.09381103515625, 0.05462646484375, 0.00977325439453125, 0.000060498714447021484, 0.02801513671875, 0.06280517578125, 0.035247802734375, 0.00930023193359375, 0.0271759033203125, 0.0273590087890625, 0.06103515625, 0.00823974609375, 0.0947265625, -0.0239715576171875, 0.025970458984375, 0.0626220703125, 0.00559234619140625, 0.046905517578125, 0.0174560546875, -0.0158233642578125, 0.026702880859375, 0.05340576171875, -0.02679443359375, 0.02911376953125, 0.006450653076171875, -0.0220794677734375, -0.0243682861328125, -0.0202484130859375, -0.054046630859375, 0.022064208984375, 0.012237548828125, -0.006885528564453125, 
0.003173828125, -0.007640838623046875, -0.00018405914306640625, -0.0132598876953125, -0.027313232421875, 0.04638671875, 0.0172882080078125, -0.0287322998046875, 0.06951904296875, -0.00814056396484375, 0.047454833984375, -0.051849365234375, -0.005157470703125, -0.0360107421875, 0.0206146240234375, -0.00670623779296875, -0.03851318359375, 0.01186370849609375, -0.0137786865234375, -0.0125274658203125, 0.004451751708984375, 0.050933837890625, -0.0166015625, -0.047119140625, 0.04339599609375, 0.0257110595703125, 0.028839111328125, 0.023956298828125, -0.07781982421875, 0.024169921875, -0.003101348876953125, -0.021331787109375, 0.02801513671875, 0.035980224609375, 0.00897979736328125, 0.0511474609375, 0.03240966796875, -0.0005970001220703125, 0.015838623046875, -0.010040283203125, 0.0748291015625, -0.0239715576171875, -0.01776123046875, -0.060394287109375, 0.057037353515625, -0.00502777099609375, -0.019927978515625, 0.0592041015625, 0.0504150390625, 0.056304931640625, -0.018096923828125, 0.05352783203125, -0.0138702392578125, 0.005397796630859375, -0.01690673828125, 0.0877685546875, -0.07891845703125, 0.006488800048828125, -0.016815185546875, -0.054931640625, -0.0157623291015625, 0.047821044921875, 0.0234222412109375, 0.006656646728515625, 0.0053863525390625, 0.06353759765625, -0.002685546875, -0.0008563995361328125, 0.03509521484375, 0.018035888671875, 0.03363037109375, 0.045166015625, 0.055694580078125, -0.06048583984375, 0.04217529296875, -0.041717529296875, -0.014801025390625, -0.010589599609375, -0.058868408203125, -0.06988525390625, -0.034271240234375, -0.041900634765625, -0.0582275390625, -0.002605438232421875, 0.06561279296875, 0.053070068359375, -0.04241943359375, -0.039764404296875, -0.00037407875061035156, 0.01160430908203125, -0.01326751708984375, -0.0186309814453125, 0.0089111328125, 0.0281524658203125, -0.0599365234375, 0.036376953125, -0.0021839141845703125, 0.037750244140625, -0.018035888671875, -0.0165557861328125, -0.0269012451171875, 0.0204620361328125, 
0.0266265869140625, 0.0614013671875, -0.042510986328125, -0.016357421875, -0.003978729248046875, 0.006122589111328125, 0.0211639404296875, 0.0259246826171875, -0.040863037109375, 0.0189056396484375, 0.0411376953125, 0.0305938720703125, 0.043121337890625, 0.0015888214111328125, 0.039306640625, -0.022674560546875, 0.0175323486328125, 0.0079498291015625, 0.0264739990234375, 0.0204620361328125, -0.04022216796875, 0.039794921875, 0.0265960693359375, -0.055389404296875, -0.06304931640625, -0.01617431640625, -0.08184814453125, -0.027435302734375, 0.068115234375, 0.00989532470703125, -0.045989990234375, 0.0048065185546875, -0.00826263427734375, 0.0369873046875, -0.024627685546875, 0.022430419921875, 0.0303497314453125, -0.01824951171875, -0.0266265869140625, -0.046539306640625, 0.0389404296875, -0.0035343170166015625, -0.045928955078125, 0.01093292236328125, 0.06585693359375, 0.0257720947265625, 0.02520751953125, 0.07171630859375, -0.0155792236328125, 0.031768798828125, 0.01837158203125, 0.0255584716796875, -0.0094451904296875, -0.031097412109375, -0.02801513671875, 0.00342559814453125, -0.007781982421875, -0.0188751220703125 ] ]
MayaPH/FinOPT-Franklin
2023-07-11T13:51:38.000Z
[ "transformers", "pytorch", "safetensors", "opt", "text-generation", "license:cc-by-sa-4.0", "endpoints_compatible", "has_space", "text-generation-inference", "region:us" ]
text-generation
MayaPH
null
null
MayaPH/FinOPT-Franklin
5
5,829
transformers
2023-05-26T20:36:02
--- license: cc-by-sa-4.0 pipeline_tag: text-generation --- # 🤗 FinOPT-Franklin Released June 1, 2023 ## Model Description FinOPT-Franklin is a language model based on the OPT-1.3B architecture, which has been fine-tuned on a financial question-answering dataset. The model aims to provide accurate and informative responses to financial-related questions. ## FinOPT Series The FinOPT series of language models come in various model sizes. Kindly refer to this Huggingface Hub [link](https://huggingface.co/models?search=mayaph/finopt) to see the other checkpoints of FinOPT. | Model Name | Parameter Size | |---------------------|----------------| | <b>FinOPT-Franklin</b> | <b>1.3B</b> | | [FinOPT-Lincoln](https://huggingface.co/MayaPH/FinOPT-Lincoln) | 350M | | [FinOPT-Washington](https://huggingface.co/MayaPH/FinOPT-Washington) | 125M | ## Intended Use FinOPT-Franklin is designed to assist users in obtaining relevant and reliable information about financial topics. It can be used as a tool for performing question-answering tasks in the financial domain, including banking queries, investment advice, and general financial inquiries. The model is intended to be used by individuals seeking information about financial topics, as well as developers and researchers working on natural language processing (NLP) tasks in the financial domain. ## Usage To use FinOPT-Franklin, you are required to provide attribution in accordance with the Creative Commons Attribution-ShareAlike 4.0 International (CC BY-SA 4.0) license. Please include the following attribution notice when utilizing FinOPT-Franklin in your work: ```python # This code uses FinOPT-Franklin, a language model developed by Maya Philippines. # The model is licensed under the Creative Commons Attribution-ShareAlike 4.0 International (CC BY-SA 4.0) license. 
# For more information, visit: https://creativecommons.org/licenses/by-sa/4.0/ from transformers import AutoTokenizer, AutoModelForCausalLM tokenizer = AutoTokenizer.from_pretrained("MayaPH/FinOPT-Franklin") model = AutoModelForCausalLM.from_pretrained("MayaPH/FinOPT-Franklin") ``` Please ensure that you include the relevant attribution notice in your code or any other form of usage to comply with the license terms. ## Limitations and Caveats While FinOPT-Franklin has been fine-tuned on a financial question-answering dataset, it is important to note the following limitations and caveats: 1. **Domain-Specific Focus:** The model's training data primarily consists of financial questions and answers from the financial QA dataset. It may not perform as well on questions outside the financial domain. 2. **Potential Bias:** The model may reflect biases present in the training data. It is crucial to carefully evaluate and interpret the model's responses, particularly on sensitive topics such as investment advice or financial recommendations. 3. **Confidence and Verification:** The model generates responses based on patterns learned from the training data, but it does not have inherent fact-checking capabilities. Users should verify the information provided by the model from reliable sources before making any financial decisions. ## Training Data FinOPT-Franklin was trained on a financial question-answering dataset, which consists of questions and answers related to various financial topics. The dataset was collected from online sources and financial forums, and manually handcrafted. ## Ethical Considerations When using FinOPT-Franklin, it is important to consider the following ethical considerations: 1. **Privacy and Security:** Avoid sharing sensitive personal or financial information while interacting with the model. The model does not have privacy safeguards, so exercise caution when discussing personal or confidential matters. 2. 
**Fairness and Bias:** The model's responses may reflect biases present in the training data. Be aware of potential biases and make an effort to evaluate responses critically and fairly. 3. **Transparency:** The model operates as a predictive text generator based on patterns learned from the training data. The model's inner workings and the specific training data used are proprietary and not publicly available. 4. **User Responsibility:** Users should take responsibility for their own financial decisions and not solely rely on the information provided by the model. Consult with financial professionals or reliable sources for specific financial advice or recommendations. ## Further Information For additional information or inquiries about FinOPT-Franklin, please contact the Maya Philippines iOps Team via jasper.catapang@maya.ph. ## Disclaimer FinOPT-Franklin is an AI language model trained by Maya Philippines. It is provided "as is" without warranty of any kind, express or implied. The model developers and Maya Philippines shall not be liable for any direct or indirect damages arising from the use of this model. ## Acknowledgments The development of FinOPT-Franklin was made possible by Maya Philippines and the curation and creation of the financial question-answering dataset.
5,159
[ [ -0.02325439453125, -0.0838623046875, 0.0135345458984375, 0.0294952392578125, -0.0240020751953125, -0.01346588134765625, -0.0023784637451171875, -0.057281494140625, 0.01432037353515625, 0.0482177734375, -0.054046630859375, -0.019134521484375, -0.01763916015625, -0.0006475448608398438, -0.031402587890625, 0.07672119140625, 0.0079345703125, 0.00457763671875, 0.005687713623046875, 0.03802490234375, -0.009613037109375, -0.022216796875, -0.084228515625, -0.00775909423828125, 0.0026950836181640625, 0.0254058837890625, 0.04840087890625, 0.04119873046875, 0.041900634765625, 0.02618408203125, 0.010955810546875, -0.0159149169921875, -0.01421356201171875, -0.03387451171875, -0.00914764404296875, -0.00765228271484375, -0.050933837890625, 0.0113983154296875, 0.02313232421875, 0.05059814453125, -0.023529052734375, 0.0285797119140625, -0.0054931640625, 0.050537109375, -0.011932373046875, 0.043731689453125, -0.030548095703125, -0.004428863525390625, 0.0011034011840820312, 0.00527191162109375, -0.0253448486328125, -0.04840087890625, 0.041900634765625, -0.047515869140625, 0.004375457763671875, 0.0154876708984375, 0.083984375, -0.00609588623046875, -0.0181427001953125, -0.0172119140625, -0.047515869140625, 0.044097900390625, -0.06243896484375, 0.031158447265625, 0.051971435546875, 0.03302001953125, 0.02557373046875, -0.05322265625, -0.04229736328125, -0.01776123046875, -0.0047454833984375, 0.007648468017578125, -0.00339508056640625, 0.00969696044921875, 0.00824737548828125, 0.041107177734375, -0.0440673828125, -0.034759521484375, -0.055023193359375, -0.00876617431640625, 0.055938720703125, -0.0145263671875, 0.00885009765625, -0.0357666015625, -0.05963134765625, -0.0298919677734375, -0.029876708984375, 0.026123046875, 0.042755126953125, 0.0435791015625, -0.0231475830078125, 0.0310516357421875, -0.00992584228515625, 0.0621337890625, 0.0169525146484375, -0.01312255859375, 0.01433563232421875, -0.0254058837890625, -0.023651123046875, 0.0041351318359375, 0.05767822265625, 0.0185546875, 
0.0328369140625, 0.006046295166015625, -0.0205535888671875, -0.04351806640625, 0.0079498291015625, -0.03265380859375, 0.001041412353515625, 0.020721435546875, -0.0335693359375, -0.0264892578125, 0.0266876220703125, -0.046142578125, 0.006328582763671875, -0.0064544677734375, 0.037322998046875, -0.041778564453125, -0.03753662109375, 0.0291748046875, -0.004505157470703125, 0.0289306640625, 0.0004265308380126953, -0.09478759765625, 0.045196533203125, 0.03717041015625, 0.0295867919921875, 0.02581787109375, -0.0220184326171875, -0.0007271766662597656, 0.0010004043579101562, -0.016998291015625, 0.0491943359375, -0.005680084228515625, -0.01448822021484375, -0.0042572021484375, -0.003429412841796875, -0.0300750732421875, -0.019622802734375, 0.0218048095703125, -0.032012939453125, 0.03472900390625, -0.032196044921875, -0.040069580078125, -0.0227508544921875, 0.0149993896484375, -0.036956787109375, 0.07342529296875, 0.041168212890625, -0.076904296875, 0.00740814208984375, -0.061004638671875, -0.02227783203125, 0.0007634162902832031, -0.00632476806640625, -0.0272979736328125, -0.03125, 0.00928497314453125, 0.0143890380859375, -0.029388427734375, 0.0294952392578125, -0.0234832763671875, -0.0262298583984375, 0.039276123046875, -0.030303955078125, 0.08843994140625, 0.0057373046875, -0.03948974609375, -0.01226043701171875, -0.056488037109375, -0.01381683349609375, 0.0252685546875, -0.0088348388671875, -0.0149383544921875, -0.0117340087890625, 0.022796630859375, 0.02337646484375, 0.056121826171875, -0.0643310546875, -0.006755828857421875, -0.0689697265625, 0.0269775390625, 0.0712890625, 0.0083160400390625, 0.006435394287109375, -0.0552978515625, 0.04864501953125, -0.0224456787109375, 0.05218505859375, 0.005687713623046875, -0.0382080078125, -0.053131103515625, 0.0093231201171875, 0.00620269775390625, 0.07928466796875, -0.033050537109375, 0.0282135009765625, -0.003734588623046875, -0.030181884765625, -0.0191192626953125, 0.005092620849609375, 0.03094482421875, 0.05462646484375, 
0.0338134765625, 0.0024623870849609375, -0.0323486328125, -0.08673095703125, -0.0012378692626953125, -0.05279541015625, 0.0236968994140625, 0.032867431640625, 0.035552978515625, 0.0286102294921875, 0.0745849609375, -0.052947998046875, -0.0008091926574707031, -0.032440185546875, -0.006591796875, 0.02423095703125, 0.04376220703125, 0.054779052734375, -0.071533203125, -0.040771484375, -0.001674652099609375, -0.033660888671875, 0.0135955810546875, -0.0235748291015625, -0.0022373199462890625, 0.00780487060546875, 0.036773681640625, -0.040618896484375, 0.0226593017578125, 0.021148681640625, -0.04461669921875, 0.03033447265625, -0.00986480712890625, -0.006114959716796875, -0.10296630859375, 0.0230865478515625, -0.00959014892578125, -0.00472259521484375, -0.03643798828125, -0.0039520263671875, -0.00214385986328125, -0.01153564453125, -0.03265380859375, 0.058502197265625, 0.00360870361328125, -0.006282806396484375, 0.0077362060546875, 0.016937255859375, -0.0019207000732421875, 0.0433349609375, -0.004589080810546875, 0.057098388671875, 0.03314208984375, -0.02545166015625, 0.03802490234375, 0.03125, -0.0181121826171875, 0.03265380859375, -0.06573486328125, -0.006420135498046875, 0.005977630615234375, -0.0046539306640625, -0.08135986328125, -0.0013189315795898438, 0.04461669921875, -0.04437255859375, 0.014678955078125, 0.0179443359375, -0.03228759765625, -0.039703369140625, -0.0281524658203125, 0.002414703369140625, 0.01418304443359375, -0.0223846435546875, 0.0355224609375, 0.0059967041015625, -0.0445556640625, -0.0692138671875, -0.0660400390625, -0.0236358642578125, -0.011474609375, -0.06060791015625, 0.023040771484375, -0.0250091552734375, -0.0201263427734375, 0.01123046875, 0.006000518798828125, -0.015228271484375, 0.00814056396484375, 0.0008826255798339844, 0.0421142578125, -0.0225067138671875, 0.0195465087890625, 0.006175994873046875, -0.01458740234375, 0.01299285888671875, -0.0294189453125, 0.03985595703125, -0.039794921875, -0.01543426513671875, -0.0242919921875, 
0.022552490234375, 0.0290985107421875, -0.035552978515625, 0.059539794921875, 0.0287628173828125, -0.0278472900390625, 0.0081634521484375, -0.05194091796875, -0.022674560546875, -0.039947509765625, 0.0176544189453125, -0.00920867919921875, -0.06298828125, 0.05645751953125, 0.0294036865234375, 0.02374267578125, 0.05328369140625, 0.022125244140625, 0.0054473876953125, 0.04461669921875, 0.0682373046875, 0.006130218505859375, 0.049224853515625, -0.03179931640625, 0.02740478515625, -0.039031982421875, -0.0258636474609375, -0.0297698974609375, 0.005664825439453125, -0.038238525390625, -0.0095672607421875, -0.0033473968505859375, 0.0017948150634765625, -0.02349853515625, 0.02886962890625, -0.020355224609375, 0.017242431640625, 0.0304412841796875, 0.0016622543334960938, 0.0147247314453125, -0.0036029815673828125, -0.0223541259765625, 0.0120391845703125, -0.0311279296875, -0.0241241455078125, 0.08587646484375, 0.0361328125, 0.035736083984375, 0.007083892822265625, 0.051422119140625, 0.004138946533203125, 0.0205841064453125, -0.02886962890625, 0.041534423828125, -0.01010894775390625, -0.085693359375, -0.041778564453125, -0.03277587890625, -0.055938720703125, 0.0035076141357421875, -0.0017690658569335938, -0.055267333984375, 0.03082275390625, -0.0015497207641601562, -0.06146240234375, 0.0148468017578125, -0.04461669921875, 0.0875244140625, -0.0222930908203125, -0.00913238525390625, -0.0006480216979980469, -0.051239013671875, 0.03631591796875, -0.00858306884765625, 0.0218353271484375, -0.0185089111328125, 0.00408172607421875, 0.04254150390625, -0.048736572265625, 0.07073974609375, -0.0068206787109375, -0.002590179443359375, 0.033782958984375, -0.0275115966796875, 0.0166473388671875, 0.01375579833984375, -0.00939178466796875, -0.014068603515625, 0.01308441162109375, -0.038818359375, -0.0189361572265625, 0.041473388671875, -0.05853271484375, -0.0254669189453125, -0.0338134765625, -0.041168212890625, -0.0145263671875, 0.0229644775390625, 0.020172119140625, 0.0281524658203125, 
-0.0248260498046875, -0.0078277587890625, 0.04901123046875, -0.026611328125, 0.038787841796875, 0.05224609375, -0.042694091796875, -0.040283203125, 0.06732177734375, 0.01148223876953125, 0.028717041015625, 0.0328369140625, 0.02191162109375, -0.038818359375, -0.0191802978515625, -0.033721923828125, 0.042755126953125, -0.054840087890625, -0.005275726318359375, -0.033050537109375, -0.0246734619140625, -0.058380126953125, -0.0121002197265625, -0.033447265625, -0.050811767578125, -0.0082244873046875, -0.03662109375, 0.053375244140625, 0.0345458984375, -0.0198211669921875, 0.0223388671875, -0.02935791015625, 0.01377105712890625, 0.0531005859375, 0.0318603515625, -0.0139007568359375, -0.0278472900390625, -0.0238800048828125, -0.009429931640625, -0.00019466876983642578, -0.072998046875, 0.021514892578125, 0.01363372802734375, 0.034942626953125, 0.0443115234375, 0.0157928466796875, 0.0293426513671875, 0.01806640625, 0.048583984375, -0.00769805908203125, -0.06365966796875, 0.052886962890625, -0.044097900390625, 0.01849365234375, 0.059844970703125, 0.03094482421875, -0.045013427734375, -0.049560546875, -0.06884765625, -0.071533203125, 0.06024169921875, 0.004184722900390625, 0.022674560546875, -0.0205230712890625, 0.044281005859375, -0.004268646240234375, 0.0241241455078125, -0.08154296875, -0.032379150390625, -0.028167724609375, -0.01496124267578125, 0.0131988525390625, -0.00975799560546875, 0.01348876953125, -0.016998291015625, 0.06903076171875, 0.002574920654296875, 0.015411376953125, 0.0254974365234375, 0.00521087646484375, -0.0218353271484375, 0.0094146728515625, 0.05145263671875, 0.07049560546875, -0.04693603515625, -0.0180206298828125, 0.01049041748046875, -0.047271728515625, -0.0220794677734375, 0.064208984375, -0.034942626953125, -0.007190704345703125, 0.0166168212890625, 0.049560546875, 0.03143310546875, -0.059112548828125, 0.0482177734375, -0.0208892822265625, -0.01922607421875, -0.0650634765625, -0.0086212158203125, 0.0089111328125, 0.022308349609375, 
0.031463623046875, -0.000013887882232666016, 0.0107574462890625, -0.035552978515625, 0.0172119140625, 0.0172119140625, -0.02618408203125, -0.0026111602783203125, 0.0543212890625, -0.0016069412231445312, -0.00557708740234375, 0.06524658203125, -0.008880615234375, -0.02374267578125, 0.051422119140625, 0.0343017578125, 0.059326171875, -0.046722412109375, 0.038787841796875, 0.038787841796875, 0.045013427734375, -0.001983642578125, 0.055877685546875, 0.00506591796875, -0.04949951171875, -0.0283050537109375, -0.057220458984375, -0.02008056640625, 0.0205841064453125, -0.0673828125, 0.0133819580078125, -0.07940673828125, -0.0289306640625, 0.001007080078125, 0.022308349609375, -0.02313232421875, 0.0217132568359375, 0.005741119384765625, 0.08294677734375, -0.07073974609375, 0.055145263671875, 0.045806884765625, -0.06512451171875, -0.07391357421875, -0.024261474609375, -0.005657196044921875, -0.05322265625, 0.03173828125, -0.0021762847900390625, -0.004589080810546875, 0.006359100341796875, -0.0262298583984375, -0.07177734375, 0.07769775390625, 0.034698486328125, -0.0119781494140625, -0.010101318359375, 0.029144287109375, 0.024078369140625, -0.0244598388671875, 0.0068817138671875, 0.032623291015625, 0.040496826171875, 0.004474639892578125, -0.0823974609375, -0.0095062255859375, -0.03533935546875, -0.0072174072265625, -0.0011205673217773438, -0.0687255859375, 0.09808349609375, -0.015472412109375, 0.00799560546875, 0.0006866455078125, 0.0406494140625, 0.01380157470703125, 0.01078033447265625, 0.038055419921875, 0.0277557373046875, 0.0543212890625, -0.006244659423828125, 0.08477783203125, -0.057098388671875, 0.04278564453125, 0.07305908203125, 0.019287109375, 0.0762939453125, 0.022796630859375, -0.0223846435546875, 0.043243408203125, 0.06982421875, -0.00991058349609375, 0.0182647705078125, -0.007579803466796875, -0.01154327392578125, -0.016021728515625, -0.0032138824462890625, -0.040679931640625, 0.025054931640625, 0.0110015869140625, -0.04644775390625, 0.0008625984191894531, 
0.0005469322204589844, 0.0055389404296875, -0.00933074951171875, -0.0084686279296875, 0.0271148681640625, 0.01763916015625, -0.03411865234375, 0.026397705078125, 0.0218963623046875, 0.0234527587890625, -0.0765380859375, 0.008575439453125, -0.03753662109375, 0.0125732421875, 0.00582122802734375, -0.0269317626953125, 0.0270233154296875, -0.0103302001953125, -0.01812744140625, -0.0369873046875, 0.0433349609375, -0.031890869140625, -0.061004638671875, 0.016204833984375, 0.03240966796875, 0.006069183349609375, -0.0001531839370727539, -0.084716796875, -0.0253448486328125, 0.0031032562255859375, -0.031585693359375, 0.00124359130859375, 0.0064849853515625, 0.00972747802734375, 0.06231689453125, 0.047210693359375, -0.002552032470703125, -0.0018053054809570312, -0.0102386474609375, 0.046722412109375, -0.047515869140625, -0.059326171875, -0.0518798828125, 0.0211181640625, -0.005680084228515625, -0.037750244140625, 0.056915283203125, 0.05950927734375, 0.07177734375, -0.00492095947265625, 0.044769287109375, -0.0020904541015625, 0.051971435546875, -0.0281524658203125, 0.057952880859375, -0.0330810546875, 0.0226287841796875, -0.04302978515625, -0.0557861328125, 0.00791168212890625, 0.0848388671875, -0.040863037109375, 0.0289306640625, 0.0440673828125, 0.038604736328125, 0.0087890625, 0.0305328369140625, 0.004161834716796875, 0.0184783935546875, -0.00229644775390625, 0.035980224609375, 0.035064697265625, -0.04833984375, 0.051422119140625, -0.057159423828125, -0.003936767578125, -0.0208740234375, -0.03802490234375, -0.059600830078125, -0.04486083984375, -0.02301025390625, -0.01177978515625, -0.003993988037109375, 0.07843017578125, 0.040435791015625, -0.049591064453125, -0.047760009765625, 0.01480865478515625, 0.00789642333984375, -0.027130126953125, -0.019073486328125, 0.046630859375, -0.0009646415710449219, -0.0418701171875, 0.017791748046875, 0.02532958984375, 0.03619384765625, -0.037841796875, 0.012908935546875, -0.033599853515625, 0.0277557373046875, 0.028167724609375, 
0.02679443359375, -0.040130615234375, -0.01218414306640625, 0.028228759765625, -0.00672149658203125, -0.003208160400390625, 0.03448486328125, -0.027008056640625, 0.023590087890625, 0.025238037109375, 0.0214996337890625, 0.045135498046875, 0.00769805908203125, 0.03985595703125, -0.03253173828125, 0.0108642578125, 0.01213836669921875, 0.0172119140625, 0.0265045166015625, -0.04010009765625, 0.0239410400390625, 0.015594482421875, -0.0169677734375, -0.036163330078125, 0.0014085769653320312, -0.0740966796875, -0.0303802490234375, 0.05218505859375, -0.00882720947265625, -0.028717041015625, -0.01715087890625, -0.00811767578125, 0.0023479461669921875, -0.0305938720703125, 0.056915283203125, 0.057861328125, -0.017974853515625, -0.03009033203125, -0.071044921875, 0.037994384765625, 0.0225677490234375, -0.05841064453125, 0.01226806640625, 0.0168609619140625, 0.03863525390625, 0.0086822509765625, 0.07928466796875, -0.007289886474609375, 0.044677734375, 0.0081634521484375, 0.005184173583984375, -0.016998291015625, 0.003467559814453125, -0.00392913818359375, 0.00411224365234375, 0.01561737060546875, 0.0053253173828125 ] ]
Fredithefish/RedPajama-INCITE-Chat-3B-Instruction-Tuning-with-GPT-4
2023-08-07T14:45:56.000Z
[ "transformers", "pytorch", "safetensors", "gpt_neox", "text-generation", "en", "dataset:Fredithefish/Instruction-Tuning-with-GPT-4-RedPajama-Chat", "license:cc", "has_space", "text-generation-inference", "region:us" ]
text-generation
Fredithefish
null
null
Fredithefish/RedPajama-INCITE-Chat-3B-Instruction-Tuning-with-GPT-4
2
5,828
transformers
2023-05-16T15:07:11
--- license: cc datasets: - Fredithefish/Instruction-Tuning-with-GPT-4-RedPajama-Chat language: - en inference: false --- <html> <head> <style> .alert { padding: 15px; background-color: #f44336; color: white; } </style> </head> <body> <div class="alert"> <strong>Warning:</strong> This fine-tuned model has only undergone 200 steps of fine-tuning and may not be reliable. The final model will not be released. </div> </body> </html> <br> # RedPajama-INCITE-Chat-3B-Instruction-Tuning-with-GPT-4 RedPajama-INCITE-Chat-3B Model finetuned <a href="https://huggingface.co/datasets/Fredithefish/Instruction-Tuning-with-GPT-4-RedPajama-Chat">on this dataset</a> ## Reproduction The code for the finetuning of this model can be found at https://github.com/fredi-python/Fine-tune-RedPajama-Chat-3B ## Usage and License Notices The Model is intended and licensed for research use only. The model is under the CC BY NC 4.0 license (allowing only non-commercial use)
996
[ [ -0.0275421142578125, -0.07763671875, 0.0018625259399414062, 0.047149658203125, -0.01922607421875, -0.032440185546875, -0.030364990234375, -0.04815673828125, -0.0040283203125, 0.0208587646484375, -0.07769775390625, -0.0323486328125, -0.037811279296875, 0.009857177734375, -0.03271484375, 0.1060791015625, 0.007274627685546875, -0.004726409912109375, 0.0109405517578125, 0.0177459716796875, -0.057830810546875, -0.01155853271484375, -0.0650634765625, -0.023040771484375, 0.00879669189453125, 0.04351806640625, 0.061737060546875, 0.040679931640625, 0.043212890625, 0.0170135498046875, -0.01210784912109375, -0.0082550048828125, -0.06396484375, -0.0188446044921875, 0.006893157958984375, -0.0196075439453125, -0.05352783203125, 0.017059326171875, 0.026153564453125, 0.0195159912109375, -0.01715087890625, 0.0225982666015625, 0.01433563232421875, 0.0307159423828125, -0.03570556640625, 0.0293731689453125, -0.056304931640625, -0.0258331298828125, -0.006542205810546875, 0.004123687744140625, -0.0133209228515625, -0.0307159423828125, 0.0019741058349609375, -0.04083251953125, 0.0194244384765625, 0.00640869140625, 0.08135986328125, 0.01806640625, 0.003032684326171875, 0.0086669921875, -0.0372314453125, 0.058837890625, -0.055328369140625, 0.01319122314453125, 0.04583740234375, 0.0238800048828125, -0.0133209228515625, -0.05645751953125, -0.042999267578125, -0.00994110107421875, 0.0009441375732421875, -0.013671875, -0.0279693603515625, -0.0101776123046875, 0.0177764892578125, 0.02203369140625, -0.04095458984375, -0.02130126953125, -0.040985107421875, -0.011688232421875, 0.0548095703125, 0.0175933837890625, 0.017913818359375, -0.0022449493408203125, -0.022430419921875, -0.0087890625, -0.046905517578125, -0.01030731201171875, 0.030487060546875, 0.043304443359375, -0.07159423828125, 0.03692626953125, -0.0321044921875, 0.05108642578125, 0.02801513671875, -0.0142669677734375, 0.03289794921875, -0.04107666015625, -0.034515380859375, -0.0234832763671875, 0.07415771484375, 0.06170654296875, 
0.027008056640625, 0.0183563232421875, -0.0075531005859375, 0.0111236572265625, 0.0287933349609375, -0.06829833984375, -0.056427001953125, 0.025360107421875, -0.029632568359375, -0.026885986328125, -0.00855255126953125, -0.040191650390625, 0.00677490234375, -0.0168914794921875, 0.0474853515625, -0.046356201171875, -0.03533935546875, 0.00807952880859375, 0.00640869140625, -0.0031604766845703125, 0.0206756591796875, -0.08697509765625, 0.0276336669921875, 0.04644775390625, 0.05328369140625, 0.0142822265625, -0.0161285400390625, 0.0128326416015625, -0.01023101806640625, -0.005889892578125, 0.032806396484375, -0.037506103515625, -0.02337646484375, -0.018402099609375, 0.02435302734375, -0.0013132095336914062, -0.01201629638671875, 0.0562744140625, -0.0276641845703125, 0.0072174072265625, 0.003124237060546875, -0.0202789306640625, -0.0189056396484375, 0.017364501953125, -0.0361328125, 0.050384521484375, 0.0164947509765625, -0.06610107421875, 0.025360107421875, -0.043731689453125, -0.02294921875, -0.0076446533203125, 0.01433563232421875, -0.063232421875, -0.020050048828125, 0.013885498046875, 0.04742431640625, 0.00585174560546875, -0.00335693359375, -0.052978515625, -0.044891357421875, 0.009979248046875, -0.02630615234375, 0.049407958984375, 0.0287628173828125, -0.0206146240234375, 0.01136016845703125, -0.060638427734375, 0.005214691162109375, 0.021087646484375, -0.0215301513671875, 0.00511932373046875, -0.0033245086669921875, 0.01540374755859375, 0.0000388026237487793, 0.043365478515625, -0.031585693359375, 0.01166534423828125, -0.028106689453125, 0.037506103515625, 0.044189453125, 0.0086669921875, 0.024169921875, -0.04730224609375, 0.029388427734375, -0.01084136962890625, 0.04718017578125, 0.0109710693359375, -0.0511474609375, -0.045867919921875, -0.0071563720703125, 0.015899658203125, 0.0270843505859375, -0.0210113525390625, 0.04132080078125, 0.0007281303405761719, -0.0538330078125, -0.028564453125, -0.013702392578125, 0.0175018310546875, 0.017059326171875, 
0.03643798828125, 0.007720947265625, -0.042449951171875, -0.053375244140625, -0.0075531005859375, 0.0120849609375, 0.00980377197265625, 0.026031494140625, 0.050933837890625, -0.032623291015625, 0.0322265625, -0.0249786376953125, -0.011138916015625, -0.00917816162109375, 0.003765106201171875, 0.0377197265625, 0.06561279296875, 0.038330078125, -0.046051025390625, -0.050689697265625, -0.005275726318359375, -0.03057861328125, -0.0027923583984375, -0.00879669189453125, -0.018035888671875, 0.00339508056640625, 0.01166534423828125, -0.034820556640625, 0.037353515625, 0.05517578125, -0.03216552734375, 0.033935546875, -0.005084991455078125, 0.01824951171875, -0.0738525390625, 0.0033130645751953125, 0.02301025390625, -0.016510009765625, -0.03369140625, 0.0134429931640625, 0.0257568359375, -0.0100555419921875, -0.031494140625, 0.02569580078125, -0.024871826171875, 0.0299835205078125, -0.0272064208984375, -0.00345611572265625, -0.0189208984375, 0.037811279296875, -0.016693115234375, 0.055084228515625, 0.041778564453125, -0.05694580078125, 0.0226898193359375, 0.040496826171875, -0.0253448486328125, 0.0261077880859375, -0.059356689453125, -0.001483917236328125, 0.03253173828125, 0.0030918121337890625, -0.0467529296875, -0.043975830078125, 0.04559326171875, -0.0435791015625, 0.039825439453125, -0.01505279541015625, -0.03887939453125, -0.019439697265625, -0.0275726318359375, 0.02685546875, 0.03936767578125, -0.022369384765625, -0.004474639892578125, 0.04083251953125, -0.0122222900390625, -0.0261993408203125, -0.0361328125, -0.031982421875, -0.02374267578125, -0.0570068359375, 0.0179290771484375, -0.0214996337890625, 0.007110595703125, -0.0174713134765625, -0.005191802978515625, -0.00962066650390625, 0.0222930908203125, 0.031463623046875, 0.02056884765625, 0.01386260986328125, -0.00855255126953125, 0.0018205642700195312, -0.00604248046875, 0.0123291015625, 0.003902435302734375, 0.0498046875, -0.016876220703125, -0.0162506103515625, -0.07952880859375, 0.004848480224609375, 
0.03326416015625, 0.0078277587890625, 0.0626220703125, 0.03887939453125, -0.048065185546875, 0.0004658699035644531, -0.029449462890625, -0.027801513671875, -0.037261962890625, 0.01018524169921875, -0.0159149169921875, -0.054656982421875, 0.0352783203125, 0.01473236083984375, -0.01385498046875, 0.051849365234375, 0.04388427734375, -0.01410675048828125, 0.09722900390625, 0.0570068359375, -0.0020389556884765625, 0.053558349609375, -0.0139007568359375, -0.00548553466796875, -0.058746337890625, -0.0275726318359375, -0.04547119140625, -0.0258636474609375, -0.07470703125, 0.010040283203125, 0.026519775390625, 0.0195465087890625, -0.0625, 0.045379638671875, -0.0592041015625, 0.03790283203125, 0.040283203125, 0.036712646484375, 0.001880645751953125, -0.0052032470703125, 0.00783538818359375, -0.004474639892578125, -0.031524658203125, -0.0240478515625, 0.08099365234375, 0.03692626953125, 0.07550048828125, 0.0016164779663085938, 0.0309600830078125, -0.015716552734375, 0.01617431640625, -0.020111083984375, 0.015655517578125, 0.0010833740234375, -0.061126708984375, -0.0157318115234375, -0.03692626953125, -0.0587158203125, -0.008453369140625, -0.03143310546875, -0.0635986328125, -0.003662109375, 0.0163116455078125, -0.03515625, 0.00276947021484375, -0.056793212890625, 0.0821533203125, 0.004917144775390625, -0.024169921875, -0.01415252685546875, -0.046630859375, 0.0311737060546875, 0.00325775146484375, 0.007602691650390625, -0.0223236083984375, 0.027008056640625, 0.0399169921875, -0.04632568359375, 0.07916259765625, -0.0075531005859375, -0.0017938613891601562, 0.020111083984375, -0.00008827447891235352, 0.05859375, 0.0154876708984375, 0.00647735595703125, 0.028900146484375, 0.01309967041015625, -0.01678466796875, -0.0032596588134765625, 0.0709228515625, -0.07940673828125, -0.02838134765625, -0.024810791015625, -0.020965576171875, -0.0028095245361328125, 0.025360107421875, 0.039093017578125, 0.00893402099609375, 0.0034770965576171875, 0.02392578125, 0.032745361328125, 
0.00551605224609375, 0.0212860107421875, 0.03302001953125, -0.016326904296875, -0.03717041015625, 0.06829833984375, -0.00991058349609375, 0.0195159912109375, 0.0029392242431640625, 0.0073394775390625, -0.026763916015625, -0.04388427734375, -0.007198333740234375, 0.034210205078125, -0.031951904296875, -0.034454345703125, -0.039459228515625, -0.0199127197265625, -0.037384033203125, 0.00433349609375, -0.024261474609375, -0.0252838134765625, -0.07110595703125, 0.00466156005859375, 0.0560302734375, 0.0526123046875, -0.0177001953125, 0.039581298828125, -0.05059814453125, 0.0178680419921875, 0.01849365234375, 0.061553955078125, -0.0187530517578125, -0.04412841796875, -0.0255126953125, 0.041046142578125, -0.04443359375, -0.046417236328125, 0.03338623046875, -0.0014934539794921875, 0.0264892578125, 0.01505279541015625, 0.00855255126953125, 0.0654296875, -0.027496337890625, 0.05108642578125, 0.01320648193359375, -0.06158447265625, 0.05859375, -0.055328369140625, 0.0264129638671875, 0.04815673828125, 0.0318603515625, -0.0166015625, -0.038055419921875, -0.069091796875, -0.04937744140625, 0.0706787109375, 0.0279083251953125, 0.02142333984375, -0.01800537109375, 0.03009033203125, -0.0024776458740234375, 0.01551055908203125, -0.07574462890625, -0.0190582275390625, -0.033447265625, -0.01377105712890625, 0.019775390625, -0.02191162109375, -0.0233917236328125, -0.0330810546875, 0.07659912109375, -0.0096588134765625, 0.04034423828125, -0.01482391357421875, 0.01476287841796875, -0.03814697265625, 0.0005030632019042969, 0.06256103515625, 0.045379638671875, -0.044586181640625, -0.03570556640625, 0.018463134765625, -0.04132080078125, -0.029998779296875, 0.0120849609375, 0.003513336181640625, -0.035552978515625, 0.013671875, 0.07098388671875, 0.0262603759765625, -0.004657745361328125, 0.04608154296875, -0.0283203125, -0.0153045654296875, -0.0343017578125, 0.03009033203125, 0.006916046142578125, 0.031982421875, 0.0019521713256835938, 0.02294921875, 0.00838470458984375, -0.018096923828125, 
0.01320648193359375, 0.052978515625, -0.0183258056640625, -0.02569580078125, 0.07611083984375, 0.0208892822265625, -0.00826263427734375, 0.042236328125, -0.00867462158203125, -0.006732940673828125, 0.034332275390625, 0.0482177734375, 0.0450439453125, 0.00782012939453125, -0.01515960693359375, 0.051910400390625, 0.020477294921875, -0.0177154541015625, 0.04833984375, 0.00478363037109375, -0.046478271484375, -0.0253448486328125, -0.06060791015625, -0.025604248046875, 0.0399169921875, -0.056182861328125, 0.04180908203125, -0.04656982421875, -0.0218353271484375, -0.00921630859375, 0.0092926025390625, -0.0433349609375, 0.040008544921875, -0.00220489501953125, 0.072265625, -0.057769775390625, 0.0513916015625, 0.0318603515625, -0.0294189453125, -0.08026123046875, 0.005664825439453125, -0.0031032562255859375, -0.05279541015625, 0.0163421630859375, 0.024749755859375, 0.00977325439453125, 0.006744384765625, -0.0848388671875, -0.06036376953125, 0.0838623046875, 0.0236968994140625, -0.04937744140625, 0.0159759521484375, -0.02716064453125, 0.0313720703125, -0.020721435546875, 0.02850341796875, 0.044189453125, 0.0283966064453125, 0.00504302978515625, -0.09423828125, -0.0002503395080566406, -0.020538330078125, -0.004917144775390625, 0.00600433349609375, -0.07281494140625, 0.08837890625, -0.004688262939453125, -0.037078857421875, 0.0280303955078125, 0.049072265625, 0.0147705078125, -0.004749298095703125, 0.02252197265625, 0.04071044921875, 0.03558349609375, -0.036163330078125, 0.080810546875, -0.0440673828125, 0.06622314453125, 0.0802001953125, 0.01377105712890625, 0.054962158203125, -0.0013799667358398438, -0.0248260498046875, 0.0298004150390625, 0.0611572265625, -0.017303466796875, 0.058929443359375, -0.005153656005859375, -0.0022754669189453125, -0.01617431640625, 0.0259246826171875, -0.043914794921875, 0.0257568359375, 0.0406494140625, -0.020538330078125, -0.00001627206802368164, -0.0300140380859375, 0.01383209228515625, -0.01320648193359375, -0.020965576171875, 0.061767578125, 
-0.006103515625, -0.032073974609375, 0.08990478515625, -0.0016050338745117188, 0.0626220703125, -0.0369873046875, -0.012939453125, -0.0081939697265625, 0.0194244384765625, -0.0278167724609375, -0.05682373046875, 0.024261474609375, -0.005664825439453125, -0.00731658935546875, -0.00415802001953125, 0.03961181640625, -0.0242462158203125, -0.0391845703125, 0.0086517333984375, -0.005016326904296875, 0.00946044921875, 0.0015163421630859375, -0.06597900390625, 0.025390625, 0.0205841064453125, -0.0226287841796875, 0.01308441162109375, 0.0232696533203125, 0.01258087158203125, 0.0618896484375, 0.0484619140625, 0.00036835670471191406, -0.01206207275390625, -0.01174163818359375, 0.07171630859375, -0.043121337890625, -0.042083740234375, -0.0665283203125, 0.052490234375, -0.0021915435791015625, -0.03167724609375, 0.05078125, 0.0189056396484375, 0.0743408203125, -0.01111602783203125, 0.06097412109375, -0.004291534423828125, 0.04388427734375, -0.04010009765625, 0.072021484375, -0.038818359375, 0.00457763671875, -0.0343017578125, -0.044921875, -0.0247344970703125, 0.0863037109375, 0.01033782958984375, 0.023406982421875, 0.043487548828125, 0.0665283203125, -0.00548553466796875, 0.03924560546875, 0.00641632080078125, 0.031463623046875, 0.002994537353515625, 0.029632568359375, 0.043304443359375, -0.047088623046875, 0.03387451171875, -0.049957275390625, -0.0219879150390625, -0.033935546875, -0.06585693359375, -0.0662841796875, -0.0384521484375, -0.03143310546875, -0.03118896484375, -0.01314544677734375, 0.0589599609375, 0.06640625, -0.07470703125, 0.0012063980102539062, -0.0162506103515625, -0.00335693359375, -0.0244140625, -0.02130126953125, -0.004550933837890625, -0.0099029541015625, -0.059783935546875, 0.022247314453125, -0.0045013427734375, 0.03643798828125, 0.0005555152893066406, 0.00855255126953125, -0.0159759521484375, -0.018524169921875, 0.008514404296875, 0.02081298828125, -0.044464111328125, -0.046478271484375, -0.034423828125, -0.01052093505859375, -0.0001920461654663086, 
0.0071258544921875, -0.0287322998046875, 0.0187530517578125, 0.02764892578125, -0.00045371055603027344, 0.04339599609375, -0.00342559814453125, 0.038848876953125, -0.0728759765625, 0.0087432861328125, -0.010711669921875, 0.039031982421875, 0.0189056396484375, -0.043060302734375, 0.043487548828125, 0.00826263427734375, -0.06427001953125, -0.043731689453125, 0.000027358531951904297, -0.07159423828125, -0.006832122802734375, 0.08782958984375, 0.004985809326171875, -0.01137542724609375, 0.01013946533203125, -0.0234832763671875, 0.032745361328125, -0.052886962890625, 0.0555419921875, 0.04010009765625, -0.032440185546875, -0.0134429931640625, -0.043701171875, 0.052459716796875, -0.009033203125, -0.050201416015625, 0.010650634765625, 0.04180908203125, 0.052398681640625, 0.01922607421875, 0.04022216796875, -0.00942230224609375, 0.027801513671875, -0.00897979736328125, 0.0049285888671875, -0.0111236572265625, -0.03302001953125, -0.0307159423828125, 0.01013946533203125, -0.0006709098815917969, -0.0232086181640625 ] ]
KnutJaegersberg/gpt-2-xl-EvolInstruct
2023-08-31T10:18:54.000Z
[ "transformers", "pytorch", "safetensors", "gpt2", "text-generation", "license:cc-by-nc-4.0", "endpoints_compatible", "has_space", "text-generation-inference", "region:us" ]
text-generation
KnutJaegersberg
null
null
KnutJaegersberg/gpt-2-xl-EvolInstruct
0
5,827
transformers
2023-07-14T22:50:21
--- license: cc-by-nc-4.0 --- Prompt: "Below is an instruction that describes a task. Write a response that appropriately completes the request. \n\n### Instruction:\n INSTRUCTION. \n### Response:\n"
201
[ [ -0.0163726806640625, -0.039093017578125, 0.0147705078125, 0.038787841796875, -0.007411956787109375, -0.0013370513916015625, 0.0259246826171875, 0.016998291015625, 0.03277587890625, 0.056884765625, -0.070068359375, -0.0147705078125, -0.032318115234375, 0.0214691162109375, -0.01148223876953125, 0.06756591796875, -0.00954437255859375, 0.01012420654296875, -0.0287017822265625, 0.0256195068359375, -0.0675048828125, -0.0181732177734375, -0.087890625, -0.0170135498046875, 0.04876708984375, 0.08233642578125, 0.0185394287109375, 0.0384521484375, 0.038116455078125, 0.025787353515625, 0.01605224609375, 0.0151519775390625, -0.013916015625, 0.037384033203125, -0.00241851806640625, -0.0217132568359375, -0.054901123046875, 0.0025081634521484375, 0.04571533203125, 0.041351318359375, 0.011474609375, 0.03033447265625, 0.0030307769775390625, 0.0221099853515625, -0.02545166015625, 0.01154327392578125, 0.007091522216796875, 0.0243377685546875, 0.006618499755859375, -0.01361083984375, -0.021759033203125, -0.042510986328125, -0.003963470458984375, -0.044769287109375, -0.0006184577941894531, 0.0105743408203125, 0.041900634765625, 0.01000213623046875, -0.0252685546875, -0.01303863525390625, -0.03741455078125, 0.03228759765625, -0.005702972412109375, 0.0077972412109375, 0.070068359375, 0.03521728515625, 0.0037708282470703125, -0.07574462890625, -0.043670654296875, -0.0209197998046875, -0.018280029296875, 0.02716064453125, 0.0173797607421875, -0.0016632080078125, 0.05755615234375, 0.00290679931640625, -0.0205230712890625, -0.0235443115234375, -0.03369140625, -0.02288818359375, 0.0204620361328125, 0.04766845703125, 0.0207672119140625, -0.003391265869140625, -0.0099639892578125, -0.004642486572265625, -0.0205535888671875, 0.024017333984375, -0.01004791259765625, 0.0296173095703125, -0.019500732421875, 0.07623291015625, -0.0272064208984375, 0.04315185546875, 0.0222015380859375, 0.0260162353515625, 0.0006074905395507812, -0.037933349609375, -0.024169921875, 0.00830078125, 0.0197906494140625, 
0.039581298828125, -0.006214141845703125, -0.0216522216796875, -0.0280914306640625, -0.0109710693359375, 0.033172607421875, -0.054656982421875, -0.007419586181640625, 0.0135498046875, -0.04010009765625, 0.005306243896484375, 0.0036468505859375, -0.063232421875, -0.0239715576171875, -0.02313232421875, 0.00543212890625, 0.006671905517578125, 0.016693115234375, -0.01548004150390625, -0.0357666015625, 0.0266876220703125, 0.016510009765625, -0.05908203125, 0.04888916015625, 0.032470703125, 0.0207672119140625, 0.047607421875, -0.00861358642578125, -0.075439453125, 0.01751708984375, -0.005672454833984375, 0.066162109375, -0.0275726318359375, -0.0621337890625, 0.004932403564453125, 0.01708984375, 0.0121917724609375, -0.0142974853515625, 0.0284271240234375, -0.032501220703125, 0.038421630859375, -0.06280517578125, -0.031280517578125, -0.0159912109375, 0.022705078125, -0.053680419921875, 0.0557861328125, 0.021942138671875, -0.042022705078125, -0.0101165771484375, -0.07940673828125, -0.0281982421875, 0.0274200439453125, 0.011627197265625, 0.0316162109375, 0.0030803680419921875, -0.0030918121337890625, 0.02789306640625, -0.012298583984375, -0.04876708984375, -0.0203704833984375, -0.01221466064453125, 0.026885986328125, 0.0005087852478027344, 0.041900634765625, 0.0176239013671875, -0.01224517822265625, 0.0217132568359375, -0.04443359375, 0.0180511474609375, 0.01381683349609375, -0.00844573974609375, 0.0155181884765625, -0.02178955078125, 0.00301361083984375, -0.01178741455078125, 0.072509765625, -0.047149658203125, 0.045440673828125, 0.0019626617431640625, -0.00012165307998657227, 0.061279296875, 0.015838623046875, 0.036956787109375, -0.034820556640625, 0.06024169921875, -0.040740966796875, -0.0011663436889648438, -0.0167388916015625, -0.0141448974609375, -0.05560302734375, 0.01397705078125, 0.0016231536865234375, 0.07244873046875, -0.045196533203125, 0.01401519775390625, 0.00762176513671875, -0.021240234375, -0.0185089111328125, -0.00887298583984375, 0.05096435546875, 
0.06640625, 0.044464111328125, 0.010589599609375, -0.0740966796875, -0.041900634765625, -0.000050067901611328125, -0.03216552734375, -0.00447845458984375, 0.0193328857421875, 0.016082763671875, -0.012664794921875, 0.03228759765625, -0.06585693359375, 0.0237579345703125, -0.01332855224609375, 0.015533447265625, 0.00887298583984375, 0.0399169921875, -0.0045013427734375, -0.053070068359375, -0.041015625, -0.0212554931640625, -0.049224853515625, -0.042694091796875, -0.00872802734375, -0.02056884765625, -0.0007867813110351562, 0.038330078125, -0.051025390625, 0.038177490234375, -0.020843505859375, -0.0501708984375, 0.029449462890625, -0.0026111602783203125, 0.030059814453125, -0.08709716796875, 0.0008087158203125, -0.03448486328125, 0.0068511962890625, -0.03363037109375, 0.0004239082336425781, -0.018035888671875, 0.00817108154296875, -0.020050048828125, 0.030426025390625, -0.034759521484375, 0.002422332763671875, -0.03582763671875, -0.00511932373046875, 0.0134124755859375, -0.01006317138671875, 0.0015239715576171875, 0.06768798828125, 0.0269317626953125, -0.045196533203125, 0.10479736328125, 0.055572509765625, 0.0261077880859375, 0.053985595703125, -0.08319091796875, 0.0313720703125, -0.0238800048828125, 0.018218994140625, -0.1143798828125, -0.0472412109375, 0.02984619140625, -0.0421142578125, 0.00450897216796875, 0.00444793701171875, -0.04327392578125, -0.0623779296875, -0.028778076171875, 0.03143310546875, 0.030181884765625, -0.00992584228515625, 0.0257568359375, 0.0053253173828125, -0.0162200927734375, -0.0262908935546875, -0.07025146484375, 0.01323699951171875, -0.009521484375, -0.02490234375, 0.004779815673828125, -0.040496826171875, 0.0129852294921875, -0.0239715576171875, -0.0074920654296875, -0.037933349609375, 0.006252288818359375, 0.02294921875, 0.024383544921875, -0.0259246826171875, -0.01049041748046875, -0.01457977294921875, -0.01212310791015625, 0.0220489501953125, 0.0152587890625, 0.033966064453125, -0.0181427001953125, -0.0201568603515625, 
-0.03387451171875, 0.0767822265625, 0.0086212158203125, -0.00508880615234375, 0.0024433135986328125, 0.03509521484375, -0.0452880859375, 0.02142333984375, -0.01015472412109375, -0.03509521484375, -0.03338623046875, -0.004734039306640625, -0.0259246826171875, -0.02239990234375, 0.042877197265625, 0.0180206298828125, 0.00954437255859375, 0.01580810546875, 0.023712158203125, -0.044403076171875, 0.0526123046875, 0.034698486328125, 0.024505615234375, 0.01424407958984375, -0.00916290283203125, 0.0294952392578125, -0.0679931640625, -0.05010986328125, -0.06024169921875, -0.0225372314453125, 0.00547027587890625, -0.0031185150146484375, 0.005985260009765625, 0.0261993408203125, -0.017059326171875, 0.0703125, -0.03680419921875, 0.0160064697265625, 0.05523681640625, 0.00867462158203125, -0.031494140625, -0.0360107421875, 0.0038299560546875, 0.00875091552734375, -0.025970458984375, -0.0545654296875, 0.027130126953125, 0.0088653564453125, 0.062408447265625, 0.013946533203125, 0.06903076171875, 0.01531219482421875, -0.005893707275390625, -0.046600341796875, 0.07000732421875, 0.0241546630859375, -0.0262603759765625, -0.0341796875, 0.016693115234375, -0.0865478515625, -0.0261077880859375, -0.0142974853515625, -0.05352783203125, -0.02899169921875, 0.0268096923828125, -0.0687255859375, -0.0171051025390625, -0.0587158203125, 0.0841064453125, -0.0186614990234375, 0.02044677734375, 0.01338958740234375, -0.044097900390625, -0.01105499267578125, 0.050537109375, -0.0263671875, 0.01080322265625, -0.0482177734375, 0.04730224609375, -0.031951904296875, 0.060699462890625, -0.0151214599609375, 0.009796142578125, 0.013092041015625, -0.006717681884765625, 0.024505615234375, 0.0060272216796875, 0.0107879638671875, -0.072509765625, 0.0042724609375, -0.04083251953125, -0.044952392578125, 0.0231781005859375, -0.00737762451171875, -0.04351806640625, 0.00042819976806640625, -0.0200042724609375, -0.003692626953125, 0.0230865478515625, 0.03558349609375, 0.0789794921875, -0.0085296630859375, 
-0.01849365234375, 0.062744140625, 0.0019893646240234375, 0.07232666015625, 0.02490234375, 0.0009465217590332031, -0.0227813720703125, 0.044921875, 0.0186004638671875, -0.0191497802734375, 0.0450439453125, 0.004055023193359375, -0.059661865234375, -0.0230255126953125, -0.03973388671875, -0.0016164779663085938, -0.040771484375, -0.045501708984375, -0.05389404296875, 0.00276947021484375, -0.058197021484375, -0.031982421875, 0.0149383544921875, -0.02862548828125, -0.03179931640625, -0.0321044921875, 0.0362548828125, 0.0491943359375, -0.012725830078125, 0.056884765625, -0.0911865234375, 0.051361083984375, 0.03826904296875, 0.01654052734375, -0.01258087158203125, -0.0222930908203125, -0.00838470458984375, 0.00861358642578125, -0.03985595703125, -0.062408447265625, -0.01509857177734375, -0.01093292236328125, 0.035400390625, 0.01313018798828125, 0.031890869140625, 0.05615234375, -0.06170654296875, 0.088623046875, 0.0228424072265625, -0.06500244140625, 0.08001708984375, -0.023681640625, 0.038970947265625, 0.0408935546875, 0.0271759033203125, -0.05096435546875, -0.014495849609375, -0.060333251953125, -0.05877685546875, 0.025787353515625, -0.0243072509765625, 0.05096435546875, -0.037139892578125, -0.003131866455078125, -0.007030487060546875, 0.025848388671875, -0.03582763671875, -0.023406982421875, -0.0163116455078125, -0.007595062255859375, 0.0443115234375, -0.03759765625, -0.01910400390625, -0.001697540283203125, 0.018035888671875, 0.006717681884765625, 0.058624267578125, 0.01287841796875, 0.0180511474609375, 0.005435943603515625, 0.0124969482421875, 0.07177734375, 0.032928466796875, -0.01383209228515625, 0.0145263671875, 0.00673675537109375, -0.0391845703125, 0.003147125244140625, 0.00914764404296875, 0.011962890625, -0.0059814453125, 0.057037353515625, 0.0224609375, -0.0304718017578125, -0.021728515625, 0.032196044921875, -0.02410888671875, 0.0006823539733886719, -0.033447265625, 0.01139068603515625, -0.0051422119140625, 0.007171630859375, 0.0209808349609375, 
-0.0021305084228515625, 0.024932861328125, -0.0255889892578125, 0.02655029296875, 0.0039825439453125, -0.0311431884765625, -0.003665924072265625, 0.0284271240234375, 0.0443115234375, -0.05059814453125, 0.0443115234375, 0.008056640625, -0.042633056640625, 0.06756591796875, 0.0517578125, 0.06549072265625, -0.006195068359375, 0.0273284912109375, 0.01261138916015625, 0.01010894775390625, 0.006069183349609375, 0.09844970703125, -0.02532958984375, -0.01995849609375, -0.0247955322265625, -0.00020885467529296875, -0.0206451416015625, -0.00921630859375, -0.0557861328125, -0.0189208984375, -0.0787353515625, -0.0009465217590332031, 0.012542724609375, -0.0227203369140625, -0.037078857421875, 0.032684326171875, 0.0097808837890625, 0.10186767578125, -0.031707763671875, 0.033203125, 0.08905029296875, -0.056976318359375, -0.074462890625, -0.0052947998046875, -0.01361846923828125, -0.0577392578125, 0.060760498046875, 0.0296630859375, -0.0063934326171875, -0.02374267578125, -0.076904296875, -0.0288238525390625, 0.055328369140625, -0.004302978515625, 0.01030731201171875, 0.0125274658203125, -0.022796630859375, 0.01971435546875, -0.0298919677734375, 0.043853759765625, 0.011199951171875, 0.05059814453125, 0.0036029815673828125, -0.046173095703125, 0.039337158203125, -0.02557373046875, 0.0182952880859375, 0.0114288330078125, -0.02899169921875, 0.041839599609375, -0.0212554931640625, -0.006481170654296875, 0.044342041015625, 0.06622314453125, -0.0112457275390625, 0.028350830078125, 0.03350830078125, 0.018798828125, 0.05230712890625, -0.0169219970703125, 0.0709228515625, 0.012420654296875, -0.01052093505859375, 0.0858154296875, 0.0019311904907226562, 0.0413818359375, 0.035003662109375, -0.018829345703125, 0.027313232421875, 0.06353759765625, -0.0038700103759765625, 0.049468994140625, 0.028778076171875, -0.037384033203125, -0.004108428955078125, -0.039581298828125, -0.024993896484375, 0.0177001953125, 0.00937652587890625, 0.0127105712890625, -0.01540374755859375, 0.00875091552734375, 
-0.002201080322265625, 0.016998291015625, -0.04571533203125, 0.0675048828125, -0.01500701904296875, -0.08349609375, 0.043182373046875, -0.01131439208984375, 0.0181732177734375, -0.02362060546875, -0.038848876953125, -0.03167724609375, -0.0224151611328125, -0.04339599609375, -0.07403564453125, 0.0478515625, 0.01483154296875, -0.058624267578125, -0.020416259765625, 0.0213775634765625, -0.0357666015625, -0.025299072265625, -0.0118560791015625, -0.0178985595703125, 0.033172607421875, 0.0245513916015625, -0.04888916015625, -0.006481170654296875, 0.00669097900390625, 0.0099639892578125, -0.0285491943359375, 0.045440673828125, 0.0193328857421875, 0.038299560546875, 0.01290130615234375, -0.0201568603515625, 0.01434326171875, -0.0054931640625, 0.051849365234375, -0.058441162109375, -0.0264434814453125, -0.03363037109375, 0.07086181640625, -0.046173095703125, -0.0193328857421875, 0.05059814453125, 0.0298919677734375, 0.041473388671875, -0.0227813720703125, 0.047882080078125, -0.032989501953125, 0.057861328125, -0.03704833984375, -0.013580322265625, -0.0034008026123046875, -0.0096282958984375, 0.013763427734375, -0.0430908203125, -0.01482391357421875, 0.0596923828125, 0.0007224082946777344, 0.007785797119140625, 0.06549072265625, 0.0401611328125, -0.001598358154296875, 0.013214111328125, 0.002910614013671875, 0.005313873291015625, -0.0012063980102539062, 0.02886962890625, 0.060272216796875, -0.026611328125, 0.00713348388671875, 0.0037708282470703125, -0.024505615234375, -0.00943756103515625, -0.06536865234375, -0.052215576171875, -0.0173797607421875, -0.0135650634765625, -0.050262451171875, -0.030914306640625, 0.07037353515625, 0.045257568359375, -0.075439453125, -0.0163116455078125, 0.007022857666015625, 0.0297698974609375, -0.01270294189453125, -0.0095977783203125, -0.007274627685546875, -0.045806884765625, -0.027313232421875, 0.0230255126953125, -0.0218353271484375, 0.053375244140625, 0.0175933837890625, 0.0030384063720703125, -0.0033130645751953125, 0.01131439208984375, 
0.037628173828125, 0.03302001953125, -0.047607421875, -0.03143310546875, 0.01483917236328125, -0.04351806640625, -0.023712158203125, 0.04864501953125, -0.0182342529296875, 0.01119232177734375, 0.03717041015625, 0.06134033203125, 0.0176544189453125, 0.00852203369140625, 0.0655517578125, -0.058563232421875, -0.0081939697265625, 0.037811279296875, 0.01003265380859375, 0.0276641845703125, -0.05389404296875, 0.045684814453125, -0.00301361083984375, -0.03985595703125, -0.040435791015625, 0.022247314453125, -0.0960693359375, -0.01416778564453125, 0.05657958984375, -0.02325439453125, -0.01267242431640625, -0.0210113525390625, -0.07000732421875, 0.01983642578125, -0.04278564453125, 0.0157470703125, 0.0498046875, -0.018524169921875, -0.023223876953125, -0.03131103515625, 0.0345458984375, 0.02569580078125, -0.057891845703125, -0.006137847900390625, 0.05523681640625, -0.01161956787109375, 0.011077880859375, 0.06439208984375, 0.021209716796875, 0.045623779296875, 0.0137786865234375, -0.0234832763671875, -0.0127410888671875, -0.035888671875, 0.0019044876098632812, 0.01531982421875, -0.0016117095947265625, -0.0819091796875 ] ]
quantumaikr/llama-2-70b-fb16-orca-chat-10k
2023-08-11T04:43:45.000Z
[ "transformers", "safetensors", "llama", "text-generation", "en", "license:cc-by-nc-4.0", "endpoints_compatible", "has_space", "text-generation-inference", "region:us" ]
text-generation
quantumaikr
null
null
quantumaikr/llama-2-70b-fb16-orca-chat-10k
2
5,825
transformers
2023-08-11T04:12:37
--- license: cc-by-nc-4.0 language: - en pipeline_tag: text-generation --- # quantumaikr/llama-2-70b-fb16-orca-chat-10k ## Model Description `quantumaikr/llama-2-70b-fb16-orca-chat-10k` is a Llama2 70B model finetuned on an guanaco, oraca-chat-10k Dataset ## Usage Start chatting with `quantumaikr/llama-2-70b-fb16-orca-chat-10k` using the following code snippet: ```python import torch from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline tokenizer = AutoTokenizer.from_pretrained("quantumaikr/llama-2-70b-fb16-orca-chat-10k") model = AutoModelForCausalLM.from_pretrained("quantumaikr/llama-2-70b-fb16-orca-chat-10k", torch_dtype=torch.float16, device_map="auto") system_prompt = "### System:\nYou are QuantumLM, an AI that follows instructions extremely well. Help as much as you can. Remember, be safe, and don't do anything illegal.\n\n" message = "Write me a poem please" prompt = f"{system_prompt}### User: {message}\n\n### Assistant:\n" inputs = tokenizer(prompt, return_tensors="pt").to("cuda") output = model.generate(**inputs, do_sample=True, temperature=0.9, top_p=0.75, max_new_tokens=4096) print(tokenizer.decode(output[0], skip_special_tokens=True)) ``` QuantumLM should be used with this prompt format: ``` ### System: This is a system prompt, please behave and help the user. ### User: Your prompt here ### Assistant The output of QuantumLM ``` ## Use and Limitations ### Intended Use These models are intended for research only, in adherence with the [CC BY-NC-4.0](https://creativecommons.org/licenses/by-nc/4.0/) license. ### Limitations and bias Although the aforementioned dataset helps to steer the base language models into "safer" distributions of text, not all biases and toxicity can be mitigated through fine-tuning. We ask that users be mindful of such potential issues that can arise in generated responses. Do not treat model outputs as substitutes for human judgment or as sources of truth. Please use it responsibly. Contact us : hi@quantumai.kr
2,017
[ [ 0.0079803466796875, -0.061737060546875, 0.0186920166015625, 0.00792694091796875, -0.03216552734375, 0.00171661376953125, 0.00493621826171875, -0.0220947265625, 0.01250457763671875, 0.027496337890625, -0.0312347412109375, -0.033477783203125, -0.0400390625, -0.005130767822265625, -0.033660888671875, 0.07171630859375, 0.00945281982421875, -0.006481170654296875, 0.003437042236328125, 0.01071929931640625, -0.0313720703125, -0.033416748046875, -0.0792236328125, -0.0229644775390625, 0.0185089111328125, 0.0160369873046875, 0.05096435546875, 0.02691650390625, 0.0115966796875, 0.031463623046875, -0.01561737060546875, 0.0126953125, -0.042877197265625, 0.00920867919921875, 0.01084136962890625, -0.048736572265625, -0.039337158203125, -0.016143798828125, 0.055206298828125, 0.00600433349609375, -0.00577545166015625, 0.034576416015625, 0.0033397674560546875, 0.031280517578125, -0.038665771484375, 0.0204315185546875, -0.041900634765625, 0.00952911376953125, -0.02288818359375, -0.01136016845703125, -0.023773193359375, -0.018402099609375, -0.01042938232421875, -0.04754638671875, 0.0005421638488769531, 0.006473541259765625, 0.07403564453125, 0.0144500732421875, -0.046905517578125, -0.007488250732421875, -0.039398193359375, 0.058807373046875, -0.0728759765625, 0.02825927734375, 0.0272369384765625, 0.010589599609375, -0.0205535888671875, -0.05865478515625, -0.06353759765625, -0.0260467529296875, 0.002040863037109375, 0.004970550537109375, -0.034912109375, -0.0190887451171875, 0.0232086181640625, 0.0157318115234375, -0.04931640625, -0.0054473876953125, -0.0457763671875, -0.035430908203125, 0.04425048828125, 0.041259765625, 0.0157012939453125, -0.03436279296875, -0.01568603515625, -0.036865234375, -0.026611328125, 0.016845703125, 0.034423828125, 0.00921630859375, -0.03076171875, 0.062103271484375, -0.02557373046875, 0.033782958984375, 0.0149993896484375, -0.01136016845703125, 0.0289459228515625, -0.0265655517578125, -0.024139404296875, 0.007717132568359375, 0.08575439453125, 
0.0275421142578125, -0.00763702392578125, 0.0020008087158203125, -0.00237274169921875, 0.0114593505859375, -0.008392333984375, -0.055328369140625, -0.034454345703125, 0.033782958984375, -0.033355712890625, -0.03216552734375, -0.0172271728515625, -0.04718017578125, -0.02642822265625, 0.007305145263671875, 0.047637939453125, -0.0265350341796875, -0.03875732421875, 0.01375579833984375, -0.0221710205078125, 0.032012939453125, -0.0106964111328125, -0.06329345703125, 0.01323699951171875, 0.0300750732421875, 0.0513916015625, 0.017669677734375, -0.025787353515625, -0.01285552978515625, 0.01258087158203125, -0.0216827392578125, 0.038482666015625, -0.0138397216796875, -0.036376953125, -0.0262451171875, -0.007556915283203125, -0.005924224853515625, -0.022003173828125, 0.0426025390625, -0.03692626953125, 0.03369140625, -0.01059722900390625, -0.028839111328125, -0.018524169921875, -0.0027980804443359375, -0.0367431640625, 0.07550048828125, 0.00862884521484375, -0.0792236328125, -0.004108428955078125, -0.049163818359375, -0.0168609619140625, 0.006866455078125, -0.01317596435546875, -0.03741455078125, -0.00489044189453125, 0.01248931884765625, 0.0306396484375, -0.011627197265625, 0.0126953125, -0.0222320556640625, -0.0240936279296875, 0.034515380859375, -0.036834716796875, 0.09234619140625, 0.0287017822265625, -0.038909912109375, 0.0312347412109375, -0.058685302734375, 0.005279541015625, 0.0400390625, -0.0167999267578125, -0.002285003662109375, -0.0149383544921875, 0.00966644287109375, -0.00119781494140625, 0.04058837890625, -0.04669189453125, 0.00719451904296875, -0.0367431640625, 0.060882568359375, 0.0693359375, -0.0056610107421875, 0.026275634765625, -0.02886962890625, 0.040435791015625, 0.0127410888671875, 0.01593017578125, 0.0284576416015625, -0.0426025390625, -0.07208251953125, -0.013458251953125, 0.01305389404296875, 0.040985107421875, -0.0703125, 0.04107666015625, -0.0184173583984375, -0.055938720703125, -0.04254150390625, -0.004589080810546875, 0.023712158203125, 
0.04583740234375, 0.0114288330078125, -0.0084686279296875, -0.05322265625, -0.068359375, -0.00249481201171875, -0.022308349609375, 0.0095977783203125, 0.00719451904296875, 0.03656005859375, -0.034759521484375, 0.060943603515625, -0.0404052734375, -0.01369476318359375, -0.01459503173828125, -0.01446533203125, 0.031402587890625, 0.0582275390625, 0.00858306884765625, -0.041534423828125, -0.028167724609375, -0.0234527587890625, -0.07110595703125, 0.018524169921875, -0.01088714599609375, -0.045623779296875, 0.01702880859375, 0.0239105224609375, -0.0675048828125, 0.0249176025390625, 0.037872314453125, -0.055694580078125, 0.038818359375, -0.005130767822265625, 0.0024051666259765625, -0.09991455078125, 0.00957489013671875, -0.0076904296875, -0.0070648193359375, -0.04803466796875, 0.01093292236328125, -0.0096282958984375, 0.004974365234375, -0.043365478515625, 0.054412841796875, -0.004817962646484375, 0.0189361572265625, -0.0126953125, -0.004222869873046875, -0.0037784576416015625, 0.041900634765625, -0.009368896484375, 0.0389404296875, 0.059478759765625, -0.0386962890625, 0.04656982421875, 0.03564453125, -0.01324462890625, 0.0076904296875, -0.060943603515625, 0.0187225341796875, 0.0096282958984375, 0.03350830078125, -0.0821533203125, -0.004322052001953125, 0.05902099609375, -0.056121826171875, 0.005245208740234375, 0.02020263671875, -0.055877685546875, -0.0261383056640625, -0.0290374755859375, 0.0472412109375, 0.052032470703125, -0.0244903564453125, 0.055908203125, 0.0201263427734375, 0.017974853515625, -0.03607177734375, -0.05633544921875, -0.00740814208984375, -0.0302276611328125, -0.0467529296875, 0.0052947998046875, -0.01326751708984375, 0.00385284423828125, 0.0031070709228515625, -0.00197601318359375, 0.0037326812744140625, 0.01294708251953125, 0.03369140625, 0.0304718017578125, -0.0008616447448730469, -0.00919342041015625, 0.01401519775390625, -0.020538330078125, 0.027008056640625, -0.007781982421875, 0.061614990234375, -0.03204345703125, -0.01059722900390625, 
-0.056915283203125, 0.00830841064453125, 0.031402587890625, 0.0011005401611328125, 0.06317138671875, 0.06402587890625, -0.0226593017578125, 0.0137481689453125, -0.0208587646484375, -0.0251007080078125, -0.04052734375, 0.032012939453125, -0.035980224609375, -0.062469482421875, 0.04296875, 0.03729248046875, 0.006793975830078125, 0.062103271484375, 0.051666259765625, -0.00432586669921875, 0.0841064453125, 0.0455322265625, -0.003948211669921875, 0.0265045166015625, -0.045257568359375, 0.0174102783203125, -0.08135986328125, -0.0207977294921875, -0.023956298828125, -0.01140594482421875, -0.053955078125, -0.007205963134765625, 0.0298614501953125, 0.01515960693359375, -0.044158935546875, 0.0280303955078125, -0.054779052734375, 0.007297515869140625, 0.060821533203125, 0.034423828125, 0.011810302734375, 0.002552032470703125, -0.008697509765625, 0.0193328857421875, -0.05718994140625, -0.033447265625, 0.09625244140625, 0.037200927734375, 0.04376220703125, 0.015411376953125, 0.058380126953125, 0.0007152557373046875, -0.00881195068359375, -0.024169921875, 0.0579833984375, 0.0155181884765625, -0.0567626953125, -0.02972412109375, -0.040130615234375, -0.07574462890625, 0.016876220703125, -0.003936767578125, -0.052215576171875, 0.0036640167236328125, 0.0131988525390625, -0.042236328125, 0.0225067138671875, -0.046630859375, 0.061187744140625, 0.0014667510986328125, -0.00016009807586669922, -0.0147857666015625, -0.0374755859375, 0.04364013671875, -0.0002713203430175781, 0.005218505859375, -0.0096282958984375, 0.006927490234375, 0.08087158203125, -0.02423095703125, 0.073486328125, 0.00185394287109375, -0.0131683349609375, 0.032318115234375, 0.0009222030639648438, 0.0274505615234375, 0.01348876953125, -0.003173828125, 0.027740478515625, 0.0284881591796875, -0.0455322265625, -0.0130157470703125, 0.048980712890625, -0.08807373046875, -0.04541015625, -0.042266845703125, -0.032470703125, 0.01132965087890625, 0.00859832763671875, 0.04864501953125, 0.0210723876953125, 0.025390625, 
-0.0028705596923828125, 0.0312347412109375, -0.022216796875, 0.036590576171875, 0.0261077880859375, -0.0225372314453125, -0.043548583984375, 0.073486328125, 0.00041961669921875, 0.002582550048828125, 0.0085906982421875, 0.02423095703125, -0.03363037109375, -0.020233154296875, -0.035308837890625, 0.02166748046875, -0.05999755859375, -0.027923583984375, -0.04400634765625, -0.03350830078125, -0.0489501953125, 0.01140594482421875, -0.0137481689453125, -0.032501220703125, -0.040130615234375, -0.0030155181884765625, 0.052154541015625, 0.0248870849609375, -0.0166015625, 0.0253753662109375, -0.0604248046875, 0.0284576416015625, 0.035736083984375, -0.0011053085327148438, 0.0251007080078125, -0.07061767578125, -0.0112762451171875, 0.01511383056640625, -0.0298309326171875, -0.06982421875, 0.03851318359375, 0.0031890869140625, 0.0421142578125, 0.00897216796875, 0.0217742919921875, 0.046539306640625, -0.0169677734375, 0.066650390625, 0.01480865478515625, -0.08941650390625, 0.047332763671875, -0.0207061767578125, 0.035247802734375, 0.01116943359375, 0.0181884765625, -0.03094482421875, -0.054443359375, -0.063720703125, -0.0594482421875, 0.05059814453125, 0.03802490234375, 0.033935546875, -0.0019178390502929688, 0.024993896484375, -0.0118865966796875, 0.0113677978515625, -0.08648681640625, -0.039398193359375, -0.03021240234375, -0.0243377685546875, -0.009796142578125, -0.021575927734375, -0.0136260986328125, -0.03460693359375, 0.07159423828125, 0.0035076141357421875, 0.039520263671875, 0.020294189453125, -0.01096343994140625, -0.0240020751953125, 0.0025196075439453125, 0.046478271484375, 0.02862548828125, -0.00704193115234375, 0.011199951171875, 0.027923583984375, -0.043182373046875, 0.00272369384765625, 0.0027790069580078125, -0.013458251953125, -0.018524169921875, 0.0254058837890625, 0.07354736328125, -0.01245880126953125, -0.0399169921875, 0.009185791015625, -0.01526641845703125, -0.017608642578125, -0.029052734375, 0.0174102783203125, 0.021240234375, 0.048126220703125, 
0.027069091796875, -0.00902557373046875, 0.006221771240234375, -0.0277099609375, -0.004108428955078125, 0.038848876953125, -0.00004607439041137695, -0.01430511474609375, 0.066650390625, -0.0037384033203125, -0.0213470458984375, 0.06298828125, -0.01666259765625, -0.039886474609375, 0.04620361328125, 0.0390625, 0.0626220703125, 0.00789642333984375, 0.008087158203125, 0.0272064208984375, 0.01108551025390625, 0.0211944580078125, 0.033355712890625, 0.0029087066650390625, -0.058319091796875, -0.01435089111328125, -0.052703857421875, -0.011566162109375, 0.01459503173828125, -0.036590576171875, 0.00974273681640625, -0.038238525390625, -0.0235595703125, -0.006847381591796875, 0.023590087890625, -0.03961181640625, -0.00004172325134277344, 0.0176239013671875, 0.0406494140625, -0.076416015625, 0.07867431640625, 0.039337158203125, -0.035430908203125, -0.08734130859375, -0.027008056640625, -0.0141448974609375, -0.0667724609375, 0.056976318359375, -0.00015616416931152344, 0.005977630615234375, 0.028778076171875, -0.05804443359375, -0.07049560546875, 0.094482421875, 0.0166168212890625, -0.0260467529296875, 0.01094818115234375, 0.00893402099609375, 0.03399658203125, -0.0007753372192382812, 0.03851318359375, 0.03424072265625, 0.022064208984375, 0.0005517005920410156, -0.06732177734375, 0.0302734375, -0.0248870849609375, 0.01007843017578125, 0.00489044189453125, -0.062347412109375, 0.081787109375, -0.00348663330078125, -0.0155181884765625, 0.022674560546875, 0.07763671875, 0.04754638671875, 0.00925445556640625, 0.039398193359375, 0.05267333984375, 0.04351806640625, 0.008026123046875, 0.051483154296875, -0.0244598388671875, 0.048828125, 0.05255126953125, -0.0017642974853515625, 0.072998046875, 0.026123046875, -0.024322509765625, 0.050323486328125, 0.0596923828125, -0.0143890380859375, 0.04254150390625, 0.0150909423828125, -0.0180816650390625, -0.00624847412109375, 0.01306915283203125, -0.03875732421875, 0.026702880859375, 0.015380859375, -0.00894927978515625, -0.00644683837890625, 
0.00820159912109375, 0.0014238357543945312, -0.0235595703125, -0.0127716064453125, 0.041046142578125, 0.0094146728515625, -0.0168609619140625, 0.0789794921875, 0.00891876220703125, 0.06707763671875, -0.029541015625, -0.0017118453979492188, -0.01361846923828125, 0.01197052001953125, -0.0201263427734375, -0.035980224609375, 0.00936126708984375, -0.01031494140625, -0.0085601806640625, 0.010467529296875, 0.04425048828125, -0.025604248046875, -0.044891357421875, -0.0014810562133789062, 0.0209808349609375, 0.033447265625, 0.00131988525390625, -0.07342529296875, 0.01953125, 0.0226593017578125, -0.0235137939453125, 0.019256591796875, 0.01079559326171875, 0.00554656982421875, 0.05816650390625, 0.059967041015625, -0.0111236572265625, 0.027984619140625, -0.025970458984375, 0.06585693359375, -0.047210693359375, -0.024749755859375, -0.0712890625, 0.040771484375, 0.00965118408203125, -0.03619384765625, 0.07135009765625, 0.040985107421875, 0.0550537109375, -0.005916595458984375, 0.03656005859375, -0.01751708984375, 0.0204925537109375, -0.0423583984375, 0.06549072265625, -0.0290374755859375, 0.01215362548828125, -0.0008645057678222656, -0.06329345703125, 0.0118865966796875, 0.0682373046875, -0.00604248046875, 0.00962066650390625, 0.04962158203125, 0.07098388671875, 0.0197906494140625, -0.005401611328125, 0.0103607177734375, 0.03759765625, 0.03350830078125, 0.062255859375, 0.06329345703125, -0.0723876953125, 0.04278564453125, -0.04620361328125, -0.00960540771484375, -0.00862884521484375, -0.054443359375, -0.06341552734375, -0.042572021484375, -0.041168212890625, -0.055694580078125, -0.0081634521484375, 0.06365966796875, 0.043792724609375, -0.05609130859375, -0.0228118896484375, -0.01247406005859375, 0.004520416259765625, -0.0036220550537109375, -0.018829345703125, 0.039581298828125, -0.007965087890625, -0.0533447265625, 0.006862640380859375, -0.0240325927734375, 0.0183258056640625, -0.0316162109375, -0.01030731201171875, -0.0083465576171875, 0.0014057159423828125, 
0.0265655517578125, 0.0246429443359375, -0.059814453125, -0.0121307373046875, -0.0090484619140625, -0.027557373046875, -0.00417327880859375, 0.00455474853515625, -0.06561279296875, 0.0057220458984375, 0.0288238525390625, 0.01116943359375, 0.034423828125, -0.034393310546875, 0.0193328857421875, -0.03759765625, 0.01108551025390625, 0.00782012939453125, 0.01378631591796875, 0.0076904296875, -0.045623779296875, 0.0153656005859375, 0.01055908203125, -0.045623779296875, -0.07470703125, -0.00677490234375, -0.08404541015625, -0.0284576416015625, 0.09796142578125, 0.005809783935546875, -0.0279388427734375, 0.0159149169921875, -0.031829833984375, 0.04998779296875, -0.034393310546875, 0.053314208984375, 0.0293121337890625, -0.014129638671875, -0.01091766357421875, -0.028778076171875, 0.028076171875, 0.040618896484375, -0.04498291015625, -0.015960693359375, 0.014495849609375, 0.0281982421875, 0.01385498046875, 0.051910400390625, 0.01148223876953125, 0.0200958251953125, 0.0131072998046875, 0.01357269287109375, -0.01085662841796875, 0.009033203125, -0.03564453125, -0.00385284423828125, 0.004608154296875, -0.03192138671875 ] ]
KoboldAI/fairseq-dense-355M
2023-03-31T17:11:49.000Z
[ "transformers", "pytorch", "safetensors", "xglm", "text-generation", "en", "arxiv:2112.10684", "endpoints_compatible", "has_space", "region:us" ]
text-generation
KoboldAI
null
null
KoboldAI/fairseq-dense-355M
6
5,824
transformers
2022-03-02T23:29:04
--- language: en --- This is a Hugging Face transformers-compatible conversion of the original dense 355M-parameter model from the paper "[Efficient Large Scale Language Modeling with Mixtures of Experts](https://arxiv.org/abs/2112.10684)" from Artetxe et al. Please refer to the original model card, which can be found at https://github.com/facebookresearch/fairseq/blob/main/examples/moe_lm/model_card.md.
408
[ [ -0.057281494140625, -0.049957275390625, 0.0197296142578125, 0.031890869140625, -0.01314544677734375, -0.046539306640625, -0.0222320556640625, -0.0196990966796875, 0.031463623046875, 0.06658935546875, -0.060699462890625, -0.012969970703125, -0.032745361328125, -0.017791748046875, -0.032867431640625, 0.0697021484375, -0.007656097412109375, 0.00803375244140625, -0.0183868408203125, 0.004180908203125, 0.0098724365234375, -0.0228118896484375, -0.055877685546875, -0.0273284912109375, 0.039459228515625, 0.0178680419921875, 0.07012939453125, 0.03973388671875, 0.031158447265625, 0.0234527587890625, 0.004581451416015625, -0.0142669677734375, -0.042724609375, -0.012115478515625, -0.01023101806640625, -0.027374267578125, -0.07403564453125, 0.03265380859375, 0.060821533203125, 0.052520751953125, -0.041168212890625, 0.00652313232421875, -0.0014181137084960938, 0.033416748046875, -0.0120849609375, 0.003757476806640625, -0.048004150390625, -0.0017805099487304688, -0.0153350830078125, -0.00313568115234375, -0.057952880859375, 0.0009093284606933594, -0.00782012939453125, -0.025360107421875, 0.0179290771484375, 0.0006132125854492188, 0.0777587890625, 0.0276641845703125, -0.03302001953125, 0.01303863525390625, -0.06097412109375, 0.04510498046875, -0.0416259765625, 0.050506591796875, 0.004482269287109375, 0.053131103515625, -0.0254364013671875, -0.053955078125, -0.05523681640625, 0.00305938720703125, 0.021514892578125, 0.0159454345703125, -0.01526641845703125, 0.0076904296875, 0.0195770263671875, 0.0452880859375, -0.0222320556640625, -0.007659912109375, -0.047027587890625, -0.01076507568359375, 0.07781982421875, 0.004268646240234375, 0.021240234375, -0.01259613037109375, -0.06451416015625, -0.02362060546875, -0.031585693359375, -0.0181427001953125, 0.0189208984375, 0.0209503173828125, -0.03143310546875, 0.039337158203125, -0.0171051025390625, 0.051849365234375, 0.0092010498046875, 0.00014269351959228516, 0.02130126953125, 0.0193634033203125, -0.0190277099609375, 
-0.0206146240234375, 0.053619384765625, 0.045989990234375, 0.037078857421875, -0.010040283203125, -0.0164794921875, -0.02410888671875, 0.039306640625, -0.08013916015625, -0.0311126708984375, -0.00839996337890625, -0.036895751953125, -0.00473785400390625, 0.0191650390625, -0.0491943359375, -0.00833892822265625, -0.027435302734375, 0.01375579833984375, -0.028564453125, -0.04522705078125, 0.01378631591796875, 0.020660400390625, 0.04217529296875, 0.0208892822265625, -0.0377197265625, 0.01438140869140625, 0.03533935546875, 0.039764404296875, -0.0023517608642578125, -0.005130767822265625, -0.026947021484375, 0.0064239501953125, 0.00164794921875, 0.047637939453125, -0.022979736328125, -0.032623291015625, 0.0029621124267578125, 0.00794219970703125, -0.01248931884765625, -0.048187255859375, 0.08209228515625, -0.052459716796875, 0.00836944580078125, 0.0153961181640625, -0.0250396728515625, -0.0240325927734375, 0.0146942138671875, -0.07452392578125, 0.09271240234375, 0.043609619140625, -0.0333251953125, 0.00959014892578125, -0.037353515625, 0.003536224365234375, 0.0263519287109375, 0.01091766357421875, -0.026458740234375, 0.024566650390625, 0.0021820068359375, 0.040191650390625, -0.023468017578125, 0.0299530029296875, -0.060150146484375, -0.00792694091796875, 0.025848388671875, -0.020599365234375, 0.07501220703125, 0.0313720703125, 0.0152740478515625, 0.01029205322265625, -0.061553955078125, 0.002712249755859375, 0.0194854736328125, -0.01461029052734375, -0.01346588134765625, -0.023681640625, 0.02264404296875, 0.04595947265625, 0.026031494140625, -0.03564453125, 0.0316162109375, 0.002483367919921875, 0.00832366943359375, 0.0216522216796875, -0.0174407958984375, 0.026824951171875, -0.0252838134765625, 0.041748046875, 0.00843048095703125, 0.0176849365234375, -0.003147125244140625, -0.048065185546875, -0.06475830078125, -0.049224853515625, 0.0125885009765625, 0.01274871826171875, -0.05224609375, 0.045013427734375, -0.0230712890625, -0.07843017578125, -0.0528564453125, 
0.005596160888671875, -0.01503753662109375, 0.0198822021484375, 0.0211334228515625, -0.030487060546875, -0.039581298828125, -0.07958984375, -0.0146331787109375, -0.0196380615234375, -0.01224517822265625, 0.0229644775390625, 0.0097198486328125, -0.05682373046875, 0.07476806640625, -0.03173828125, -0.0196533203125, -0.01248931884765625, -0.0080108642578125, 0.024383544921875, 0.061187744140625, 0.0618896484375, -0.034088134765625, -0.042327880859375, -0.01287841796875, -0.041229248046875, -0.0249481201171875, 0.01360321044921875, -0.041961669921875, 0.0054473876953125, 0.058013916015625, -0.06268310546875, 0.0195770263671875, 0.06378173828125, -0.0230712890625, 0.02838134765625, 0.0037689208984375, -0.0134429931640625, -0.095703125, 0.01134490966796875, 0.0155487060546875, -0.0338134765625, -0.046905517578125, 0.03863525390625, 0.021514892578125, -0.009613037109375, -0.035858154296875, 0.05645751953125, -0.0364990234375, 0.0223846435546875, -0.01384735107421875, 0.0086822509765625, -0.0166778564453125, 0.026611328125, -0.0107574462890625, 0.0275421142578125, 0.05401611328125, -0.029144287109375, 0.05352783203125, 0.028839111328125, -0.0012998580932617188, 0.07049560546875, -0.051727294921875, 0.0193328857421875, -0.0139312744140625, 0.0194854736328125, -0.06768798828125, -0.03411865234375, 0.01502227783203125, -0.0123291015625, 0.0240020751953125, 0.001117706298828125, -0.042205810546875, -0.0264129638671875, -0.005176544189453125, 0.05126953125, 0.061065673828125, -0.04833984375, 0.0789794921875, 0.037017822265625, -0.03057861328125, 0.013275146484375, -0.040435791015625, 0.005886077880859375, -0.0230255126953125, -0.06549072265625, 0.032958984375, -0.012176513671875, -0.008544921875, 0.0017042160034179688, 0.0290069580078125, -0.00821685791015625, -0.01554107666015625, 0.01483917236328125, 0.014862060546875, -0.04083251953125, -0.023345947265625, 0.013946533203125, -0.0105133056640625, 0.0262908935546875, 0.0160369873046875, 0.049102783203125, -0.00646209716796875, 
-0.007770538330078125, -0.041168212890625, 0.034942626953125, 0.05841064453125, -0.00974273681640625, 0.06927490234375, 0.0626220703125, -0.036956787109375, -0.0261688232421875, -0.0511474609375, -0.0268096923828125, -0.037017822265625, 0.0292510986328125, -0.04205322265625, -0.042144775390625, 0.05487060546875, -0.00875091552734375, -0.02459716796875, 0.06011962890625, 0.0382080078125, 0.018218994140625, 0.08367919921875, 0.04302978515625, -0.0011053085327148438, 0.0377197265625, 0.0047760009765625, 0.007175445556640625, -0.06329345703125, -0.0245361328125, -0.02325439453125, -0.02362060546875, -0.03887939453125, -0.04620361328125, 0.0177459716796875, 0.0179595947265625, -0.0087432861328125, 0.033660888671875, -0.008331298828125, 0.007061004638671875, 0.0357666015625, 0.0167236328125, 0.0160369873046875, 0.00815582275390625, 0.01044464111328125, -0.013519287109375, -0.045135498046875, -0.028472900390625, 0.04949951171875, 0.0506591796875, 0.06182861328125, 0.0098419189453125, 0.04205322265625, -0.01488494873046875, 0.0540771484375, -0.048919677734375, 0.056488037109375, -0.0007762908935546875, -0.0677490234375, 0.01187896728515625, -0.03521728515625, -0.052154541015625, 0.0194549560546875, -0.02935791015625, -0.049835205078125, -0.00606536865234375, -0.00543212890625, -0.006023406982421875, 0.0278167724609375, -0.058990478515625, 0.07318115234375, 0.019195556640625, -0.006744384765625, -0.007038116455078125, -0.0167236328125, 0.046173095703125, 0.004917144775390625, -0.00662994384765625, -0.0187530517578125, 0.00516510009765625, 0.050506591796875, -0.0218505859375, 0.05657958984375, -0.02001953125, -0.03607177734375, 0.0292816162109375, 0.005054473876953125, 0.0273590087890625, 0.0015125274658203125, -0.024566650390625, 0.0207977294921875, -0.01381683349609375, -0.04522705078125, -0.048004150390625, 0.059600830078125, -0.06805419921875, -0.038238525390625, -0.0004360675811767578, -0.06170654296875, -0.01227569580078125, 0.01153564453125, -0.004657745361328125, 
0.0251007080078125, -0.0213470458984375, 0.038482666015625, 0.0283966064453125, 0.01251220703125, 0.00884246826171875, 0.036407470703125, -0.0386962890625, -0.02166748046875, 0.04119873046875, -0.0142364501953125, 0.021728515625, -0.0016384124755859375, -0.0047454833984375, -0.0209503173828125, -0.01861572265625, -0.0401611328125, 0.0433349609375, -0.038726806640625, -0.014404296875, -0.04302978515625, -0.048492431640625, -0.0227813720703125, -0.0073699951171875, -0.04620361328125, -0.004589080810546875, -0.020233154296875, 0.00240325927734375, 0.031646728515625, 0.039794921875, 0.0152435302734375, 0.0498046875, -0.0631103515625, 0.00991058349609375, 0.0250244140625, 0.0506591796875, -0.00487518310546875, -0.06524658203125, -0.0103759765625, 0.00537872314453125, -0.01392364501953125, -0.07415771484375, 0.0273284912109375, 0.010162353515625, 0.04620361328125, 0.04296875, 0.016571044921875, 0.04241943359375, -0.02984619140625, 0.03338623046875, 0.002712249755859375, -0.058258056640625, -0.01338958740234375, -0.033050537109375, 0.01137542724609375, 0.035736083984375, 0.0299835205078125, -0.04266357421875, -0.024261474609375, -0.06744384765625, -0.06170654296875, 0.060516357421875, -0.0036449432373046875, 0.033905029296875, 0.01248931884765625, 0.033172607421875, 0.018951416015625, -0.0021038055419921875, -0.047332763671875, -0.02105712890625, 0.003208160400390625, -0.043701171875, 0.0002143383026123047, -0.04083251953125, 0.00809478759765625, -0.025421142578125, 0.0611572265625, -0.016326904296875, 0.0312042236328125, -0.00677490234375, -0.00542449951171875, -0.0096893310546875, -0.01546478271484375, 0.048065185546875, -0.006435394287109375, -0.0182952880859375, 0.0176544189453125, 0.0116424560546875, -0.029388427734375, -0.031494140625, 0.027923583984375, -0.0019817352294921875, -0.0008692741394042969, 0.00270843505859375, 0.07147216796875, 0.0275115966796875, -0.034149169921875, 0.038055419921875, 0.0084228515625, -0.005695343017578125, -0.04327392578125, 
0.01076507568359375, 0.021026611328125, 0.0195465087890625, 0.031463623046875, 0.01529693603515625, 0.0021209716796875, -0.0289459228515625, 0.03753662109375, 0.03466796875, -0.052490234375, -0.0467529296875, 0.058197021484375, 0.037017822265625, -0.039825439453125, 0.04473876953125, -0.0213775634765625, -0.02069091796875, 0.0128173828125, 0.038055419921875, 0.059814453125, -0.0523681640625, 0.01094818115234375, 0.042510986328125, 0.0205535888671875, 0.01146697998046875, 0.0132904052734375, 0.0009794235229492188, -0.052520751953125, -0.033905029296875, -0.049072265625, -0.0242767333984375, 0.01116943359375, -0.07818603515625, 0.043060302734375, -0.0178985595703125, 0.00252532958984375, -0.01617431640625, -0.033843994140625, -0.046142578125, 0.02447509765625, 0.020233154296875, 0.08941650390625, -0.06622314453125, 0.0751953125, 0.049224853515625, -0.007404327392578125, -0.06414794921875, 0.0027484893798828125, -0.0194549560546875, -0.07989501953125, 0.00865936279296875, 0.01082611083984375, 0.01264190673828125, 0.0003414154052734375, -0.052093505859375, -0.052154541015625, 0.046905517578125, 0.0489501953125, -0.034423828125, -0.003551483154296875, 0.0126953125, 0.03704833984375, -0.036651611328125, 0.0382080078125, 0.037933349609375, 0.0246429443359375, 0.021759033203125, -0.0689697265625, 0.0206146240234375, -0.04541015625, 0.0170135498046875, 0.007038116455078125, -0.061248779296875, 0.07489013671875, 0.00887298583984375, 0.0019092559814453125, 0.019195556640625, 0.0931396484375, 0.0278472900390625, 0.002910614013671875, 0.04840087890625, 0.0396728515625, 0.0213623046875, -0.00908660888671875, 0.0645751953125, -0.025543212890625, 0.04827880859375, 0.039398193359375, -0.025390625, 0.07525634765625, 0.0513916015625, -0.0137176513671875, 0.049957275390625, 0.0194091796875, -0.0012416839599609375, 0.01036834716796875, 0.00010478496551513672, -0.01384735107421875, -0.034820556640625, -0.000004410743713378906, -0.0406494140625, 0.04254150390625, 0.028106689453125, 
-0.0233917236328125, -0.0238189697265625, -0.01348114013671875, 0.0196075439453125, 0.0074310302734375, -0.0240936279296875, 0.042022705078125, 0.0087738037109375, -0.046112060546875, 0.04241943359375, 0.0116424560546875, 0.047027587890625, -0.0245361328125, 0.01137542724609375, 0.0086669921875, 0.0267791748046875, -0.007274627685546875, -0.05694580078125, 0.0406494140625, -0.01520538330078125, -0.010467529296875, -0.02008056640625, 0.035308837890625, -0.06597900390625, -0.052215576171875, 0.031219482421875, 0.0211029052734375, 0.03094482421875, -0.02294921875, -0.059722900390625, 0.01800537109375, -0.006744384765625, -0.031829833984375, 0.00508880615234375, 0.03387451171875, 0.00460052490234375, 0.03216552734375, 0.0187835693359375, -0.01122283935546875, 0.0183258056640625, -0.010162353515625, 0.0565185546875, -0.048004150390625, -0.042694091796875, -0.038116455078125, 0.06475830078125, -0.017303466796875, -0.03851318359375, 0.047698974609375, 0.0380859375, 0.05419921875, -0.03887939453125, 0.0230560302734375, 0.00677490234375, 0.026214599609375, -0.03436279296875, 0.06475830078125, -0.05780029296875, -0.0272064208984375, -0.025390625, -0.09014892578125, -0.0195465087890625, 0.04876708984375, 0.0024127960205078125, 0.041748046875, 0.033447265625, 0.059326171875, -0.01568603515625, 0.003345489501953125, 0.03717041015625, 0.031585693359375, 0.01392364501953125, 0.0267181396484375, 0.036651611328125, -0.0457763671875, 0.025787353515625, -0.00977325439453125, -0.01447296142578125, -0.046478271484375, -0.0704345703125, -0.0758056640625, -0.06781005859375, -0.05169677734375, -0.025848388671875, -0.0472412109375, 0.057525634765625, 0.07476806640625, -0.04852294921875, -0.004486083984375, 0.01422119140625, -0.0261993408203125, 0.0108489990234375, -0.0173797607421875, -0.0079803466796875, 0.014190673828125, -0.08660888671875, 0.01049041748046875, -0.0034580230712890625, -0.0007619857788085938, -0.038543701171875, -0.01390838623046875, 0.01302337646484375, 
0.027923583984375, 0.0322265625, 0.00055694580078125, -0.047637939453125, -0.020111083984375, -0.004878997802734375, -0.027099609375, -0.0012178421020507812, 0.035491943359375, -0.028411865234375, 0.006908416748046875, 0.02783203125, 0.03985595703125, 0.0494384765625, -0.0004839897155761719, 0.044158935546875, -0.060211181640625, 0.038665771484375, -0.00554656982421875, 0.046966552734375, 0.039306640625, -0.005584716796875, 0.010009765625, 0.01885986328125, -0.0207672119140625, -0.065185546875, 0.02569580078125, -0.1304931640625, 0.03094482421875, 0.09210205078125, 0.0087738037109375, -0.050933837890625, 0.020355224609375, -0.0494384765625, 0.017333984375, -0.028289794921875, 0.036865234375, 0.042266845703125, 0.0338134765625, -0.04815673828125, -0.0250244140625, 0.006221771240234375, 0.02984619140625, -0.039825439453125, -0.01520538330078125, 0.0031299591064453125, 0.00740814208984375, 0.025390625, 0.016326904296875, -0.030426025390625, 0.0029811859130859375, -0.00481414794921875, 0.06707763671875, -0.0037059783935546875, -0.01251220703125, -0.013336181640625, 0.0139923095703125, 0.01507568359375, 0.007305145263671875 ] ]
facebook/vit-mae-large
2023-06-13T19:43:02.000Z
[ "transformers", "pytorch", "tf", "vit_mae", "pretraining", "vision", "dataset:imagenet-1k", "arxiv:2111.06377", "license:apache-2.0", "endpoints_compatible", "has_space", "region:us" ]
null
facebook
null
null
facebook/vit-mae-large
4
5,824
transformers
2022-03-02T23:29:05
--- license: apache-2.0 tags: - vision datasets: - imagenet-1k --- # Vision Transformer (large-sized model) pre-trained with MAE Vision Transformer (ViT) model pre-trained using the MAE method. It was introduced in the paper [Masked Autoencoders Are Scalable Vision Learners](https://arxiv.org/abs/2111.06377) by Kaiming He, Xinlei Chen, Saining Xie, Yanghao Li, Piotr Dollár, Ross Girshick and first released in [this repository](https://github.com/facebookresearch/mae). Disclaimer: The team releasing MAE did not write a model card for this model so this model card has been written by the Hugging Face team. ## Model description The Vision Transformer (ViT) is a transformer encoder model (BERT-like). Images are presented to the model as a sequence of fixed-size patches. During pre-training, one randomly masks out a high portion (75%) of the image patches. First, the encoder is used to encode the visual patches. Next, a learnable (shared) mask token is added at the positions of the masked patches. The decoder takes the encoded visual patches and mask tokens as input and reconstructs raw pixel values for the masked positions. By pre-training the model, it learns an inner representation of images that can then be used to extract features useful for downstream tasks: if you have a dataset of labeled images for instance, you can train a standard classifier by placing a linear layer on top of the pre-trained encoder. ## Intended uses & limitations You can use the raw model for image classification. See the [model hub](https://huggingface.co/models?search=facebook/vit-mae) to look for fine-tuned versions on a task that interests you. 
### How to use Here is how to use this model: ```python from transformers import AutoImageProcessor, ViTMAEForPreTraining from PIL import Image import requests url = 'http://images.cocodataset.org/val2017/000000039769.jpg' image = Image.open(requests.get(url, stream=True).raw) processor = AutoImageProcessor.from_pretrained('facebook/vit-mae-large') model = ViTMAEForPreTraining.from_pretrained('facebook/vit-mae-large') inputs = processor(images=image, return_tensors="pt") outputs = model(**inputs) loss = outputs.loss mask = outputs.mask ids_restore = outputs.ids_restore ``` ### BibTeX entry and citation info ```bibtex @article{DBLP:journals/corr/abs-2111-06377, author = {Kaiming He and Xinlei Chen and Saining Xie and Yanghao Li and Piotr Doll{\'{a}}r and Ross B. Girshick}, title = {Masked Autoencoders Are Scalable Vision Learners}, journal = {CoRR}, volume = {abs/2111.06377}, year = {2021}, url = {https://arxiv.org/abs/2111.06377}, eprinttype = {arXiv}, eprint = {2111.06377}, timestamp = {Tue, 16 Nov 2021 12:12:31 +0100}, biburl = {https://dblp.org/rec/journals/corr/abs-2111-06377.bib}, bibsource = {dblp computer science bibliography, https://dblp.org} } ```
2,970
[ [ -0.04815673828125, -0.035552978515625, 0.0010099411010742188, 0.0121917724609375, -0.0198516845703125, -0.006336212158203125, 0.0000010132789611816406, -0.0404052734375, 0.03826904296875, 0.032958984375, -0.040069580078125, -0.01995849609375, -0.06475830078125, -0.006587982177734375, -0.023651123046875, 0.06182861328125, -0.005611419677734375, -0.01336669921875, -0.00023639202117919922, -0.01239776611328125, -0.025299072265625, -0.045257568359375, -0.04461669921875, -0.0004162788391113281, 0.0168914794921875, -0.0021495819091796875, 0.0399169921875, 0.0506591796875, 0.057281494140625, 0.03472900390625, 0.0036411285400390625, 0.0035533905029296875, -0.0321044921875, -0.0186309814453125, 0.01099395751953125, -0.027191162109375, -0.040924072265625, 0.015167236328125, 0.050537109375, 0.023529052734375, 0.0164337158203125, 0.03619384765625, 0.006755828857421875, 0.040985107421875, -0.05926513671875, 0.0239715576171875, -0.035614013671875, 0.0233001708984375, -0.00722503662109375, 0.0022220611572265625, -0.024078369140625, -0.01523590087890625, 0.00836181640625, -0.047515869140625, 0.0311279296875, -0.005336761474609375, 0.1033935546875, 0.036529541015625, -0.016632080078125, 0.00640869140625, -0.041595458984375, 0.045989990234375, -0.0338134765625, 0.032196044921875, 0.01983642578125, 0.041015625, 0.01505279541015625, -0.07196044921875, -0.0404052734375, -0.01178741455078125, -0.022705078125, 0.0121002197265625, -0.022735595703125, 0.00800323486328125, 0.0419921875, 0.0450439453125, -0.0394287109375, -0.0071258544921875, -0.04681396484375, -0.0194549560546875, 0.05023193359375, -0.0109100341796875, 0.0098876953125, -0.016143798828125, -0.0362548828125, -0.030609130859375, -0.017364501953125, 0.0192718505859375, 0.00807952880859375, -0.02532958984375, -0.02001953125, 0.040771484375, -0.0170440673828125, 0.048065185546875, 0.0460205078125, -0.0139007568359375, 0.0501708984375, -0.006317138671875, -0.035980224609375, -0.0181427001953125, 0.0667724609375, 
0.037078857421875, 0.0235748291015625, 0.01296234130859375, -0.02215576171875, 0.006633758544921875, 0.0138702392578125, -0.07244873046875, -0.03521728515625, -0.015655517578125, -0.04571533203125, -0.02569580078125, 0.021453857421875, -0.052215576171875, 0.0035800933837890625, -0.0218505859375, 0.06585693359375, -0.0048980712890625, -0.0147247314453125, -0.0098114013671875, 0.00955963134765625, 0.042083740234375, 0.01399993896484375, -0.049407958984375, 0.0109100341796875, 0.007183074951171875, 0.06353759765625, -0.007354736328125, -0.0183563232421875, -0.0269622802734375, -0.0120849609375, -0.02569580078125, 0.034515380859375, -0.0145263671875, -0.0087738037109375, -0.005504608154296875, 0.045654296875, -0.01328277587890625, -0.037872314453125, 0.012969970703125, -0.04046630859375, 0.006618499755859375, -0.006114959716796875, -0.010009765625, -0.0236663818359375, 0.0149993896484375, -0.05340576171875, 0.0709228515625, 0.016357421875, -0.045867919921875, 0.027984619140625, -0.0482177734375, -0.01044464111328125, 0.0075836181640625, 0.007450103759765625, -0.054901123046875, -0.0031032562255859375, 0.041015625, 0.056671142578125, 0.006008148193359375, 0.01201629638671875, -0.02691650390625, -0.02288818359375, 0.01221466064453125, -0.0287017822265625, 0.07183837890625, 0.013824462890625, -0.031219482421875, 0.0036106109619140625, -0.05059814453125, -0.015899658203125, 0.025726318359375, -0.0026416778564453125, -0.009674072265625, -0.029449462890625, -0.0035572052001953125, 0.028076171875, 0.0247344970703125, -0.043182373046875, 0.00466156005859375, -0.00843048095703125, 0.039825439453125, 0.05377197265625, -0.0027980804443359375, 0.03472900390625, -0.01309967041015625, 0.033050537109375, 0.0019369125366210938, 0.05828857421875, -0.0460205078125, -0.03680419921875, -0.07891845703125, -0.04022216796875, 0.005306243896484375, 0.0263824462890625, -0.060211181640625, 0.0391845703125, -0.0159912109375, -0.037628173828125, -0.057830810546875, 0.002429962158203125, 
0.0350341796875, 0.0380859375, 0.038360595703125, -0.04083251953125, -0.049468994140625, -0.0697021484375, 0.019805908203125, 0.0088653564453125, 0.00304412841796875, 0.0186004638671875, 0.05010986328125, -0.042572021484375, 0.07196044921875, -0.048583984375, -0.0272979736328125, 0.0152587890625, -0.004962921142578125, 0.01122283935546875, 0.051025390625, 0.055267333984375, -0.06817626953125, -0.046234130859375, -0.022003173828125, -0.0645751953125, 0.0010251998901367188, 0.00890350341796875, -0.018463134765625, 0.0285186767578125, 0.04437255859375, -0.04058837890625, 0.05780029296875, 0.04327392578125, -0.0097503662109375, 0.032318115234375, -0.0056304931640625, 0.0097198486328125, -0.0784912109375, 0.0023212432861328125, 0.01172637939453125, -0.0295867919921875, -0.025238037109375, 0.00951385498046875, 0.015655517578125, -0.01457977294921875, -0.0279083251953125, 0.03521728515625, -0.04766845703125, -0.0040130615234375, -0.006893157958984375, -0.0340576171875, 0.00615692138671875, 0.057037353515625, 0.015472412109375, 0.0328369140625, 0.05340576171875, -0.0384521484375, 0.0460205078125, 0.0216217041015625, -0.019775390625, 0.051239013671875, -0.06182861328125, 0.001102447509765625, -0.01488494873046875, 0.024658203125, -0.06903076171875, -0.0298004150390625, 0.02410888671875, -0.03826904296875, 0.043792724609375, -0.0265045166015625, -0.0168609619140625, -0.07110595703125, -0.022979736328125, 0.042572021484375, 0.056182861328125, -0.060333251953125, 0.03497314453125, 0.034393310546875, 0.01201629638671875, -0.047698974609375, -0.052398681640625, 0.0006260871887207031, -0.00830841064453125, -0.042022705078125, 0.0243072509765625, -0.01373291015625, 0.01226043701171875, 0.01450347900390625, 0.006496429443359375, -0.00769805908203125, -0.0209808349609375, 0.035186767578125, 0.04644775390625, -0.0247039794921875, -0.0098876953125, -0.019561767578125, -0.0247344970703125, 0.00264739990234375, -0.0215301513671875, 0.02313232421875, -0.03302001953125, -0.0281982421875, 
-0.035552978515625, 0.01386260986328125, 0.047332763671875, -0.031005859375, 0.048126220703125, 0.07586669921875, -0.03839111328125, 0.01146697998046875, -0.06927490234375, -0.00952911376953125, -0.041351318359375, 0.02923583984375, -0.0301666259765625, -0.0626220703125, 0.046905517578125, 0.019989013671875, -0.0028400421142578125, 0.06317138671875, 0.047637939453125, -0.0087890625, 0.06353759765625, 0.056915283203125, 0.0059967041015625, 0.050048828125, -0.0594482421875, 0.001529693603515625, -0.07037353515625, -0.03472900390625, 0.005657196044921875, -0.0218658447265625, -0.040771484375, -0.0450439453125, 0.02532958984375, -0.01009368896484375, -0.032806396484375, 0.040252685546875, -0.06378173828125, 0.02978515625, 0.051666259765625, 0.0198516845703125, -0.0037517547607421875, 0.0034275054931640625, 0.0152130126953125, 0.0017709732055664062, -0.038787841796875, -0.0020503997802734375, 0.07525634765625, 0.043426513671875, 0.0648193359375, -0.0275421142578125, 0.04205322265625, 0.001232147216796875, 0.0174102783203125, -0.05072021484375, 0.04150390625, -0.00974273681640625, -0.0460205078125, -0.01149749755859375, -0.01708984375, -0.06475830078125, 0.0101318359375, -0.035186767578125, -0.038818359375, 0.048370361328125, 0.01800537109375, -0.02032470703125, 0.028289794921875, -0.06158447265625, 0.06201171875, -0.01313018798828125, -0.0155792236328125, 0.0194091796875, -0.06341552734375, 0.03912353515625, -0.0206451416015625, -0.00904083251953125, 0.0173187255859375, 0.0257415771484375, 0.0714111328125, -0.0440673828125, 0.0703125, -0.02001953125, 0.0133209228515625, 0.035369873046875, -0.02301025390625, 0.023193359375, -0.0084075927734375, 0.03753662109375, 0.042449951171875, 0.0019989013671875, -0.0297698974609375, -0.0304718017578125, 0.03326416015625, -0.066162109375, -0.039642333984375, -0.0311737060546875, -0.033538818359375, 0.0101470947265625, 0.00722503662109375, 0.06219482421875, 0.036407470703125, 0.00960540771484375, 0.0301055908203125, 0.05633544921875, 
-0.025360107421875, 0.04351806640625, 0.01183319091796875, -0.017730712890625, -0.0352783203125, 0.06329345703125, 0.024261474609375, 0.02642822265625, 0.0216827392578125, 0.0013570785522460938, -0.01104736328125, -0.0068359375, -0.019989013671875, 0.0308685302734375, -0.045440673828125, -0.031585693359375, -0.05133056640625, -0.06170654296875, -0.044891357421875, -0.034027099609375, -0.043060302734375, -0.01499176025390625, -0.017425537109375, 0.005191802978515625, 0.0255126953125, 0.040740966796875, -0.0183868408203125, 0.03472900390625, -0.043609619140625, 0.034423828125, 0.062469482421875, 0.022003173828125, -0.0031909942626953125, -0.052520751953125, -0.02532958984375, 0.0016908645629882812, -0.0237884521484375, -0.049041748046875, 0.043487548828125, 0.010223388671875, 0.043914794921875, 0.048858642578125, -0.004207611083984375, 0.060577392578125, -0.0261993408203125, 0.045318603515625, 0.035430908203125, -0.050018310546875, 0.0450439453125, -0.00823974609375, 0.0169219970703125, 0.018524169921875, 0.0269927978515625, -0.00986480712890625, -0.0052947998046875, -0.057647705078125, -0.056915283203125, 0.047882080078125, 0.00841522216796875, 0.0090484619140625, 0.0247955322265625, 0.0291290283203125, -0.0135955810546875, 0.007442474365234375, -0.07366943359375, -0.022918701171875, -0.05340576171875, -0.0097503662109375, -0.02032470703125, -0.032318115234375, -0.004550933837890625, -0.05621337890625, 0.039459228515625, -0.0015554428100585938, 0.05450439453125, 0.022918701171875, -0.03302001953125, -0.006153106689453125, -0.03472900390625, 0.01971435546875, 0.0246124267578125, -0.0242767333984375, 0.007617950439453125, 0.00897979736328125, -0.0682373046875, 0.0005159378051757812, 0.0170440673828125, -0.0189208984375, -0.0012350082397460938, 0.028900146484375, 0.08154296875, 0.004497528076171875, -0.00843048095703125, 0.0626220703125, 0.006359100341796875, -0.03375244140625, -0.0318603515625, 0.0022716522216796875, -0.017852783203125, 0.03375244140625, 
0.03619384765625, 0.00884246826171875, 0.003520965576171875, -0.02996826171875, 0.0103607177734375, 0.031890869140625, -0.0274810791015625, -0.0236358642578125, 0.055908203125, -0.00007277727127075195, -0.01488494873046875, 0.038360595703125, -0.01145172119140625, -0.052703857421875, 0.06396484375, 0.03839111328125, 0.07110595703125, -0.036376953125, 0.0171051025390625, 0.056854248046875, 0.036712646484375, -0.00460052490234375, -0.020843505859375, -0.005954742431640625, -0.047271728515625, -0.028961181640625, -0.059173583984375, -0.0022182464599609375, 0.020233154296875, -0.05206298828125, 0.0148162841796875, -0.022796630859375, -0.0177764892578125, 0.0018100738525390625, 0.0074310302734375, -0.072509765625, 0.0260772705078125, 0.022735595703125, 0.052520751953125, -0.06689453125, 0.055328369140625, 0.04840087890625, -0.038116455078125, -0.064453125, -0.0246734619140625, -0.0082550048828125, -0.07537841796875, 0.05303955078125, 0.0289154052734375, 0.01085662841796875, 0.008148193359375, -0.068603515625, -0.07305908203125, 0.09539794921875, 0.02032470703125, -0.0384521484375, -0.0013713836669921875, -0.01508331298828125, 0.0192108154296875, -0.036102294921875, 0.0156707763671875, 0.0131378173828125, 0.02435302734375, 0.0411376953125, -0.067626953125, 0.00563812255859375, -0.0343017578125, 0.01381683349609375, 0.002674102783203125, -0.0546875, 0.08013916015625, -0.0200347900390625, 0.004291534423828125, -0.0095672607421875, 0.045867919921875, -0.004367828369140625, 0.0263519287109375, 0.04779052734375, 0.045654296875, 0.0377197265625, -0.000946044921875, 0.06439208984375, -0.0014133453369140625, 0.042236328125, 0.05377197265625, 0.0217742919921875, 0.03955078125, 0.019775390625, -0.01308441162109375, 0.03289794921875, 0.05413818359375, -0.039825439453125, 0.043243408203125, -0.00970458984375, 0.0017709732055664062, -0.01509857177734375, 0.02056884765625, -0.042572021484375, 0.038421630859375, 0.020050048828125, -0.0401611328125, 0.0045318603515625, 
0.037567138671875, -0.006313323974609375, -0.0338134765625, -0.03729248046875, 0.03857421875, 0.00909423828125, -0.036712646484375, 0.0601806640625, -0.021087646484375, 0.06072998046875, -0.037078857421875, 0.0034046173095703125, -0.01007080078125, 0.019500732421875, -0.0199127197265625, -0.049224853515625, 0.0248870849609375, -0.005100250244140625, -0.011138916015625, -0.002002716064453125, 0.06646728515625, -0.0222015380859375, -0.0511474609375, 0.0108642578125, 0.004512786865234375, 0.01690673828125, -0.00714111328125, -0.0535888671875, 0.0023517608642578125, -0.009674072265625, -0.038055419921875, 0.0224456787109375, 0.01763916015625, -0.003482818603515625, 0.041839599609375, 0.0538330078125, -0.00626373291015625, 0.0265045166015625, -0.027984619140625, 0.07867431640625, -0.033050537109375, -0.0318603515625, -0.0521240234375, 0.040283203125, -0.0172119140625, -0.011016845703125, 0.052154541015625, 0.0272369384765625, 0.06475830078125, -0.022064208984375, 0.03363037109375, -0.006778717041015625, 0.0026988983154296875, -0.0301055908203125, 0.05633544921875, -0.040557861328125, -0.0158843994140625, -0.038970947265625, -0.084716796875, -0.0295867919921875, 0.08233642578125, -0.01508331298828125, 0.01325225830078125, 0.0386962890625, 0.0709228515625, -0.0223846435546875, -0.031280517578125, 0.028228759765625, 0.0301513671875, 0.004169464111328125, 0.043243408203125, 0.039703369140625, -0.05303955078125, 0.03338623046875, -0.031341552734375, -0.038421630859375, -0.0275726318359375, -0.050750732421875, -0.07598876953125, -0.066162109375, -0.03619384765625, -0.03363037109375, -0.003932952880859375, 0.04205322265625, 0.08636474609375, -0.045318603515625, 0.006587982177734375, 0.0133514404296875, -0.00951385498046875, -0.0067291259765625, -0.017730712890625, 0.043304443359375, -0.0005044937133789062, -0.052001953125, -0.021087646484375, 0.00965118408203125, 0.021392822265625, -0.0305328369140625, -0.015167236328125, -0.007472991943359375, -0.0020122528076171875, 
0.0631103515625, 0.036651611328125, -0.035186767578125, -0.0208892822265625, 0.007129669189453125, -0.00907135009765625, 0.00742340087890625, 0.021453857421875, -0.070556640625, 0.04132080078125, 0.018646240234375, 0.03515625, 0.07220458984375, -0.01371002197265625, 0.006069183349609375, -0.0362548828125, 0.0273284912109375, -0.012054443359375, 0.035308837890625, 0.02215576171875, -0.0281982421875, 0.04217529296875, 0.043182373046875, -0.04766845703125, -0.06085205078125, 0.0005168914794921875, -0.0955810546875, 0.0012006759643554688, 0.08074951171875, -0.0260772705078125, -0.034942626953125, 0.016448974609375, -0.020355224609375, 0.0277862548828125, 0.005146026611328125, 0.04241943359375, 0.0258026123046875, 0.0245208740234375, -0.039825439453125, -0.03900146484375, 0.026702880859375, -0.01548004150390625, -0.040283203125, -0.04046630859375, 0.0261688232421875, 0.0287933349609375, 0.0272369384765625, 0.061737060546875, -0.018524169921875, 0.0162506103515625, 0.003047943115234375, 0.0267791748046875, -0.02032470703125, -0.0201416015625, -0.0302581787109375, -0.0024776458740234375, -0.034576416015625, -0.0543212890625 ] ]
h2oai/h2ogpt-oasst1-512-20b
2023-04-26T16:58:30.000Z
[ "transformers", "pytorch", "gpt_neox", "text-generation", "gpt", "llm", "large language model", "open-source", "en", "dataset:h2oai/openassistant_oasst1", "dataset:h2oai/openassistant_oasst1_h2ogpt", "license:apache-2.0", "has_space", "text-generation-inference", "region:us" ]
text-generation
h2oai
null
null
h2oai/h2ogpt-oasst1-512-20b
38
5,824
transformers
2023-04-18T01:36:16
--- license: apache-2.0 language: - en library_name: transformers inference: false thumbnail: https://h2o.ai/etc.clientlibs/h2o/clientlibs/clientlib-site/resources/images/favicon.ico tags: - gpt - llm - large language model - open-source datasets: - h2oai/openassistant_oasst1 - h2oai/openassistant_oasst1_h2ogpt --- # h2oGPT Model Card ## Summary H2O.ai's `h2ogpt-oasst1-512-20b` is a 20 billion parameter instruction-following large language model licensed for commercial use. - Base model: [EleutherAI/gpt-neox-20b](https://huggingface.co/EleutherAI/gpt-neox-20b) - Fine-tuning dataset: [h2oai/openassistant_oasst1](https://huggingface.co/datasets/h2oai/openassistant_oasst1) and [h2oai/openassistant_oasst1_h2ogpt](https://huggingface.co/datasets/h2oai/openassistant_oasst1_h2ogpt) - Data-prep and fine-tuning code: [H2O.ai GitHub](https://github.com/h2oai/h2ogpt) - Training logs: [zip](https://huggingface.co/h2oai/h2ogpt-oasst1-512-20b/blob/main/gpt-neox-20b.openassistant_oasst1.json.6.0_epochs.5a14ea8b3794c0d60476fc262d0a297f98dd712d.1013.zip) and [zip](https://huggingface.co/h2oai/h2ogpt-oasst1-512-20b/blob/main/h2ogpt-oasst1-512-20b.h2oaiopenassistant_oasst1_h2ogpt.2_epochs.fcaae7ef70600de8c97c9b38cb3f0075467cdad1.3.zip) ## Chatbot - Run your own chatbot: [H2O.ai GitHub](https://github.com/h2oai/h2ogpt) [![H2O.ai GitHub](https://user-images.githubusercontent.com/6147661/232930822-e7170e4d-8aa1-4f7a-ad70-ece9cdd8b0cb.png)](https://github.com/h2oai/h2ogpt) ## Usage To use the model with the `transformers` library on a machine with GPUs, first make sure you have the `transformers` and `accelerate` libraries installed. 
```bash pip install transformers==4.28.1 pip install accelerate==0.18.0 ``` ```python import torch from transformers import pipeline generate_text = pipeline(model="h2oai/h2ogpt-oasst1-512-20b", torch_dtype=torch.bfloat16, trust_remote_code=True, device_map="auto") res = generate_text("Why is drinking water so healthy?", max_new_tokens=100) print(res[0]["generated_text"]) ``` Alternatively, if you prefer to not use `trust_remote_code=True` you can download [instruct_pipeline.py](https://huggingface.co/h2oai/h2ogpt-oasst1-512-20b/blob/main/h2oai_pipeline.py), store it alongside your notebook, and construct the pipeline yourself from the loaded model and tokenizer: ```python import torch from h2oai_pipeline import H2OTextGenerationPipeline from transformers import AutoModelForCausalLM, AutoTokenizer tokenizer = AutoTokenizer.from_pretrained("h2oai/h2ogpt-oasst1-512-20b", padding_side="left") model = AutoModelForCausalLM.from_pretrained("h2oai/h2ogpt-oasst1-512-20b", torch_dtype=torch.bfloat16, device_map="auto") generate_text = H2OTextGenerationPipeline(model=model, tokenizer=tokenizer) res = generate_text("Why is drinking water so healthy?", max_new_tokens=100) print(res[0]["generated_text"]) ``` ## Model Architecture ``` GPTNeoXForCausalLM( (gpt_neox): GPTNeoXModel( (embed_in): Embedding(50432, 6144) (layers): ModuleList( (0-43): 44 x GPTNeoXLayer( (input_layernorm): LayerNorm((6144,), eps=1e-05, elementwise_affine=True) (post_attention_layernorm): LayerNorm((6144,), eps=1e-05, elementwise_affine=True) (attention): GPTNeoXAttention( (rotary_emb): RotaryEmbedding() (query_key_value): Linear(in_features=6144, out_features=18432, bias=True) (dense): Linear(in_features=6144, out_features=6144, bias=True) ) (mlp): GPTNeoXMLP( (dense_h_to_4h): Linear(in_features=6144, out_features=24576, bias=True) (dense_4h_to_h): Linear(in_features=24576, out_features=6144, bias=True) (act): FastGELUActivation() ) ) ) (final_layer_norm): LayerNorm((6144,), eps=1e-05, 
elementwise_affine=True) ) (embed_out): Linear(in_features=6144, out_features=50432, bias=False) ) ``` ## Model Configuration ```json GPTNeoXConfig { "_name_or_path": "h2oai/h2ogpt-oasst1-512-20b", "architectures": [ "GPTNeoXForCausalLM" ], "attention_probs_dropout_prob": 0, "bos_token_id": 0, "custom_pipeline": { "text-generation": { "impl": "h2oai_pipeline.H2OTextGenerationPipeline", "pt": "AutoModelForCausalLM" } }, "custom_pipelines": { "text-generation": { "impl": "h2oai_pipeline.H2OTextGenerationPipeline", "pt": "AutoModelForCausalLM" } }, "eos_token_id": 0, "hidden_act": "gelu_fast", "hidden_dropout_prob": 0, "hidden_size": 6144, "initializer_range": 0.02, "intermediate_size": 24576, "layer_norm_eps": 1e-05, "max_position_embeddings": 2048, "model_type": "gpt_neox", "num_attention_heads": 64, "num_hidden_layers": 44, "rotary_emb_base": 10000, "rotary_pct": 0.25, "tie_word_embeddings": false, "torch_dtype": "float16", "transformers_version": "4.28.1", "use_cache": true, "use_parallel_residual": true, "vocab_size": 50432 } ``` ## Model Validation Model validation results using [EleutherAI lm-evaluation-harness](https://github.com/EleutherAI/lm-evaluation-harness). [eval source code](https://github.com/h2oai/h2ogpt/issues/35#issuecomment-1521119301) | Task |Version| Metric |Value | |Stderr| |-------------|------:|--------|-----:|---|-----:| |hellaswag | 0|acc |0.5419|± |0.0050| | | |acc_norm|0.7259|± |0.0045| |boolq | 1|acc |0.7125|± |0.0079| |piqa | 0|acc |0.7742|± |0.0098| | | |acc_norm|0.7775|± |0.0097| |openbookqa | 0|acc |0.2800|± |0.0201| | | |acc_norm|0.4000|± |0.0219| |arc_challenge| 0|acc |0.3993|± |0.0143| | | |acc_norm|0.4420|± |0.0145| |winogrande | 0|acc |0.6614|± |0.0133| |arc_easy | 0|acc |0.7327|± |0.0091| | | |acc_norm|0.6894|± |0.0095| ## Disclaimer Please read this disclaimer carefully before using the large language model provided in this repository. Your use of the model signifies your agreement to the following terms and conditions. 
- Biases and Offensiveness: The large language model is trained on a diverse range of internet text data, which may contain biased, racist, offensive, or otherwise inappropriate content. By using this model, you acknowledge and accept that the generated content may sometimes exhibit biases or produce content that is offensive or inappropriate. The developers of this repository do not endorse, support, or promote any such content or viewpoints. - Limitations: The large language model is an AI-based tool and not a human. It may produce incorrect, nonsensical, or irrelevant responses. It is the user's responsibility to critically evaluate the generated content and use it at their discretion. - Use at Your Own Risk: Users of this large language model must assume full responsibility for any consequences that may arise from their use of the tool. The developers and contributors of this repository shall not be held liable for any damages, losses, or harm resulting from the use or misuse of the provided model. - Ethical Considerations: Users are encouraged to use the large language model responsibly and ethically. By using this model, you agree not to use it for purposes that promote hate speech, discrimination, harassment, or any form of illegal or harmful activities. - Reporting Issues: If you encounter any biased, offensive, or otherwise inappropriate content generated by the large language model, please report it to the repository maintainers through the provided channels. Your feedback will help improve the model and mitigate potential issues. - Changes to this Disclaimer: The developers of this repository reserve the right to modify or update this disclaimer at any time without prior notice. It is the user's responsibility to periodically review the disclaimer to stay informed about any changes. By using the large language model provided in this repository, you agree to accept and comply with the terms and conditions outlined in this disclaimer. 
If you do not agree with any part of this disclaimer, you should refrain from using the model and any content generated by it.
8,190
[ [ -0.020965576171875, -0.06072998046875, 0.033477783203125, 0.01279449462890625, -0.020050048828125, -0.007808685302734375, -0.0185089111328125, -0.03265380859375, 0.00936126708984375, 0.0267791748046875, -0.03875732421875, -0.04730224609375, -0.051239013671875, -0.01457977294921875, -0.0019330978393554688, 0.06561279296875, -0.00720977783203125, -0.0248260498046875, 0.004024505615234375, 0.00675201416015625, -0.00975799560546875, -0.0264129638671875, -0.058563232421875, -0.02099609375, 0.01494598388671875, 0.011138916015625, 0.046844482421875, 0.060821533203125, 0.0262451171875, 0.0225067138671875, -0.012298583984375, -0.0012664794921875, -0.0309295654296875, -0.01300048828125, 0.0019140243530273438, -0.01546478271484375, -0.04656982421875, -0.003040313720703125, 0.044158935546875, 0.0211029052734375, -0.007080078125, 0.0243682861328125, 0.018890380859375, 0.0310516357421875, -0.03790283203125, 0.0335693359375, -0.03143310546875, -0.01163482666015625, -0.0088043212890625, 0.00463104248046875, -0.018585205078125, -0.005596160888671875, 0.0214691162109375, -0.04742431640625, -0.002838134765625, 0.0113525390625, 0.09906005859375, 0.0183868408203125, -0.0226287841796875, -0.0241546630859375, -0.034698486328125, 0.05810546875, -0.0797119140625, 0.0257568359375, 0.030517578125, 0.004215240478515625, 0.004215240478515625, -0.061981201171875, -0.04022216796875, -0.0102386474609375, -0.00797271728515625, 0.0193939208984375, -0.0233917236328125, -0.0011425018310546875, 0.0194091796875, 0.0164031982421875, -0.051544189453125, 0.00528717041015625, -0.030731201171875, -0.0201568603515625, 0.050506591796875, 0.01374053955078125, 0.0174560546875, -0.03314208984375, -0.0297393798828125, -0.0111083984375, -0.03277587890625, 0.00760650634765625, 0.035400390625, 0.0170440673828125, -0.0528564453125, 0.039764404296875, -0.01418304443359375, 0.040924072265625, -0.005126953125, -0.011871337890625, 0.04205322265625, -0.0304718017578125, -0.0273284912109375, 0.00013947486877441406, 
0.0943603515625, 0.020263671875, 0.0050811767578125, 0.0206451416015625, -0.00827789306640625, -0.0162200927734375, -0.00733184814453125, -0.0814208984375, -0.01192474365234375, 0.03790283203125, -0.03167724609375, -0.018646240234375, 0.0019140243530273438, -0.06341552734375, -0.005245208740234375, -0.004261016845703125, 0.03192138671875, -0.0258331298828125, -0.0304718017578125, 0.0116729736328125, -0.0094757080078125, 0.01715087890625, 0.00955963134765625, -0.057830810546875, 0.01345062255859375, 0.04254150390625, 0.06903076171875, 0.007137298583984375, -0.0229644775390625, -0.0269622802734375, 0.006855010986328125, -0.006107330322265625, 0.0338134765625, -0.0212554931640625, -0.0178375244140625, -0.0006136894226074219, 0.0173797607421875, -0.01580810546875, -0.0237579345703125, 0.0474853515625, -0.016510009765625, 0.0404052734375, 0.0011396408081054688, -0.0311431884765625, -0.0075531005859375, 0.0213775634765625, -0.029266357421875, 0.09228515625, 0.0207366943359375, -0.06427001953125, 0.0081787109375, -0.051422119140625, -0.01107025146484375, -0.006443023681640625, -0.0036220550537109375, -0.05548095703125, -0.023712158203125, 0.0214080810546875, 0.0248870849609375, -0.034942626953125, 0.0174407958984375, -0.02191162109375, -0.0100250244140625, -0.004833221435546875, -0.0302581787109375, 0.08935546875, 0.0174560546875, -0.048858642578125, 0.01494598388671875, -0.056304931640625, 0.00699615478515625, 0.028411865234375, -0.0280303955078125, -0.0102691650390625, -0.01425933837890625, 0.0144805908203125, 0.0352783203125, 0.02862548828125, -0.037322998046875, 0.01461029052734375, -0.037109375, 0.054534912109375, 0.056427001953125, 0.002696990966796875, 0.0206756591796875, -0.0118255615234375, 0.02923583984375, 0.007099151611328125, 0.0243377685546875, -0.01026153564453125, -0.0638427734375, -0.058349609375, -0.029052734375, 0.01523590087890625, 0.0509033203125, -0.04229736328125, 0.05596923828125, -0.032440185546875, -0.04449462890625, -0.044891357421875, 
0.00693511962890625, 0.032135009765625, 0.044677734375, 0.0296783447265625, -0.00724029541015625, -0.033721923828125, -0.068359375, 0.009185791015625, -0.018890380859375, -0.0035724639892578125, 0.04400634765625, 0.06573486328125, -0.03497314453125, 0.05584716796875, -0.053802490234375, -0.0157318115234375, -0.004978179931640625, 0.0120849609375, 0.032073974609375, 0.04254150390625, 0.035675048828125, -0.036407470703125, -0.04290771484375, 0.0023193359375, -0.049896240234375, 0.0033092498779296875, -0.00017023086547851562, -0.020050048828125, 0.0225067138671875, 0.032440185546875, -0.06549072265625, 0.03680419921875, 0.055450439453125, -0.046722412109375, 0.039215087890625, -0.0273284912109375, -0.003589630126953125, -0.10784912109375, 0.022857666015625, 0.0031032562255859375, -0.0082550048828125, -0.0304107666015625, -0.004566192626953125, 0.0011501312255859375, -0.006748199462890625, -0.049560546875, 0.06634521484375, -0.031829833984375, 0.0182037353515625, -0.0024471282958984375, 0.005649566650390625, 0.0015077590942382812, 0.040802001953125, 0.006256103515625, 0.040771484375, 0.0557861328125, -0.0364990234375, 0.0263824462890625, 0.01314544677734375, -0.0236358642578125, 0.01221466064453125, -0.0723876953125, 0.01007080078125, 0.0035495758056640625, 0.0182037353515625, -0.08001708984375, -0.024078369140625, 0.03924560546875, -0.043792724609375, 0.023834228515625, -0.02276611328125, -0.0328369140625, -0.04730224609375, -0.027923583984375, 0.021209716796875, 0.051788330078125, -0.031097412109375, 0.037567138671875, 0.0202789306640625, -0.0045013427734375, -0.047607421875, -0.050811767578125, -0.0039043426513671875, -0.0169830322265625, -0.059417724609375, 0.03350830078125, 0.0030517578125, -0.0007281303405761719, 0.0158233642578125, 0.01392364501953125, 0.0022487640380859375, -0.0008974075317382812, 0.0230865478515625, 0.0250244140625, -0.01248931884765625, -0.011871337890625, -0.002353668212890625, -0.01210784912109375, 0.0018978118896484375, 
-0.0262908935546875, 0.07135009765625, -0.00498199462890625, -0.0105133056640625, -0.060211181640625, 0.0034809112548828125, 0.05133056640625, -0.0171966552734375, 0.0625, 0.06781005859375, -0.041473388671875, 0.00624847412109375, -0.0252685546875, -0.02178955078125, -0.034820556640625, 0.039825439453125, -0.01538848876953125, -0.059173583984375, 0.05267333984375, 0.018768310546875, 0.0014085769653320312, 0.057159423828125, 0.058685302734375, 0.000713348388671875, 0.07635498046875, 0.018798828125, -0.01363372802734375, 0.031463623046875, -0.05206298828125, 0.0126800537109375, -0.07720947265625, -0.019378662109375, -0.042510986328125, -0.008148193359375, -0.06402587890625, -0.042266845703125, 0.0197296142578125, 0.003021240234375, -0.0469970703125, 0.0307769775390625, -0.034027099609375, 0.00250244140625, 0.04876708984375, 0.002880096435546875, -0.0032749176025390625, 0.006137847900390625, -0.022369384765625, 0.0015497207641601562, -0.044891357421875, -0.0389404296875, 0.0802001953125, 0.05120849609375, 0.036224365234375, 0.0015516281127929688, 0.053802490234375, -0.004261016845703125, 0.0126953125, -0.0511474609375, 0.02667236328125, 0.00829315185546875, -0.061920166015625, -0.0263214111328125, -0.038726806640625, -0.0775146484375, 0.0119476318359375, -0.014556884765625, -0.072998046875, 0.0182647705078125, 0.0130462646484375, -0.034149169921875, 0.0259246826171875, -0.0565185546875, 0.07489013671875, -0.0159149169921875, -0.05419921875, -0.0044708251953125, -0.047576904296875, 0.020050048828125, 0.0232696533203125, 0.0185546875, -0.009307861328125, -0.01418304443359375, 0.05810546875, -0.039825439453125, 0.058319091796875, -0.01531219482421875, -0.00872039794921875, 0.04168701171875, -0.017303466796875, 0.049652099609375, 0.01238250732421875, -0.002513885498046875, 0.0250396728515625, -0.01450347900390625, -0.035980224609375, -0.0302581787109375, 0.058441162109375, -0.07037353515625, -0.03985595703125, -0.044342041015625, -0.0288238525390625, 0.0092010498046875, 
0.026885986328125, 0.0263519287109375, 0.0266265869140625, 0.00893402099609375, 0.01910400390625, 0.03125, -0.03485107421875, 0.0452880859375, 0.01531219482421875, -0.0162200927734375, -0.054229736328125, 0.07403564453125, 0.002796173095703125, 0.0301513671875, 0.0138702392578125, 0.0209503173828125, -0.033294677734375, -0.028778076171875, -0.05126953125, 0.034149169921875, -0.0285186767578125, -0.00473785400390625, -0.052154541015625, -0.024078369140625, -0.045806884765625, 0.00868988037109375, -0.039031982421875, -0.018218994140625, -0.03564453125, 0.0023345947265625, 0.02398681640625, 0.029632568359375, 0.001720428466796875, 0.018798828125, -0.05377197265625, 0.014068603515625, 0.0307769775390625, 0.021240234375, -0.00749969482421875, -0.061553955078125, -0.0213623046875, 0.0164337158203125, -0.035797119140625, -0.0477294921875, 0.04718017578125, 0.003505706787109375, 0.030303955078125, 0.0306243896484375, -0.011383056640625, 0.04949951171875, -0.0240478515625, 0.0694580078125, 0.01418304443359375, -0.06988525390625, 0.04443359375, -0.0426025390625, 0.034271240234375, 0.01751708984375, 0.034454345703125, -0.0391845703125, -0.0186309814453125, -0.061859130859375, -0.0836181640625, 0.0596923828125, 0.0312042236328125, 0.0111236572265625, 0.007007598876953125, 0.022796630859375, -0.0247650146484375, 0.00955963134765625, -0.05596923828125, -0.03265380859375, -0.01227569580078125, -0.02447509765625, -0.0011768341064453125, -0.00902557373046875, -0.006740570068359375, -0.0308380126953125, 0.0645751953125, -0.006816864013671875, 0.045013427734375, 0.02484130859375, 0.0014905929565429688, -0.0032291412353515625, 0.014892578125, 0.050079345703125, 0.0419921875, -0.0280914306640625, -0.0007777214050292969, 0.004261016845703125, -0.0489501953125, -0.0027618408203125, 0.0220489501953125, -0.025909423828125, -0.008453369140625, 0.030303955078125, 0.0865478515625, 0.0018854141235351562, -0.038726806640625, 0.033935546875, -0.0027618408203125, -0.03521728515625, 
-0.0240478515625, 0.002361297607421875, 0.0299530029296875, 0.006061553955078125, 0.020050048828125, -0.002574920654296875, -0.003337860107421875, -0.050262451171875, -0.002044677734375, 0.0271148681640625, -0.035064697265625, -0.0213775634765625, 0.064697265625, 0.0118560791015625, -0.0158538818359375, 0.0589599609375, -0.02423095703125, -0.04547119140625, 0.0565185546875, 0.034820556640625, 0.06683349609375, -0.01381683349609375, 0.0100860595703125, 0.060516357421875, 0.038665771484375, -0.0014848709106445312, 0.0164794921875, 0.0196533203125, -0.049163818359375, -0.0192108154296875, -0.042327880859375, -0.004688262939453125, 0.0266571044921875, -0.04296875, 0.032928466796875, -0.0357666015625, -0.0187530517578125, -0.00981903076171875, 0.006855010986328125, -0.05322265625, 0.0207672119140625, 0.0194549560546875, 0.055908203125, -0.060211181640625, 0.06365966796875, 0.0455322265625, -0.037567138671875, -0.06219482421875, -0.003879547119140625, 0.004001617431640625, -0.058441162109375, 0.0360107421875, 0.013397216796875, 0.0138702392578125, 0.01161956787109375, -0.0341796875, -0.0753173828125, 0.09918212890625, 0.0252838134765625, -0.04345703125, -0.006580352783203125, 0.0159912109375, 0.042022705078125, -0.025115966796875, 0.04876708984375, 0.043487548828125, 0.04010009765625, 0.0005006790161132812, -0.0855712890625, 0.0207366943359375, -0.0307769775390625, -0.006229400634765625, 0.00283050537109375, -0.0711669921875, 0.07452392578125, -0.023529052734375, -0.00991058349609375, -0.006549835205078125, 0.033843994140625, 0.030517578125, 0.0182647705078125, 0.027374267578125, 0.06787109375, 0.051544189453125, -0.0177154541015625, 0.08685302734375, -0.03619384765625, 0.053802490234375, 0.07952880859375, 0.0021991729736328125, 0.0693359375, 0.0300750732421875, -0.015350341796875, 0.040069580078125, 0.05206298828125, 0.001064300537109375, 0.0246429443359375, 0.00994873046875, -0.00803375244140625, -0.0099334716796875, -0.006664276123046875, -0.04241943359375, 
0.02978515625, 0.0285797119140625, -0.050506591796875, 0.004184722900390625, -0.0196075439453125, 0.0222015380859375, -0.0295867919921875, -0.00943756103515625, 0.044769287109375, 0.004543304443359375, -0.039459228515625, 0.0677490234375, 0.00250244140625, 0.055572509765625, -0.03570556640625, 0.00446319580078125, 0.005634307861328125, 0.0178985595703125, -0.0159149169921875, -0.0550537109375, 0.008880615234375, -0.01052093505859375, -0.003841400146484375, -0.00923919677734375, 0.03460693359375, -0.03192138671875, -0.045074462890625, 0.032073974609375, 0.0273590087890625, 0.0035724639892578125, -0.0039520263671875, -0.076171875, 0.01294708251953125, 0.00757598876953125, -0.0484619140625, 0.01538848876953125, 0.02301025390625, 0.00812530517578125, 0.048858642578125, 0.04559326171875, 0.0043792724609375, 0.007965087890625, -0.00011116266250610352, 0.064697265625, -0.047943115234375, -0.0282135009765625, -0.0699462890625, 0.058319091796875, -0.0214691162109375, -0.04669189453125, 0.054443359375, 0.06500244140625, 0.06304931640625, -0.004482269287109375, 0.06634521484375, -0.021575927734375, 0.019256591796875, -0.02947998046875, 0.05853271484375, -0.0452880859375, 0.00621795654296875, -0.022796630859375, -0.0638427734375, -0.0035648345947265625, 0.051727294921875, -0.0300750732421875, 0.0155181884765625, 0.055694580078125, 0.0711669921875, -0.002559661865234375, -0.0167999267578125, 0.003963470458984375, 0.03778076171875, 0.03125, 0.051513671875, 0.033233642578125, -0.06353759765625, 0.04327392578125, -0.03521728515625, -0.01168060302734375, -0.0164642333984375, -0.054412841796875, -0.062408447265625, -0.04815673828125, -0.039642333984375, -0.04693603515625, -0.0124359130859375, 0.08087158203125, 0.06756591796875, -0.05474853515625, -0.0254058837890625, -0.00740814208984375, 0.002689361572265625, -0.0068817138671875, -0.0225067138671875, 0.044952392578125, -0.003948211669921875, -0.07684326171875, -0.0015964508056640625, 0.01349639892578125, 0.01312255859375, 
-0.018463134765625, -0.0269012451171875, -0.027374267578125, -0.0018777847290039062, 0.04046630859375, 0.019622802734375, -0.047149658203125, -0.015167236328125, 0.0011167526245117188, -0.0173492431640625, 0.006626129150390625, 0.02960205078125, -0.025970458984375, 0.027374267578125, 0.05029296875, 0.02349853515625, 0.051544189453125, -0.00571441650390625, 0.01433563232421875, -0.03424072265625, 0.028839111328125, 0.0124359130859375, 0.0308380126953125, 0.0189666748046875, -0.0274200439453125, 0.0273590087890625, 0.0323486328125, -0.04205322265625, -0.054229736328125, -0.006244659423828125, -0.08416748046875, -0.00862884521484375, 0.1043701171875, -0.0175628662109375, -0.039031982421875, -0.004100799560546875, -0.0245208740234375, 0.02410888671875, -0.04803466796875, 0.049530029296875, 0.048553466796875, -0.0029964447021484375, -0.013763427734375, -0.054534912109375, 0.031585693359375, 0.01361083984375, -0.07330322265625, -0.0012845993041992188, 0.0158843994140625, 0.0340576171875, 0.0201568603515625, 0.04119873046875, -0.0119171142578125, 0.01113128662109375, 0.004116058349609375, 0.0155181884765625, -0.0032806396484375, -0.0017919540405273438, -0.008148193359375, 0.0015544891357421875, -0.0129852294921875, -0.01409912109375 ] ]
robowaifudev/megatron-gpt2-345m
2023-04-08T22:07:20.000Z
[ "transformers", "pytorch", "safetensors", "gpt2", "text-generation", "en", "dataset:wikitext", "dataset:openwebtext", "dataset:spacemanidol/cc-stories", "arxiv:1909.08053", "license:apache-2.0", "model-index", "endpoints_compatible", "has_space", "text-generation-inference", "region:us" ]
text-generation
robowaifudev
null
null
robowaifudev/megatron-gpt2-345m
4
5,822
transformers
2022-12-21T00:55:48
--- language: - en tags: - gpt2 license: apache-2.0 widget: - text: It was a bright cold day in April, and the clocks were striking thirteen. Winston Smith, datasets: - wikitext - openwebtext - spacemanidol/cc-stories model-index: - name: megatron-gpt2-345m results: - task: type: text-generation name: Text generation dataset: name: WikiText-103 type: wikitext metrics: - type: wikitext value: 19.31 name: Perplexity - task: type: text-generation name: Text generation dataset: name: WikiText-2 type: wikitext metrics: - type: wikitext value: 17.151 name: Perplexity - task: type: text-generation name: Text generation dataset: name: LAMBADA type: lambada metrics: - type: lambada value: 5.509 name: Perplexity - type: lambada value: 68.31% name: Accuracy --- <!--- # ############################################################################################## # # Copyright (c) 2021-, NVIDIA CORPORATION. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # ############################################################################################## --> This is an archive of [nvidia/megatron-gpt2-345m](https://huggingface.co/nvidia/megatron-gpt2-345m) that contains readily available model weights (375M). Its performance on Wikitext-103 is 19.31.<sup>1</sup> In comparison, the performance of GPT2-large (1.5B) is 17.48 and GPT2-medium (762M) is 22.05.<sup>2</sup> ### References 1. Shoeybi, Mohammad, et al. 
Megatron-LM: Training Multi-Billion Parameter Language Models Using Model Parallelism. arXiv, 2019, [https://doi.org/10.48550/ARXIV.1909.08053](https://doi.org/10.48550/ARXIV.1909.08053). 2. Alec Radford, et al. Language Models are Unsupervised Multitask Learners. OpenAI, 2019. [https://cdn.openai.com/better-language-models/language_models_are_unsupervised_multitask_learners.pdf](https://cdn.openai.com/better-language-models/language_models_are_unsupervised_multitask_learners.pdf). ## Description [Megatron](https://arxiv.org/pdf/1909.08053.pdf) is a large, powerful transformer developed by the Applied Deep Learning Research team at NVIDIA. This particular Megatron model was trained from a generative, left-to-right transformer in the style of GPT-2. This model was trained on text sourced from Wikipedia, RealNews, OpenWebText, and CC-Stories. It contains 345 million parameters. Find more information at [https://github.com/NVIDIA/Megatron-LM](https://github.com/NVIDIA/Megatron-LM) # How to run Megatron GPT2 using Transformers ## Text generation The following code shows how to use the Megatron GPT2 checkpoint and Transformers to generate text. ```python import os import torch from transformers import GPT2Tokenizer, GPT2LMHeadModel tokenizer = GPT2Tokenizer.from_pretrained("gpt2") model = GPT2LMHeadModel.from_pretrained("robowaifudev/megatron-gpt2-345m") if torch.cuda.is_available(): device = torch.device("cuda") model.half() else: device = torch.device("cpu") model.to(device) model.eval() # Generate prompt = ( "It was a bright cold day in April, and the clocks were striking thirteen. 
Winston Smith," ) input_ids = tokenizer.encode(prompt, return_tensors="pt").to(device) output = model.generate( input_ids=input_ids, max_length=len(input_ids) + 128, do_sample=True, top_k=64, top_p=0.9, temperature=0.8, num_return_sequences=2, repetition_penalty=1.025 ) # Output the text print("Prompt:", prompt) print("*" * 3) for i, sentence in enumerate(output): text = tokenizer.decode(sentence, clean_up_tokenization_spaces=True) print(f"{i}:", text) print("*" * 3) ``` # Original code The original Megatron code can be found here: [https://github.com/NVIDIA/Megatron-LM](https://github.com/NVIDIA/Megatron-LM).
4,584
[ [ -0.03936767578125, -0.062042236328125, 0.0289154052734375, 0.01690673828125, -0.036529541015625, -0.01361083984375, -0.025787353515625, -0.020050048828125, -0.00591278076171875, 0.0184173583984375, -0.035308837890625, -0.0193939208984375, -0.049896240234375, 0.00969696044921875, -0.0028934478759765625, 0.08306884765625, -0.0024566650390625, -0.005809783935546875, -0.0057525634765625, 0.006366729736328125, -0.0182647705078125, -0.0311126708984375, -0.047943115234375, -0.030487060546875, 0.01788330078125, 0.005672454833984375, 0.05828857421875, 0.04302978515625, 0.0303955078125, 0.0290374755859375, -0.0114288330078125, 0.0160369873046875, -0.0092010498046875, -0.015655517578125, 0.00409698486328125, -0.01079559326171875, -0.0243682861328125, -0.00469207763671875, 0.053558349609375, 0.00972747802734375, 0.007232666015625, 0.02264404296875, 0.014801025390625, 0.0276947021484375, -0.018035888671875, 0.0460205078125, -0.0218353271484375, -0.00959014892578125, -0.014251708984375, -0.01560211181640625, -0.0228118896484375, -0.034332275390625, -0.01084136962890625, -0.054229736328125, 0.022064208984375, -0.004192352294921875, 0.083984375, 0.0233306884765625, -0.032012939453125, -0.0061187744140625, -0.05047607421875, 0.055419921875, -0.06732177734375, 0.0285797119140625, 0.0043792724609375, 0.01519775390625, -0.00803375244140625, -0.056884765625, -0.06103515625, -0.020355224609375, -0.005340576171875, 0.00933074951171875, -0.027435302734375, 0.0009198188781738281, 0.038299560546875, 0.0198211669921875, -0.053985595703125, 0.007640838623046875, -0.041748046875, -0.00725555419921875, 0.029205322265625, 0.01406097412109375, 0.0224456787109375, -0.0482177734375, -0.0294952392578125, -0.0272674560546875, -0.036163330078125, 0.0012722015380859375, 0.0294952392578125, 0.0088958740234375, -0.02679443359375, 0.031951904296875, 0.0040435791015625, 0.033050537109375, 0.004566192626953125, -0.018402099609375, 0.04345703125, -0.032257080078125, -0.033538818359375, 
-0.021270751953125, 0.086181640625, 0.0294342041015625, 0.01206207275390625, -0.006511688232421875, 0.0007748603820800781, 0.004425048828125, 0.0023670196533203125, -0.07208251953125, -0.0240478515625, 0.0323486328125, -0.016937255859375, -0.01168060302734375, 0.0025577545166015625, -0.07708740234375, -0.004985809326171875, -0.016082763671875, 0.03460693359375, -0.041656494140625, -0.03277587890625, 0.0006933212280273438, -0.0263519287109375, 0.01922607421875, 0.00606536865234375, -0.07977294921875, 0.00864410400390625, 0.053314208984375, 0.058258056640625, 0.00390625, -0.03961181640625, -0.016265869140625, 0.0012102127075195312, -0.0088653564453125, 0.034759521484375, -0.00962066650390625, -0.0162200927734375, -0.01381683349609375, 0.00588226318359375, -0.0158843994140625, -0.038909912109375, 0.034210205078125, -0.0308074951171875, 0.0269775390625, -0.0034961700439453125, -0.0201416015625, -0.01690673828125, 0.001804351806640625, -0.047119140625, 0.09271240234375, 0.01165771484375, -0.0703125, 0.01509857177734375, -0.049835205078125, -0.01556396484375, 0.0004429817199707031, -0.018310546875, -0.04168701171875, 0.009796142578125, 0.0270538330078125, 0.0220794677734375, -0.03839111328125, 0.02587890625, 0.0022258758544921875, -0.0146942138671875, 0.005584716796875, -0.04864501953125, 0.0791015625, 0.01169586181640625, -0.058837890625, 0.02301025390625, -0.03973388671875, 0.003963470458984375, 0.02789306640625, -0.0357666015625, 0.0105743408203125, -0.01061248779296875, 0.025054931640625, 0.032562255859375, 0.0201416015625, -0.04638671875, 0.017181396484375, -0.041473388671875, 0.048248291015625, 0.0662841796875, -0.0175628662109375, 0.0109100341796875, -0.01071929931640625, 0.0250244140625, 0.01396942138671875, 0.0262298583984375, 0.007701873779296875, -0.054229736328125, -0.060760498046875, -0.01242828369140625, 0.007015228271484375, 0.045989990234375, -0.08392333984375, 0.032135009765625, -0.033416748046875, -0.04644775390625, -0.01236724853515625, 
0.005054473876953125, 0.038726806640625, 0.0229034423828125, 0.03631591796875, -0.0008859634399414062, -0.03753662109375, -0.06524658203125, 0.0037746429443359375, -0.01442718505859375, -0.0005707740783691406, 0.0280303955078125, 0.0675048828125, -0.017425537109375, 0.049652099609375, -0.0491943359375, -0.01080322265625, -0.038970947265625, 0.00594329833984375, 0.03900146484375, 0.045074462890625, 0.04522705078125, -0.046630859375, -0.035369873046875, -0.00689697265625, -0.05615234375, 0.0105743408203125, 0.00411224365234375, 0.0080718994140625, 0.03533935546875, 0.0303497314453125, -0.0758056640625, 0.02069091796875, 0.04986572265625, -0.036041259765625, 0.06121826171875, -0.039947509765625, 0.004573822021484375, -0.10406494140625, 0.020263671875, 0.0025577545166015625, 0.0021839141845703125, -0.037109375, 0.007205963134765625, -0.00011134147644042969, -0.01195526123046875, -0.041534423828125, 0.039825439453125, -0.032073974609375, 0.0104217529296875, -0.0164642333984375, -0.0012845993041992188, -0.010589599609375, 0.0567626953125, -0.0098876953125, 0.073486328125, 0.057830810546875, -0.0482177734375, 0.042724609375, 0.01629638671875, -0.027435302734375, 0.01849365234375, -0.0689697265625, 0.00943756103515625, -0.0182037353515625, 0.0183868408203125, -0.07525634765625, -0.01309967041015625, 0.030426025390625, -0.04736328125, 0.012908935546875, 0.0013399124145507812, -0.0660400390625, -0.04766845703125, -0.023651123046875, 0.036163330078125, 0.056060791015625, -0.049041748046875, 0.04180908203125, 0.02508544921875, 0.0007033348083496094, -0.0303955078125, -0.043853759765625, -0.01070404052734375, -0.01364898681640625, -0.051666259765625, 0.0235595703125, -0.0024127960205078125, -0.0007662773132324219, -0.005115509033203125, 0.005687713623046875, -0.00440216064453125, -0.019927978515625, 0.03668212890625, 0.0303802490234375, -0.0259857177734375, 0.00041794776916503906, -0.0159759521484375, -0.0362548828125, 0.01464080810546875, -0.0245513916015625, 0.07122802734375, 
-0.0117340087890625, -0.005218505859375, -0.046478271484375, 0.022186279296875, 0.056549072265625, -0.01168060302734375, 0.0537109375, 0.06390380859375, -0.0277862548828125, 0.006153106689453125, -0.03265380859375, -0.0264739990234375, -0.0411376953125, 0.042877197265625, -0.01424407958984375, -0.0545654296875, 0.025177001953125, 0.0134124755859375, 0.017059326171875, 0.06951904296875, 0.04180908203125, 0.019561767578125, 0.08544921875, 0.04736328125, -0.0091094970703125, 0.031494140625, -0.039794921875, 0.011322021484375, -0.07525634765625, -0.0254058837890625, -0.03631591796875, -0.01398468017578125, -0.0489501953125, -0.046295166015625, 0.03314208984375, -0.0008325576782226562, -0.037841796875, 0.0394287109375, -0.056365966796875, 0.0267486572265625, 0.05621337890625, 0.005931854248046875, 0.0010862350463867188, 0.0121002197265625, -0.02581787109375, -0.004528045654296875, -0.046783447265625, -0.0310211181640625, 0.0762939453125, 0.02496337890625, 0.034759521484375, 0.0198974609375, 0.0260162353515625, -0.0157012939453125, 0.0025844573974609375, -0.027008056640625, 0.04608154296875, -0.01708984375, -0.057342529296875, -0.0203399658203125, -0.042388916015625, -0.072509765625, 0.023284912109375, -0.0007047653198242188, -0.07476806640625, -0.003910064697265625, 0.012481689453125, -0.034942626953125, 0.050811767578125, -0.055419921875, 0.08038330078125, -0.00684356689453125, -0.02484130859375, -0.004337310791015625, -0.02685546875, 0.0233154296875, 0.01145172119140625, 0.0029315948486328125, 0.004428863525390625, 0.004993438720703125, 0.06988525390625, -0.03271484375, 0.05145263671875, -0.013916015625, 0.008636474609375, 0.027618408203125, -0.021759033203125, 0.042816162109375, 0.017059326171875, 0.00702667236328125, 0.01678466796875, -0.0141754150390625, -0.0229034423828125, -0.032012939453125, 0.07757568359375, -0.08917236328125, -0.0379638671875, -0.051177978515625, -0.0340576171875, 0.001270294189453125, 0.014251708984375, 0.043212890625, 0.042724609375, 
0.0159149169921875, 0.0033092498779296875, 0.040374755859375, -0.034698486328125, 0.0650634765625, 0.0160675048828125, -0.016265869140625, -0.0574951171875, 0.06689453125, 0.0047607421875, 0.0214385986328125, 0.0137481689453125, 0.0165863037109375, -0.043853759765625, -0.0283203125, -0.058197021484375, 0.0302581787109375, -0.043853759765625, -0.01318359375, -0.0472412109375, -0.0191802978515625, -0.0369873046875, 0.011627197265625, -0.0257568359375, -0.0206756591796875, -0.0307159423828125, -0.0032138824462890625, 0.045501708984375, 0.045806884765625, -0.0023288726806640625, 0.023529052734375, -0.040924072265625, 0.03302001953125, 0.0301055908203125, 0.0287628173828125, 0.005855560302734375, -0.04644775390625, 0.00162506103515625, -0.004913330078125, -0.049530029296875, -0.056793212890625, 0.033447265625, -0.004009246826171875, 0.03668212890625, 0.01134490966796875, -0.018157958984375, 0.05010986328125, -0.033203125, 0.075927734375, 0.03094482421875, -0.0697021484375, 0.0312042236328125, -0.0167083740234375, 0.041595458984375, 0.01904296875, 0.0201416015625, -0.0276947021484375, -0.013763427734375, -0.06549072265625, -0.078369140625, 0.06915283203125, 0.026336669921875, 0.01171875, 0.0010309219360351562, 0.0018787384033203125, -0.01715087890625, 0.007434844970703125, -0.057586669921875, -0.041778564453125, -0.029296875, -0.00930023193359375, -0.040069580078125, -0.013916015625, 0.003955841064453125, -0.04486083984375, 0.055999755859375, 0.0006914138793945312, 0.055267333984375, 0.0181121826171875, -0.00440216064453125, 0.0012416839599609375, 0.017486572265625, 0.066162109375, 0.052459716796875, -0.01071929931640625, 0.0235137939453125, 0.0186920166015625, -0.054718017578125, -0.00406646728515625, 0.0233612060546875, -0.009490966796875, 0.0210723876953125, 0.003543853759765625, 0.07159423828125, 0.00803375244140625, -0.0263214111328125, 0.03558349609375, 0.0029430389404296875, -0.0290679931640625, -0.02642822265625, -0.00023353099822998047, 0.018218994140625, 
0.0260009765625, 0.0250091552734375, 0.00844573974609375, -0.01433563232421875, -0.0307159423828125, 0.01081085205078125, 0.026397705078125, -0.0165863037109375, -0.0249786376953125, 0.0787353515625, 0.0015058517456054688, -0.01168060302734375, 0.062225341796875, -0.01318359375, -0.043670654296875, 0.06732177734375, 0.05218505859375, 0.0677490234375, -0.00030040740966796875, 0.016937255859375, 0.05499267578125, 0.033172607421875, -0.0024089813232421875, 0.032135009765625, 0.0189056396484375, -0.0269317626953125, -0.033843994140625, -0.049835205078125, -0.01042938232421875, 0.0254058837890625, -0.018218994140625, 0.00543975830078125, -0.04949951171875, -0.024505615234375, 0.0203399658203125, 0.0025577545166015625, -0.051666259765625, 0.0107574462890625, 0.005794525146484375, 0.060516357421875, -0.060333251953125, 0.07177734375, 0.039154052734375, -0.044891357421875, -0.07781982421875, -0.01262664794921875, -0.005008697509765625, -0.05450439453125, 0.056365966796875, 0.0140380859375, 0.00969696044921875, 0.01055145263671875, -0.0233306884765625, -0.0665283203125, 0.081787109375, 0.03277587890625, -0.0189056396484375, 0.00406646728515625, 0.0274810791015625, 0.036163330078125, 0.001941680908203125, 0.044952392578125, 0.03289794921875, 0.048126220703125, 0.0016431808471679688, -0.08831787109375, 0.0199737548828125, -0.0323486328125, 0.0009064674377441406, 0.035797119140625, -0.053558349609375, 0.10498046875, -0.0228729248046875, -0.01678466796875, -0.0009217262268066406, 0.052947998046875, 0.02423095703125, 0.00542449951171875, 0.019073486328125, 0.053497314453125, 0.03399658203125, -0.036041259765625, 0.0811767578125, -0.03216552734375, 0.058197021484375, 0.056884765625, 0.0233154296875, 0.05877685546875, 0.03955078125, -0.01751708984375, 0.03375244140625, 0.02874755859375, -0.022064208984375, 0.02239990234375, 0.0082244873046875, -0.010345458984375, -0.004123687744140625, 0.0133056640625, -0.047393798828125, 0.01617431640625, 0.004627227783203125, -0.04083251953125, 
-0.0263214111328125, -0.004390716552734375, 0.0228729248046875, -0.033782958984375, -0.0218353271484375, 0.035919189453125, -0.007293701171875, -0.0582275390625, 0.070068359375, 0.0196990966796875, 0.06256103515625, -0.035919189453125, 0.00566864013671875, -0.001781463623046875, 0.0335693359375, -0.019561767578125, -0.06134033203125, 0.017242431640625, 0.0010919570922851562, -0.01751708984375, -0.00637054443359375, 0.021270751953125, -0.0093231201171875, -0.04656982421875, 0.005645751953125, 0.009002685546875, 0.024444580078125, -0.0129547119140625, -0.06121826171875, 0.008575439453125, 0.00042438507080078125, -0.020751953125, 0.022003173828125, 0.006793975830078125, 0.01690673828125, 0.0352783203125, 0.056060791015625, -0.00801849365234375, 0.02459716796875, -0.00984954833984375, 0.0653076171875, -0.05267333984375, -0.031280517578125, -0.08123779296875, 0.033935546875, -0.006656646728515625, -0.06842041015625, 0.0780029296875, 0.06695556640625, 0.05792236328125, -0.0272064208984375, 0.049652099609375, -0.0173797607421875, 0.0035991668701171875, -0.035400390625, 0.036834716796875, -0.032806396484375, -0.00634002685546875, -0.0228118896484375, -0.07025146484375, -0.01430511474609375, 0.039825439453125, -0.026641845703125, 0.0228729248046875, 0.0628662109375, 0.06683349609375, -0.01409149169921875, 0.000942230224609375, 0.0114593505859375, 0.0156097412109375, 0.02850341796875, 0.07476806640625, 0.0298614501953125, -0.051666259765625, 0.049530029296875, -0.031707763671875, -0.013397216796875, -0.00936126708984375, -0.042755126953125, -0.06597900390625, -0.043365478515625, -0.021087646484375, -0.032073974609375, -0.01103973388671875, 0.068115234375, 0.045166015625, -0.055572509765625, -0.0097808837890625, -0.0165863037109375, 0.0064239501953125, -0.0207672119140625, -0.0190582275390625, 0.056732177734375, -0.0033359527587890625, -0.064697265625, -0.001911163330078125, 0.01389312744140625, 0.00567626953125, -0.0240936279296875, -0.01462554931640625, 
-0.004413604736328125, 0.00501251220703125, 0.040283203125, 0.0252532958984375, -0.0645751953125, -0.0227508544921875, -0.00901031494140625, -0.0194549560546875, 0.0225067138671875, 0.03765869140625, -0.047576904296875, 0.00788116455078125, 0.05029296875, 0.019622802734375, 0.0287017822265625, -0.0166168212890625, 0.005001068115234375, -0.03265380859375, 0.01132965087890625, 0.0032825469970703125, 0.022430419921875, 0.0102386474609375, -0.01287841796875, 0.01213836669921875, 0.03143310546875, -0.0477294921875, -0.04681396484375, 0.0007357597351074219, -0.0814208984375, -0.0235137939453125, 0.11090087890625, 0.0117340087890625, -0.025115966796875, -0.003574371337890625, -0.025054931640625, 0.028228759765625, -0.0251617431640625, 0.042877197265625, 0.02313232421875, -0.00868988037109375, -0.01396942138671875, -0.06494140625, 0.037689208984375, 0.01678466796875, -0.06158447265625, -0.0004799365997314453, 0.02239990234375, 0.0350341796875, 0.0343017578125, 0.03387451171875, -0.00930023193359375, 0.0242767333984375, 0.0165557861328125, 0.0117034912109375, -0.02423095703125, -0.01462554931640625, -0.0172882080078125, 0.02655029296875, -0.0126953125, -0.0308990478515625 ] ]
Mikivis/gpt2-large-lora-sft2
2023-09-07T03:57:27.000Z
[ "transformers", "pytorch", "safetensors", "gpt2", "text-generation", "generated_from_trainer", "dataset:customized", "endpoints_compatible", "text-generation-inference", "region:us" ]
text-generation
Mikivis
null
null
Mikivis/gpt2-large-lora-sft2
0
5,822
transformers
2023-09-06T11:30:43
--- base_model: /data1/sunhaoyu/pretrained-models/gpt2-large-lora-sft1 tags: - generated_from_trainer datasets: - customized model-index: - name: gpt2-large-lora-sft2 results: [] --- <!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # gpt2-large-lora-sft2 This model is a fine-tuned version of [/data1/sunhaoyu/pretrained-models/gpt2-large-lora-sft1](https://huggingface.co//data1/sunhaoyu/pretrained-models/gpt2-large-lora-sft1) on the customized dataset. ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 0.00015 - train_batch_size: 1 - eval_batch_size: 8 - seed: 42 - distributed_type: multi-GPU - num_devices: 6 - total_train_batch_size: 6 - total_eval_batch_size: 48 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - num_epochs: 1.2 ### Training results ### Framework versions - Transformers 4.32.1 - Pytorch 2.0.1+cu117 - Datasets 2.10.1 - Tokenizers 0.13.3
1,290
[ [ -0.0360107421875, -0.06500244140625, 0.0195465087890625, 0.0083160400390625, -0.041107177734375, -0.029693603515625, -0.00872039794921875, -0.0306243896484375, 0.01873779296875, 0.02880859375, -0.043609619140625, -0.0234222412109375, -0.053863525390625, -0.0069732666015625, -0.015777587890625, 0.1024169921875, -0.0119781494140625, 0.01528167724609375, 0.00044608116149902344, -0.0008668899536132812, -0.0186920166015625, -0.035491943359375, -0.0762939453125, -0.057830810546875, 0.04412841796875, 0.00321197509765625, 0.0606689453125, 0.05963134765625, 0.042022705078125, 0.0234527587890625, -0.0165557861328125, -0.004978179931640625, -0.04412841796875, -0.040802001953125, -0.00513458251953125, -0.0309295654296875, -0.06634521484375, 0.0103759765625, 0.0538330078125, 0.0107879638671875, -0.01549530029296875, 0.037017822265625, 0.0172271728515625, 0.0219879150390625, -0.0305633544921875, 0.03802490234375, -0.036224365234375, 0.027191162109375, -0.0155181884765625, -0.021575927734375, -0.0119476318359375, -0.024322509765625, -0.0025653839111328125, -0.054901123046875, 0.031036376953125, -0.0162353515625, 0.084716796875, 0.0391845703125, -0.0220489501953125, 0.015380859375, -0.06048583984375, 0.027313232421875, -0.046630859375, -0.0001786947250366211, 0.0295867919921875, 0.042236328125, 0.00865936279296875, -0.057098388671875, -0.03271484375, -0.003986358642578125, 0.0018815994262695312, 0.01438140869140625, -0.00336456298828125, 0.00605010986328125, 0.06256103515625, 0.025604248046875, -0.042327880859375, 0.02471923828125, -0.044281005859375, -0.00882720947265625, 0.04840087890625, 0.022430419921875, -0.012908935546875, -0.00429534912109375, -0.036376953125, -0.00708770751953125, -0.040435791015625, -0.0068511962890625, 0.045806884765625, 0.017822265625, -0.02935791015625, 0.056671142578125, -0.0167388916015625, 0.043243408203125, 0.004497528076171875, -0.016448974609375, 0.038116455078125, -0.0028324127197265625, -0.03857421875, -0.00530242919921875, 
0.06683349609375, 0.028228759765625, 0.0204620361328125, 0.006481170654296875, -0.0162200927734375, -0.0031948089599609375, 0.006481170654296875, -0.0738525390625, -0.02459716796875, 0.003398895263671875, -0.0275115966796875, -0.045257568359375, 0.010986328125, -0.04412841796875, 0.00026106834411621094, -0.033966064453125, 0.051483154296875, -0.01058197021484375, -0.0185699462890625, -0.00229644775390625, -0.007122039794921875, 0.0301055908203125, 0.021514892578125, -0.06048583984375, 0.0255126953125, 0.03607177734375, 0.035003662109375, 0.01192474365234375, -0.035858154296875, -0.0271453857421875, 0.0177459716796875, -0.0088958740234375, 0.043212890625, 0.0018682479858398438, -0.0274658203125, -0.012481689453125, 0.01788330078125, -0.0083465576171875, -0.0301055908203125, 0.0550537109375, -0.031982421875, 0.02569580078125, -0.032806396484375, -0.0306243896484375, -0.00968170166015625, 0.0227203369140625, -0.048736572265625, 0.0850830078125, 0.0261688232421875, -0.0692138671875, 0.04022216796875, -0.058746337890625, -0.0205535888671875, 0.014190673828125, -0.0138092041015625, -0.057708740234375, -0.00487518310546875, 0.00000667572021484375, 0.0248260498046875, -0.02984619140625, 0.0258941650390625, -0.0243682861328125, -0.04510498046875, -0.006069183349609375, -0.0352783203125, 0.045318603515625, 0.0158233642578125, -0.03515625, 0.017913818359375, -0.069580078125, 0.0241546630859375, 0.034027099609375, -0.04071044921875, 0.0170745849609375, -0.025787353515625, 0.03887939453125, 0.02001953125, 0.034698486328125, -0.038055419921875, 0.02667236328125, -0.0135955810546875, 0.035308837890625, 0.056610107421875, -0.0098114013671875, -0.012664794921875, -0.01293182373046875, 0.02325439453125, 0.00341796875, 0.035858154296875, 0.03607177734375, -0.0426025390625, -0.05615234375, -0.00833892822265625, 0.01015472412109375, 0.033599853515625, -0.039093017578125, 0.061553955078125, -0.0095367431640625, -0.04638671875, -0.0035419464111328125, 0.01012420654296875, 
0.03326416015625, 0.033203125, 0.032135009765625, -0.0143585205078125, -0.030303955078125, -0.071533203125, -0.0020904541015625, 0.0007200241088867188, 0.01824951171875, 0.01151275634765625, 0.06011962890625, -0.0274505615234375, 0.0616455078125, -0.059478759765625, -0.01520538330078125, -0.0196685791015625, 0.0008249282836914062, 0.02545166015625, 0.053863525390625, 0.052459716796875, -0.034637451171875, -0.0250091552734375, -0.00695037841796875, -0.06268310546875, 0.020660400390625, 0.00786590576171875, -0.01447296142578125, -0.007720947265625, 0.02581787109375, -0.05755615234375, 0.050689697265625, 0.01509857177734375, -0.0289764404296875, 0.046478271484375, -0.04486083984375, -0.006740570068359375, -0.0911865234375, 0.0208740234375, 0.0116119384765625, -0.00800323486328125, -0.0126800537109375, 0.01264190673828125, -0.0078125, -0.0158538818359375, -0.0262603759765625, 0.046783447265625, -0.0029010772705078125, 0.0008268356323242188, -0.031402587890625, -0.0186309814453125, -0.007541656494140625, 0.047210693359375, 0.01284027099609375, 0.0452880859375, 0.0418701171875, -0.0340576171875, 0.04412841796875, 0.041748046875, -0.0257415771484375, 0.0279541015625, -0.08123779296875, 0.0155181884765625, -0.0003504753112792969, 0.021453857421875, -0.05218505859375, -0.03515625, 0.042724609375, -0.01220703125, 0.0192108154296875, -0.02386474609375, -0.05035400390625, -0.045806884765625, 0.00502777099609375, 0.04150390625, 0.046722412109375, -0.052764892578125, 0.023468017578125, 0.0008144378662109375, 0.015777587890625, -0.032135009765625, -0.056884765625, -0.031524658203125, -0.0181121826171875, -0.024658203125, 0.004791259765625, -0.0026092529296875, 0.01861572265625, -0.0104522705078125, 0.0005707740783691406, -0.0198211669921875, -0.01438140869140625, 0.0271453857421875, 0.0277862548828125, -0.0210113525390625, -0.0141143798828125, 0.0004024505615234375, -0.03326416015625, 0.029052734375, -0.007190704345703125, 0.042449951171875, 0.0036907196044921875, 
-0.0258026123046875, -0.0567626953125, -0.005527496337890625, 0.0259552001953125, -0.00928497314453125, 0.059051513671875, 0.0914306640625, -0.035980224609375, 0.01044464111328125, -0.03680419921875, -0.0043792724609375, -0.034912109375, 0.050933837890625, -0.03594970703125, -0.02984619140625, 0.0360107421875, 0.00832366943359375, -0.004779815673828125, 0.0675048828125, 0.0367431640625, 0.02032470703125, 0.08306884765625, 0.02587890625, -0.0174102783203125, 0.032440185546875, -0.056671142578125, -0.00385284423828125, -0.0625, -0.0256805419921875, -0.034942626953125, -0.0181121826171875, -0.05352783203125, -0.0089874267578125, 0.0165557861328125, 0.0095367431640625, -0.06134033203125, 0.0281524658203125, -0.039031982421875, 0.0426025390625, 0.04547119140625, 0.037017822265625, -0.0014820098876953125, 0.022216796875, 0.0057525634765625, 0.00998687744140625, -0.0552978515625, -0.016357421875, 0.0927734375, 0.03826904296875, 0.057098388671875, -0.008087158203125, 0.0465087890625, -0.01549530029296875, 0.007030487060546875, -0.040008544921875, 0.038726806640625, -0.0013370513916015625, -0.06024169921875, -0.018890380859375, -0.042724609375, -0.05853271484375, 0.01203155517578125, -0.0260009765625, -0.043365478515625, -0.010162353515625, 0.0231170654296875, -0.0279083251953125, 0.038818359375, -0.04443359375, 0.08538818359375, -0.022308349609375, -0.04119873046875, -0.0186309814453125, -0.032501220703125, 0.0157012939453125, 0.00013887882232666016, -0.0234832763671875, 0.00341796875, 0.0138702392578125, 0.0714111328125, -0.05999755859375, 0.048309326171875, -0.038360595703125, 0.0030879974365234375, 0.03570556640625, -0.03778076171875, 0.0645751953125, 0.02191162109375, -0.0004208087921142578, -0.00140380859375, -0.00959014892578125, -0.053497314453125, -0.02325439453125, 0.058624267578125, -0.10052490234375, -0.015167236328125, -0.043121337890625, -0.025115966796875, -0.01277923583984375, 0.017425537109375, 0.062286376953125, 0.039459228515625, -0.01496124267578125, 
0.0024890899658203125, 0.036102294921875, 0.002719879150390625, 0.0239105224609375, 0.00438690185546875, -0.004650115966796875, -0.043212890625, 0.062408447265625, -0.0072784423828125, 0.01153564453125, -0.00391387939453125, 0.016357421875, -0.0294189453125, -0.0362548828125, -0.0255279541015625, 0.032806396484375, -0.0498046875, -0.010162353515625, -0.01995849609375, -0.040679931640625, -0.01971435546875, 0.015960693359375, -0.03369140625, -0.01078033447265625, -0.037322998046875, -0.01360321044921875, 0.036865234375, 0.0560302734375, 0.0029392242431640625, 0.061126708984375, -0.04022216796875, 0.0179901123046875, 0.0286712646484375, 0.040496826171875, -0.00392913818359375, -0.06475830078125, -0.023773193359375, 0.00445556640625, -0.0264739990234375, -0.02593994140625, 0.01953125, -0.0005345344543457031, 0.03643798828125, 0.0406494140625, -0.034881591796875, 0.06689453125, -0.01837158203125, 0.052642822265625, 0.03277587890625, -0.03466796875, 0.037322998046875, -0.032196044921875, 0.025054931640625, 0.0390625, 0.0303497314453125, 0.0139007568359375, 0.015350341796875, -0.08172607421875, -0.053009033203125, 0.06292724609375, 0.0214080810546875, 0.0084228515625, 0.0191650390625, 0.043212890625, 0.00411224365234375, 0.01763916015625, -0.073486328125, -0.0302276611328125, -0.01995849609375, 0.0035419464111328125, -0.0311431884765625, -0.02911376953125, -0.01983642578125, -0.049072265625, 0.075439453125, -0.00904083251953125, 0.0295257568359375, -0.0036563873291015625, 0.01320648193359375, -0.020904541015625, -0.0078277587890625, 0.036285400390625, 0.05120849609375, -0.03631591796875, -0.026397705078125, 0.017303466796875, -0.056610107421875, -0.005054473876953125, 0.0305633544921875, -0.0085296630859375, 0.0008411407470703125, 0.0190277099609375, 0.08563232421875, 0.00714874267578125, 0.0029201507568359375, 0.025604248046875, 0.00008362531661987305, -0.0338134765625, -0.032501220703125, 0.0240478515625, -0.01476287841796875, 0.0169219970703125, 0.0009889602661132812, 
0.029541015625, -0.00904083251953125, -0.0243682861328125, -0.005847930908203125, 0.0223541259765625, -0.0263824462890625, -0.0232086181640625, 0.0703125, 0.01047515869140625, -0.005985260009765625, 0.056365966796875, -0.00875091552734375, -0.0167694091796875, 0.050933837890625, 0.046234130859375, 0.064697265625, -0.0081939697265625, 0.00308990478515625, 0.062225341796875, 0.00408172607421875, -0.03045654296875, 0.033599853515625, 0.006069183349609375, -0.036102294921875, -0.0196533203125, -0.042724609375, -0.005321502685546875, 0.047943115234375, -0.07513427734375, 0.0362548828125, -0.048980712890625, -0.035125732421875, 0.009185791015625, 0.02301025390625, -0.0712890625, 0.039886474609375, 0.003326416015625, 0.0711669921875, -0.074462890625, 0.07867431640625, 0.042327880859375, -0.040924072265625, -0.074951171875, -0.0204010009765625, -0.016357421875, -0.06951904296875, 0.042236328125, -0.0038318634033203125, 0.0230255126953125, 0.00771331787109375, -0.0467529296875, -0.0576171875, 0.085205078125, 0.0243072509765625, -0.04168701171875, -0.00563812255859375, 0.0284423828125, 0.04718017578125, -0.0099334716796875, 0.040435791015625, 0.0279693603515625, 0.0308837890625, 0.0206451416015625, -0.0716552734375, -0.004852294921875, -0.0191192626953125, 0.0024700164794921875, 0.018402099609375, -0.0635986328125, 0.07977294921875, -0.01364898681640625, 0.036285400390625, 0.03228759765625, 0.03466796875, 0.0182647705078125, 0.006862640380859375, 0.0218505859375, 0.06561279296875, 0.038604736328125, -0.02264404296875, 0.06927490234375, -0.01529693603515625, 0.0570068359375, 0.09344482421875, -0.006061553955078125, 0.047576904296875, 0.025421142578125, -0.0255584716796875, 0.01215362548828125, 0.060455322265625, -0.03802490234375, 0.0281219482421875, 0.01448822021484375, -0.0012826919555664062, -0.0233001708984375, 0.0323486328125, -0.0721435546875, 0.0274505615234375, -0.0007033348083496094, -0.049224853515625, -0.0202484130859375, -0.0191192626953125, -0.005352020263671875, 
-0.034393310546875, -0.03350830078125, 0.045074462890625, -0.019287109375, -0.03375244140625, 0.052978515625, 0.004192352294921875, 0.031646728515625, -0.061614990234375, 0.0031566619873046875, -0.005001068115234375, 0.0308074951171875, -0.030242919921875, -0.03912353515625, 0.00013971328735351562, -0.001556396484375, -0.0051422119140625, 0.0114593505859375, 0.0311126708984375, -0.020660400390625, -0.053619384765625, 0.007656097412109375, 0.0263214111328125, 0.022735595703125, -0.0106353759765625, -0.07562255859375, -0.0021839141845703125, -0.0035686492919921875, -0.029632568359375, 0.03179931640625, 0.0169525146484375, 0.006649017333984375, 0.029541015625, 0.043914794921875, 0.003208160400390625, -0.004589080810546875, 0.01593017578125, 0.0684814453125, -0.04840087890625, -0.04931640625, -0.053436279296875, 0.0232086181640625, -0.010589599609375, -0.068115234375, 0.0404052734375, 0.08123779296875, 0.05841064453125, -0.024261474609375, 0.0517578125, 0.002655029296875, 0.018157958984375, -0.040740966796875, 0.04364013671875, -0.0292510986328125, -0.0033397674560546875, -0.0261688232421875, -0.08941650390625, 0.01451873779296875, 0.05718994140625, -0.0239105224609375, 0.02996826171875, 0.042144775390625, 0.044403076171875, -0.021728515625, 0.01287078857421875, 0.00390625, 0.0138092041015625, 0.0115509033203125, 0.045623779296875, 0.035858154296875, -0.07220458984375, 0.0293121337890625, -0.0290374755859375, -0.01000213623046875, -0.011260986328125, -0.0460205078125, -0.072021484375, -0.020263671875, -0.04595947265625, -0.043212890625, -0.0038013458251953125, 0.065673828125, 0.0582275390625, -0.046905517578125, -0.020904541015625, -0.007720947265625, -0.0263824462890625, -0.00887298583984375, -0.0125885009765625, 0.04351806640625, -0.0029811859130859375, -0.048614501953125, -0.012115478515625, -0.03045654296875, 0.034149169921875, -0.01097869873046875, -0.0169830322265625, -0.002758026123046875, -0.03076171875, 0.0325927734375, 0.0232086181640625, -0.04107666015625, 
-0.0271453857421875, -0.037078857421875, -0.0069122314453125, 0.01381683349609375, 0.027191162109375, -0.04461669921875, 0.0009407997131347656, 0.01505279541015625, 0.0107574462890625, 0.061859130859375, 0.00246429443359375, 0.01500701904296875, -0.043548583984375, 0.042724609375, -0.00620269775390625, 0.030487060546875, -0.0010957717895507812, -0.031768798828125, 0.052032470703125, 0.031707763671875, -0.04339599609375, -0.047576904296875, -0.00775146484375, -0.0865478515625, 0.01085662841796875, 0.0965576171875, 0.006786346435546875, -0.0250701904296875, 0.035430908203125, -0.0262603759765625, 0.0258636474609375, -0.004940032958984375, 0.039093017578125, 0.03509521484375, -0.005645751953125, 0.0183258056640625, -0.043914794921875, 0.040863037109375, 0.004405975341796875, -0.07049560546875, -0.01343536376953125, 0.0214080810546875, 0.047149658203125, -0.002811431884765625, 0.0303497314453125, -0.00439453125, 0.027740478515625, 0.01119232177734375, 0.00714111328125, -0.033294677734375, -0.01153564453125, -0.0169219970703125, 0.0013189315795898438, 0.00732421875, -0.0274505615234375 ] ]
OpenAssistant/pythia-12b-pre-v8-12.5k-steps
2023-05-24T14:06:49.000Z
[ "transformers", "pytorch", "gpt_neox", "text-generation", "license:apache-2.0", "endpoints_compatible", "has_space", "text-generation-inference", "region:us" ]
text-generation
OpenAssistant
null
null
OpenAssistant/pythia-12b-pre-v8-12.5k-steps
6
5,821
transformers
2023-05-05T22:39:17
--- license: apache-2.0 --- **Note: internal model, not ready for use** This is an intermediate model used as base-model for further pythia 12b SFT-8 experiments. It was trained on a wider set of instruction-tuning datasets for >12.5k steps with batch-size 128 and a context size of 2048. The gpt4all dataset had "as a language model" *contamination* (>1.8k entries). We added filtering later, but this model (pre-v8) was trained on the raw unfildered gpt4all dataset. - wandb: https://wandb.ai/open-assistant/supervised-finetuning/runs/sytsyhrp - [sampling report](https://open-assistant.github.io/oasst-model-eval/?f=https%3A%2F%2Fraw.githubusercontent.com%2FOpen-Assistant%2Foasst-model-eval%2Fmain%2Fsampling_reports%2Foasst-pretrained%2F2023-05-05_OpenAssistant_pythia-12b-pre-v8-12_5k-steps_sampling_noprefix2.json) Datasets: ``` pretrain: num_train_epochs: 1 weight_decay: 0.0 use_custom_sampler: true sort_by_length: false datasets: - gpteacher_roleplay: val_split: 0.05 - red_pajama: fraction: 0.25 max_val_set: 1000 - wizardlm_70k: val_split: 0.05 max_val_set: 500 - joke: val_split: 0.05 - poem_instructions: val_split: 0.025 - oa_stackexchange: val_split: 0.05 fraction: 0.1 max_val_set: 1000 - tell_a_joke: val_split: 0.05 max_val_set: 250 - webgpt: val_split: 0.05 max_val_set: 250 - gpt4all: val_split: 0.01 max_val_set: 1000 - alpaca_gpt4: val_split: 0.025 max_val_set: 250 - code_alpaca: val_split: 0.05 max_val_set: 250 - vicuna: max_val_set: 250 - oig_file: source_url: https://huggingface.co/datasets/laion/OIG/resolve/main/unified_chip2.jsonl max_count: 10000 min_length: 250 val_split: 0.05 max_val_set: 250 - minimath: val_split: 0.05 - humaneval_mbpp_codegen_qa: val_split: 0.05 - humaneval_mbpp_testgen_qa: val_split: 0.05 - grade_school_math_instructions: val_split: 0.05 - recipes: val_split: 0.05 - cmu_wiki_qa: val_split: 0.05 - oa_wiki_qa_bart_10000row: val_split: 0.05 max_val_set: 250 - prosocial_dialogue: fraction: 0.1 max_val_set: 250 - explain_prosocial: 
fraction: 0.075 max_val_set: 250 - soda: fraction: 0.25 max_val_set: 1000 - oa_leet10k: val_split: 0.05 max_val_set: 250 - dolly15k: val_split: 0.05 max_val_set: 300 ``` Pythia: ``` pythia-12b-pretrain: dtype: fp16 log_dir: "pythia_log_12b" learning_rate: 6e-6 model_name: EleutherAI/pythia-12b-deduped output_dir: pythia_model_12b weight_decay: 0.0 max_length: 2048 warmup_steps: 100 gradient_checkpointing: true gradient_accumulation_steps: 4 per_device_train_batch_size: 4 per_device_eval_batch_size: 4 eval_steps: 251 save_steps: 500 num_train_epochs: 1 save_total_limit: 2 deepspeed_config: configs/zero_config_pretrain.json ``` Command used: `deepspeed trainer_sft.py --show_dataset_stats --configs defaults pythia-12b-pretrain pretrain --cache_dir .cache/ --output_dir .saved/pythia-12b-super-pretrain2 --deepspeed`
3,319
[ [ -0.0384521484375, -0.03851318359375, 0.022186279296875, -0.0012941360473632812, -0.01419830322265625, -0.015289306640625, -0.01194000244140625, -0.015838623046875, 0.0078277587890625, 0.0207366943359375, -0.042510986328125, -0.039581298828125, -0.039825439453125, -0.0028095245361328125, -0.007659912109375, 0.08251953125, -0.01354217529296875, 0.003833770751953125, 0.0161895751953125, -0.01436614990234375, -0.03851318359375, -0.033172607421875, -0.03924560546875, -0.0170135498046875, 0.0286407470703125, 0.041473388671875, 0.043792724609375, 0.033660888671875, 0.034942626953125, 0.0233154296875, -0.01123809814453125, 0.004730224609375, -0.033538818359375, -0.0292816162109375, 0.00930023193359375, -0.03485107421875, -0.055877685546875, 0.0155792236328125, 0.05413818359375, 0.043792724609375, 0.003398895263671875, 0.029754638671875, -0.01088714599609375, 0.03662109375, -0.037872314453125, 0.0090179443359375, -0.0158843994140625, 0.0032291412353515625, -0.01247406005859375, -0.004673004150390625, -0.0157012939453125, -0.0218963623046875, 0.0258331298828125, -0.042694091796875, 0.026153564453125, -0.0007271766662597656, 0.09332275390625, 0.0004436969757080078, -0.0157470703125, -0.01397705078125, -0.029510498046875, 0.0574951171875, -0.06732177734375, 0.014495849609375, 0.036041259765625, 0.0021266937255859375, -0.006328582763671875, -0.06207275390625, -0.0340576171875, -0.018951416015625, -0.0218353271484375, 0.013519287109375, -0.01531219482421875, 0.0031871795654296875, 0.053680419921875, 0.057830810546875, -0.06610107421875, -0.00946044921875, -0.0433349609375, -0.0027904510498046875, 0.04376220703125, 0.0171661376953125, 0.0194091796875, -0.00684356689453125, -0.0173187255859375, -0.02362060546875, -0.044677734375, 0.0139923095703125, 0.03680419921875, 0.01024627685546875, -0.02734375, 0.0455322265625, -0.030731201171875, 0.042510986328125, 0.0180816650390625, -0.00011730194091796875, 0.045867919921875, -0.03399658203125, -0.03436279296875, 
-0.0026187896728515625, 0.08343505859375, 0.0165863037109375, 0.00812530517578125, 0.01224517822265625, -0.008544921875, -0.0031642913818359375, 0.0042877197265625, -0.09136962890625, -0.0467529296875, 0.019866943359375, -0.0251922607421875, -0.0172576904296875, 0.0154876708984375, -0.0511474609375, 0.0015535354614257812, -0.016571044921875, 0.046844482421875, -0.050384521484375, -0.0231781005859375, 0.0035610198974609375, -0.007404327392578125, 0.0100555419921875, 0.005657196044921875, -0.059967041015625, 0.0202484130859375, 0.0190582275390625, 0.06683349609375, 0.0157318115234375, -0.04534912109375, -0.01442718505859375, -0.01346588134765625, -0.031524658203125, 0.0307769775390625, -0.01052093505859375, -0.0162353515625, -0.0312347412109375, 0.0265350341796875, -0.019439697265625, -0.02606201171875, 0.033538818359375, -0.024322509765625, 0.0275726318359375, -0.032012939453125, -0.041168212890625, -0.01453399658203125, -0.004047393798828125, -0.051910400390625, 0.08795166015625, 0.026092529296875, -0.056182861328125, 0.03009033203125, -0.040252685546875, -0.01146697998046875, -0.00270843505859375, 0.0023555755615234375, -0.03564453125, 0.000051081180572509766, 0.013916015625, 0.0290985107421875, -0.025238037109375, 0.0275421142578125, -0.00894927978515625, -0.047088623046875, 0.004352569580078125, -0.03900146484375, 0.07672119140625, 0.0131072998046875, -0.03155517578125, 0.01397705078125, -0.0660400390625, 0.0041351318359375, 0.019683837890625, -0.0214996337890625, -0.004505157470703125, -0.03448486328125, -0.00621795654296875, 0.0102386474609375, 0.01953125, -0.02099609375, 0.0210723876953125, -0.037689208984375, 0.043701171875, 0.0523681640625, 0.000049173831939697266, 0.02825927734375, -0.0306243896484375, 0.0394287109375, 0.00980377197265625, 0.019989013671875, -0.01233673095703125, -0.042999267578125, -0.0712890625, -0.01538848876953125, 0.018890380859375, 0.0203857421875, -0.0214691162109375, 0.06207275390625, -0.0134429931640625, -0.05120849609375, 
-0.053863525390625, -0.01453399658203125, 0.03564453125, 0.04791259765625, 0.040740966796875, -0.01544189453125, -0.03558349609375, -0.06268310546875, -0.005039215087890625, 0.0012865066528320312, -0.00038051605224609375, 0.020355224609375, 0.07232666015625, -0.000019252300262451172, 0.05078125, -0.046630859375, -0.01230621337890625, -0.004428863525390625, 0.0186767578125, 0.049468994140625, 0.0457763671875, 0.054534912109375, -0.038055419921875, -0.0389404296875, -0.013580322265625, -0.047088623046875, 0.0146942138671875, -0.0029201507568359375, -0.03277587890625, 0.009246826171875, 0.01470184326171875, -0.06658935546875, 0.045166015625, 0.0280609130859375, -0.042633056640625, 0.045562744140625, -0.0206298828125, 0.0228118896484375, -0.08087158203125, 0.023101806640625, -0.00395965576171875, 0.005352020263671875, -0.034637451171875, 0.0079193115234375, 0.00830841064453125, 0.0061492919921875, -0.0269317626953125, 0.041259765625, -0.049102783203125, 0.00601959228515625, -0.002346038818359375, -0.012298583984375, -0.0034942626953125, 0.05035400390625, 0.0088653564453125, 0.0638427734375, 0.045623779296875, -0.032928466796875, 0.0290069580078125, 0.01326751708984375, -0.0265655517578125, 0.0269012451171875, -0.055877685546875, 0.0036640167236328125, 0.0126800537109375, 0.0178985595703125, -0.0662841796875, -0.04388427734375, 0.0275421142578125, -0.048187255859375, 0.016143798828125, -0.0295257568359375, -0.032012939453125, -0.036407470703125, -0.02716064453125, 0.0306549072265625, 0.045440673828125, -0.041595458984375, 0.03369140625, 0.0041961669921875, 0.01019287109375, -0.035247802734375, -0.060455322265625, -0.0264739990234375, -0.0167083740234375, -0.039794921875, 0.02777099609375, -0.01446533203125, 0.01476287841796875, 0.01209259033203125, -0.0167236328125, -0.00826263427734375, -0.0023784637451171875, 0.02227783203125, 0.032196044921875, -0.0208892822265625, -0.0084991455078125, -0.004779815673828125, -0.00612640380859375, 0.008636474609375, 
-0.01122283935546875, 0.06591796875, -0.02923583984375, -0.00933074951171875, -0.04986572265625, 0.0028553009033203125, 0.041534423828125, -0.018280029296875, 0.07708740234375, 0.0675048828125, -0.03997802734375, 0.0084381103515625, -0.0215301513671875, -0.00882720947265625, -0.0352783203125, 0.02679443359375, -0.03369140625, -0.0458984375, 0.046173095703125, 0.0189971923828125, 0.0097503662109375, 0.0665283203125, 0.036865234375, 0.006328582763671875, 0.079833984375, 0.0092010498046875, -0.01387786865234375, 0.04669189453125, -0.060577392578125, 0.004795074462890625, -0.0650634765625, -0.0174407958984375, -0.02490234375, -0.0174407958984375, -0.0516357421875, -0.017120361328125, 0.02459716796875, 0.0284881591796875, -0.059356689453125, 0.037353515625, -0.0465087890625, 0.034393310546875, 0.06927490234375, 0.0155792236328125, 0.00173187255859375, 0.00347900390625, -0.00608062744140625, 0.0128021240234375, -0.052337646484375, -0.018798828125, 0.10809326171875, 0.020294189453125, 0.040618896484375, 0.0012912750244140625, 0.0556640625, -0.01320648193359375, 0.0128021240234375, -0.048828125, 0.035736083984375, 0.004505157470703125, -0.05078125, -0.0211181640625, -0.048126220703125, -0.0577392578125, 0.01195526123046875, 0.0022335052490234375, -0.06353759765625, 0.011993408203125, 0.01519775390625, -0.0394287109375, 0.020263671875, -0.06964111328125, 0.07904052734375, -0.0212860107421875, -0.0246734619140625, -0.00040721893310546875, -0.04925537109375, 0.035400390625, 0.00792694091796875, -0.006927490234375, -0.00739288330078125, 0.0289306640625, 0.07452392578125, -0.058013916015625, 0.04132080078125, -0.01511383056640625, 0.0162811279296875, 0.032012939453125, -0.0094757080078125, 0.03875732421875, 0.00746917724609375, 0.002132415771484375, 0.019378662109375, 0.0174407958984375, -0.0258026123046875, -0.01486968994140625, 0.0648193359375, -0.0777587890625, -0.0125274658203125, -0.0615234375, -0.038360595703125, -0.002979278564453125, 0.029937744140625, 0.05291748046875, 
0.03802490234375, 0.00589752197265625, 0.0231170654296875, 0.045257568359375, -0.01538848876953125, 0.028076171875, 0.0211334228515625, 0.0007066726684570312, -0.055877685546875, 0.07891845703125, 0.0104217529296875, 0.01885986328125, 0.0025482177734375, 0.0179290771484375, -0.0266265869140625, -0.03204345703125, -0.037139892578125, 0.027496337890625, -0.03424072265625, -0.0264129638671875, -0.037384033203125, -0.0266265869140625, -0.04412841796875, 0.00218963623046875, -0.03558349609375, -0.00902557373046875, -0.051910400390625, -0.0110931396484375, 0.046142578125, 0.0307464599609375, -0.0021076202392578125, 0.041717529296875, -0.049224853515625, 0.0372314453125, 0.0029277801513671875, 0.036163330078125, -0.0027141571044921875, -0.06591796875, -0.016693115234375, 0.01490020751953125, -0.0223388671875, -0.0732421875, 0.03656005859375, 0.0018510818481445312, 0.031829833984375, 0.027191162109375, -0.005672454833984375, 0.07666015625, -0.0157623291015625, 0.07379150390625, 0.024993896484375, -0.061248779296875, 0.0491943359375, -0.02618408203125, 0.0178985595703125, 0.04229736328125, 0.04132080078125, -0.02752685546875, -0.0002416372299194336, -0.06683349609375, -0.0848388671875, 0.08416748046875, 0.0158843994140625, -0.00843048095703125, 0.00555419921875, 0.0251617431640625, -0.0083160400390625, 0.0123443603515625, -0.058380126953125, -0.037689208984375, -0.01322174072265625, -0.0258331298828125, 0.002532958984375, -0.0122222900390625, -0.015960693359375, -0.053680419921875, 0.07269287109375, 0.002010345458984375, 0.0279541015625, 0.01010894775390625, -0.01042938232421875, -0.01274871826171875, -0.0094451904296875, 0.0249176025390625, 0.0384521484375, -0.04254150390625, -0.016998291015625, 0.01148223876953125, -0.0540771484375, 0.004573822021484375, 0.0218353271484375, -0.017120361328125, -0.01105499267578125, 0.033172607421875, 0.07623291015625, 0.0038299560546875, -0.03216552734375, 0.0306396484375, -0.0104217529296875, -0.0247802734375, -0.0352783203125, 
0.017364501953125, 0.00327301025390625, 0.0162506103515625, 0.01206207275390625, 0.0021152496337890625, 0.00370025634765625, -0.050628662109375, 0.004932403564453125, 0.0186614990234375, -0.009521484375, -0.0293731689453125, 0.04425048828125, -0.00994873046875, -0.0001825094223022461, 0.058868408203125, -0.02935791015625, -0.0223388671875, 0.055572509765625, 0.01861572265625, 0.061279296875, -0.022613525390625, -0.0022430419921875, 0.06109619140625, 0.018310546875, -0.0264739990234375, 0.032379150390625, 0.0171356201171875, -0.036712646484375, 0.0003285408020019531, -0.061614990234375, -0.0018014907836914062, 0.04736328125, -0.06787109375, 0.027069091796875, -0.037750244140625, -0.01934814453125, 0.0019197463989257812, 0.0243682861328125, -0.06414794921875, 0.031524658203125, -0.0058746337890625, 0.060546875, -0.069091796875, 0.057342529296875, 0.049224853515625, -0.056365966796875, -0.08929443359375, -0.007274627685546875, -0.015167236328125, -0.0440673828125, 0.0241241455078125, 0.00565338134765625, 0.017059326171875, 0.005039215087890625, -0.04290771484375, -0.0721435546875, 0.1024169921875, 0.0250701904296875, -0.049102783203125, -0.0172271728515625, -0.002872467041015625, 0.04132080078125, -0.002758026123046875, 0.04925537109375, 0.07147216796875, 0.0283203125, 0.0135345458984375, -0.06573486328125, 0.0133819580078125, -0.0303497314453125, -0.0204315185546875, 0.0125579833984375, -0.07073974609375, 0.10260009765625, -0.00830078125, 0.00811004638671875, 0.024139404296875, 0.03875732421875, 0.038055419921875, 0.01160430908203125, 0.01904296875, 0.058319091796875, 0.05657958984375, -0.02734375, 0.0631103515625, -0.02398681640625, 0.045166015625, 0.07843017578125, 0.014923095703125, 0.041015625, 0.0196533203125, -0.0280609130859375, 0.01824951171875, 0.0640869140625, -0.0212554931640625, 0.043792724609375, 0.009796142578125, -0.01019287109375, -0.00908660888671875, 0.01397705078125, -0.048370361328125, 0.0146484375, 0.02606201171875, -0.045257568359375, 
0.003673553466796875, -0.0237579345703125, 0.020538330078125, -0.041839599609375, -0.0193023681640625, 0.0306243896484375, -0.002498626708984375, -0.0528564453125, 0.05303955078125, -0.005580902099609375, 0.04437255859375, -0.0504150390625, -0.01233673095703125, -0.0247039794921875, 0.0201568603515625, -0.02215576171875, -0.05535888671875, 0.0008611679077148438, 0.0104827880859375, -0.00836181640625, 0.004886627197265625, 0.0296783447265625, -0.029449462890625, -0.048828125, 0.01270294189453125, 0.011077880859375, 0.024566650390625, -0.01331329345703125, -0.0501708984375, 0.0054473876953125, 0.00359344482421875, -0.04931640625, 0.0278778076171875, 0.0390625, 0.0122833251953125, 0.0283660888671875, 0.04888916015625, 0.01110076904296875, 0.0166015625, -0.0036334991455078125, 0.0780029296875, -0.048126220703125, -0.0225067138671875, -0.048828125, 0.048828125, 0.0011749267578125, -0.06744384765625, 0.045867919921875, 0.05413818359375, 0.07537841796875, 0.00069427490234375, 0.05780029296875, -0.0211639404296875, 0.0201263427734375, -0.042999267578125, 0.05328369140625, -0.04931640625, -0.0005407333374023438, -0.0289764404296875, -0.049560546875, 0.008575439453125, 0.07159423828125, -0.0275421142578125, 0.0243072509765625, 0.05316162109375, 0.05010986328125, -0.0223541259765625, 0.004627227783203125, -0.00360870361328125, 0.02471923828125, 0.0248565673828125, 0.06201171875, 0.049560546875, -0.0635986328125, 0.03057861328125, -0.047760009765625, -0.01715087890625, -0.01666259765625, -0.0399169921875, -0.07000732421875, -0.0302581787109375, -0.019500732421875, -0.02947998046875, -0.0075225830078125, 0.075927734375, 0.055908203125, -0.07037353515625, -0.01311492919921875, -0.0237579345703125, -0.0242462158203125, -0.0148773193359375, -0.0240325927734375, 0.046417236328125, -0.0017261505126953125, -0.059783935546875, 0.026275634765625, -0.029144287109375, 0.01006317138671875, -0.0038089752197265625, -0.01995849609375, -0.0178985595703125, -0.0157012939453125, 
0.02777099609375, 0.01261138916015625, -0.041534423828125, -0.01242828369140625, -0.003673553466796875, -0.0016450881958007812, -0.001251220703125, 0.0225830078125, -0.040252685546875, 0.01323699951171875, 0.0399169921875, 0.0063018798828125, 0.0594482421875, -0.005771636962890625, 0.019561767578125, -0.0494384765625, 0.013427734375, 0.0184173583984375, 0.035980224609375, -0.00331878662109375, -0.022064208984375, 0.045166015625, 0.037078857421875, -0.04443359375, -0.07061767578125, -0.01511383056640625, -0.0697021484375, -0.004871368408203125, 0.08099365234375, -0.0262298583984375, -0.0177001953125, 0.03265380859375, -0.01483917236328125, 0.03912353515625, -0.038482666015625, 0.03155517578125, 0.047393798828125, -0.0171661376953125, -0.0013456344604492188, -0.060272216796875, 0.040435791015625, 0.0163116455078125, -0.06475830078125, 0.003162384033203125, 0.04351806640625, 0.05023193359375, 0.01332855224609375, 0.054107666015625, -0.0084991455078125, 0.0269775390625, 0.014923095703125, 0.007289886474609375, -0.015960693359375, -0.0372314453125, -0.0244140625, -0.0030155181884765625, -0.0121002197265625, -0.027252197265625 ] ]
VMware/open-llama-7b-open-instruct
2023-07-11T06:24:19.000Z
[ "transformers", "pytorch", "llama", "text-generation", "en", "dataset:VMware/open-instruct-v1-oasst-dolly-hhrlhf", "license:cc-by-sa-3.0", "endpoints_compatible", "has_space", "text-generation-inference", "region:us" ]
text-generation
VMware
null
null
VMware/open-llama-7b-open-instruct
25
5,821
transformers
2023-06-08T05:02:06
--- license: cc-by-sa-3.0 datasets: - VMware/open-instruct-v1-oasst-dolly-hhrlhf language: - en library_name: transformers pipeline_tag: text-generation --- # VMware/open-llama-7B-open-instruct Instruction-tuned version of the fully trained Open LLama 7B model. The model is open for <b>COMMERCIAL USE</b>. <br> <b> There is a v2 version of this model available, https://huggingface.co/VMware/open-llama-7b-v2-open-instruct </b> <b> NOTE </b> : The model was trained using the Alpaca prompt template <b> NOTE </b> : Fast tokenizer results in incorrect encoding, set the ```use_fast = False``` parameter, when instantiating the tokenizer ## License - <b>Commercially Viable </b> - Instruction dataset, [VMware/open-instruct-v1-oasst-dolly-hhrlhf](https://huggingface.co/datasets/VMware/open-instruct-v1-oasst-dolly-hhrlhf) is under cc-by-sa-3.0 - Language Model, ([openlm-research/open_llama_7b](https://huggingface.co/openlm-research/open_llama_7b)) is under apache-2.0 ## Nomenclature - Model : Open-llama - Model Size: 7B parameters - Dataset: Open-instruct-v1 (oasst,dolly, hhrlhf) ## Use in Transformers ``` import os import torch from transformers import AutoModelForCausalLM, AutoTokenizer model_name = 'VMware/open-llama-7b-open-instruct' tokenizer = AutoTokenizer.from_pretrained(model_name, use_fast=False) model = AutoModelForCausalLM.from_pretrained(model_name, torch_dtype=torch.float16, device_map='sequential') prompt_template = "Below is an instruction that describes a task. 
Write a response that appropriately completes the request.\n\n### Instruction:\n{instruction}\n\n### Response:" prompt = 'Explain in simple terms how the attention mechanism of a transformer model works' inputt = prompt_template.format(instruction= prompt) input_ids = tokenizer(inputt, return_tensors="pt").input_ids.to("cuda") output1 = model.generate(input_ids, max_length=512) input_length = input_ids.shape[1] output1 = output1[:, input_length:] output = tokenizer.decode(output1[0]) print(output) ''' Attention is a mechanism used in deep learning models, such as transformer models, to capture global dependencies between different parts of the input. In a transformer model, the attention mechanism works by computing a weighted sum of the input vectors and then applying a non-linear activation function to the result. The attention mechanism in a transformer model works in two steps: 1. Query-Key Mapping: First, the input sequence is divided into two parts: the query vector and the key vector. The query vector represents the input at the current position, and the key vector represents the input at a previous position. 2. Attention Weight Calculation: Second, the attention weights are calculated using the dot product between the query vector and each key vector. The attention weights represent the importance of the input at the previous position to the current position. The attention weights are then used to compute the attention score for each input element. The attention score represents the relevance of the input element to the current position. The attention mechanism in a transformer model is designed to capture global dependencies between different parts of the input. By attending to input elements from different positions, the model can learn to understand the relationships between different parts of the input. 
This allows the model to perform more complex tasks, such as understanding the relationships between words in a sentence or pixels in an image.</s> ''' ``` ## Finetuning details The finetuning scripts will be available in our [RAIL Github Repository](https://github.com/vmware-labs/research-and-development-artificial-intelligence-lab/tree/main/instruction-tuning) ## Evaluation <B>TODO</B>
3,760
[ [ -0.03759765625, -0.052001953125, 0.0293731689453125, 0.017303466796875, -0.0167999267578125, -0.033111572265625, -0.0070037841796875, -0.0219573974609375, -0.00885772705078125, 0.046234130859375, -0.05621337890625, -0.03546142578125, -0.049530029296875, 0.0013895034790039062, -0.021392822265625, 0.0716552734375, -0.00713348388671875, 0.00024211406707763672, -0.01209259033203125, -0.00844573974609375, -0.0501708984375, -0.0301971435546875, -0.05426025390625, -0.0208740234375, 0.020050048828125, 0.0264739990234375, 0.03564453125, 0.05609130859375, 0.062469482421875, 0.028411865234375, -0.01306915283203125, -0.0028514862060546875, -0.044403076171875, -0.0051422119140625, 0.002681732177734375, -0.06304931640625, -0.0264739990234375, -0.003307342529296875, 0.040557861328125, 0.0197906494140625, 0.0005507469177246094, 0.041412353515625, 0.0051727294921875, 0.0152435302734375, -0.042083740234375, 0.0347900390625, -0.040740966796875, 0.0100250244140625, -0.01284027099609375, -0.006679534912109375, -0.0484619140625, -0.02099609375, -0.003086090087890625, -0.04595947265625, 0.031768798828125, 0.005908966064453125, 0.0792236328125, 0.058624267578125, -0.0217437744140625, -0.01538848876953125, -0.060211181640625, 0.06610107421875, -0.054229736328125, 0.0228118896484375, 0.0243988037109375, 0.039154052734375, -0.00681304931640625, -0.072998046875, -0.047637939453125, -0.0188751220703125, -0.014892578125, 0.018585205078125, -0.0220489501953125, 0.006832122802734375, 0.043060302734375, 0.03857421875, -0.02886962890625, 0.01122283935546875, -0.046356201171875, -0.00856781005859375, 0.0189971923828125, 0.0284881591796875, 0.0084686279296875, -0.00472259521484375, -0.0310516357421875, -0.0207061767578125, -0.03857421875, 0.0198822021484375, 0.01383209228515625, 0.01508331298828125, -0.0255889892578125, 0.05322265625, -0.004344940185546875, 0.05194091796875, 0.030029296875, -0.0175628662109375, 0.027374267578125, 0.002315521240234375, -0.0311126708984375, 0.007228851318359375, 
0.058013916015625, 0.0229339599609375, 0.002338409423828125, -0.004512786865234375, -0.01404571533203125, 0.022186279296875, 0.036163330078125, -0.060791015625, 0.0023517608642578125, 0.020965576171875, -0.03460693359375, -0.0242156982421875, 0.00780487060546875, -0.04962158203125, -0.0078582763671875, -0.021209716796875, 0.04498291015625, -0.0231170654296875, -0.0014181137084960938, 0.01439666748046875, 0.00304412841796875, 0.052001953125, 0.004573822021484375, -0.061187744140625, 0.02587890625, 0.038818359375, 0.052581787109375, -0.01139068603515625, -0.032135009765625, -0.03521728515625, 0.005908966064453125, 0.00751495361328125, 0.048126220703125, -0.029571533203125, -0.030059814453125, -0.012725830078125, 0.0335693359375, -0.00868988037109375, -0.03778076171875, 0.04449462890625, -0.046234130859375, 0.040069580078125, -0.01056671142578125, -0.031494140625, -0.031494140625, 0.0093994140625, -0.05670166015625, 0.1016845703125, 0.00585174560546875, -0.05938720703125, -0.00432586669921875, -0.0723876953125, -0.0218963623046875, -0.0126953125, 0.01294708251953125, -0.039306640625, 0.00684356689453125, 0.0263519287109375, 0.029815673828125, -0.02569580078125, 0.009063720703125, -0.0009369850158691406, -0.045440673828125, 0.013092041015625, -0.0443115234375, 0.07049560546875, 0.00246429443359375, -0.04290771484375, 0.0247650146484375, -0.06463623046875, -0.0216064453125, 0.03851318359375, -0.0290069580078125, 0.0033130645751953125, -0.01251983642578125, -0.0128936767578125, 0.00652313232421875, 0.0236968994140625, -0.03594970703125, 0.03350830078125, -0.044281005859375, 0.031951904296875, 0.052581787109375, -0.00008189678192138672, 0.031280517578125, -0.0082855224609375, 0.040802001953125, 0.0197296142578125, 0.03924560546875, -0.0249786376953125, -0.0341796875, -0.09991455078125, -0.00649261474609375, 0.0103607177734375, 0.0517578125, -0.04364013671875, 0.02618408203125, -0.012115478515625, -0.049560546875, -0.0751953125, -0.0140838623046875, 0.0364990234375, 
0.052734375, 0.050689697265625, -0.0134735107421875, -0.04241943359375, -0.058807373046875, 0.0152435302734375, 0.0125579833984375, -0.00878143310546875, 0.0186309814453125, 0.035186767578125, -0.0295257568359375, 0.058380126953125, -0.05218505859375, -0.03009033203125, -0.0212860107421875, -0.002841949462890625, 0.033050537109375, 0.0274200439453125, 0.038818359375, -0.03521728515625, -0.01552581787109375, -0.01263427734375, -0.05718994140625, -0.0078125, 0.0090484619140625, -0.025665283203125, 0.00432586669921875, 0.0322265625, -0.05780029296875, 0.04327392578125, 0.021636962890625, -0.0134124755859375, 0.0299072265625, 0.0004038810729980469, 0.0022678375244140625, -0.09283447265625, -0.0005578994750976562, -0.003345489501953125, 0.005619049072265625, -0.037750244140625, 0.0164337158203125, 0.00324249267578125, -0.00067901611328125, -0.06524658203125, 0.04095458984375, -0.039825439453125, -0.00516510009765625, -0.0268707275390625, -0.0006666183471679688, -0.0021991729736328125, 0.0654296875, -0.0186614990234375, 0.0606689453125, 0.043701171875, -0.06292724609375, 0.037689208984375, 0.034912109375, -0.00937652587890625, 0.017852783203125, -0.0782470703125, 0.0186004638671875, -0.01544952392578125, 0.0029697418212890625, -0.08221435546875, -0.0125579833984375, 0.041778564453125, -0.0345458984375, 0.0268707275390625, -0.00554656982421875, -0.02239990234375, -0.05133056640625, -0.005615234375, 0.0279388427734375, 0.0467529296875, -0.0513916015625, 0.059234619140625, 0.02264404296875, 0.0210723876953125, -0.050079345703125, -0.052032470703125, -0.0232391357421875, -0.0096435546875, -0.040679931640625, 0.0260772705078125, -0.0003650188446044922, -0.00270843505859375, -0.0024700164794921875, -0.0038089752197265625, 0.00791168212890625, 0.004730224609375, 0.03070068359375, 0.03155517578125, -0.02703857421875, -0.00310516357421875, 0.0008678436279296875, -0.03070068359375, 0.0158233642578125, -0.002582550048828125, 0.06488037109375, -0.0168914794921875, 
-0.037506103515625, -0.05072021484375, 0.004634857177734375, 0.0484619140625, -0.03729248046875, 0.0556640625, 0.06634521484375, -0.0276336669921875, -0.002597808837890625, -0.040985107421875, -0.0309906005859375, -0.040802001953125, 0.0278778076171875, -0.0256195068359375, -0.04449462890625, 0.04241943359375, 0.005367279052734375, 0.0145721435546875, 0.05633544921875, 0.05865478515625, 0.0094757080078125, 0.0682373046875, 0.0634765625, -0.0028514862060546875, 0.043426513671875, -0.043243408203125, 0.00943756103515625, -0.07403564453125, -0.0303955078125, -0.043731689453125, -0.025970458984375, -0.0237884521484375, -0.027496337890625, 0.0224609375, -0.013397216796875, -0.0308685302734375, 0.040130615234375, -0.06768798828125, 0.0290985107421875, 0.06182861328125, 0.006618499755859375, 0.005672454833984375, -0.006317138671875, -0.004688262939453125, -0.0013589859008789062, -0.037017822265625, -0.0452880859375, 0.07098388671875, 0.03216552734375, 0.0643310546875, -0.0059814453125, 0.0369873046875, 0.006610870361328125, 0.018157958984375, -0.07501220703125, 0.049591064453125, 0.01522064208984375, -0.0212860107421875, -0.0157623291015625, -0.005615234375, -0.07684326171875, 0.021087646484375, -0.00015544891357421875, -0.056365966796875, 0.0023059844970703125, 0.0097503662109375, -0.0209197998046875, 0.022186279296875, -0.059722900390625, 0.0714111328125, -0.0157318115234375, -0.01020050048828125, 0.0077056884765625, -0.05517578125, 0.029449462890625, 0.021575927734375, -0.0188140869140625, 0.01424407958984375, 0.0096588134765625, 0.06475830078125, -0.023284912109375, 0.07794189453125, -0.01197052001953125, -0.0033893585205078125, 0.02618408203125, -0.0169219970703125, 0.038970947265625, -0.01983642578125, -0.0018796920776367188, 0.029571533203125, -0.0160675048828125, -0.006565093994140625, -0.032745361328125, 0.03802490234375, -0.08477783203125, -0.058135986328125, -0.0235595703125, -0.038421630859375, 0.00691986083984375, 0.01496124267578125, 0.047271728515625, 
0.0219573974609375, 0.0014858245849609375, 0.017181396484375, 0.062744140625, -0.01824951171875, 0.0426025390625, 0.03436279296875, -0.00600433349609375, -0.022979736328125, 0.057525634765625, 0.00890350341796875, 0.0070953369140625, 0.03924560546875, 0.0031833648681640625, -0.01434326171875, -0.01187896728515625, -0.03228759765625, 0.02508544921875, -0.060882568359375, -0.0212860107421875, -0.063720703125, -0.0297088623046875, -0.03076171875, -0.00027823448181152344, -0.0272216796875, -0.00884246826171875, -0.0440673828125, -0.01485443115234375, 0.03460693359375, 0.053497314453125, 0.0310211181640625, 0.06304931640625, -0.0482177734375, 0.040618896484375, 0.00981903076171875, 0.0193328857421875, 0.015625, -0.05963134765625, -0.0203857421875, 0.002132415771484375, -0.05010986328125, -0.07373046875, 0.0202789306640625, -0.01483154296875, 0.054595947265625, 0.0272674560546875, -0.00262451171875, 0.041015625, -0.03643798828125, 0.06024169921875, 0.0006108283996582031, -0.07135009765625, 0.04974365234375, 0.006633758544921875, 0.040008544921875, 0.016265869140625, 0.0231475830078125, -0.02655029296875, -0.02459716796875, -0.034759521484375, -0.06561279296875, 0.0811767578125, 0.0035572052001953125, 0.0031833648681640625, 0.00774383544921875, 0.01558685302734375, 0.00753021240234375, 0.00666046142578125, -0.06475830078125, -0.032440185546875, -0.025390625, -0.032501220703125, 0.004390716552734375, -0.0272216796875, -0.00788116455078125, -0.028778076171875, 0.0555419921875, 0.005401611328125, 0.051788330078125, -0.004062652587890625, -0.023651123046875, -0.003696441650390625, 0.00014162063598632812, 0.048583984375, 0.020416259765625, -0.00577545166015625, -0.003002166748046875, 0.028350830078125, -0.0621337890625, 0.00878143310546875, 0.006717681884765625, -0.0191650390625, -0.0024318695068359375, 0.01293182373046875, 0.09649658203125, 0.0091552734375, -0.01558685302734375, 0.03228759765625, -0.0197601318359375, -0.0017652511596679688, -0.021820068359375, 
0.0183563232421875, 0.0126495361328125, 0.021942138671875, 0.01406097412109375, -0.0023021697998046875, 0.005954742431640625, -0.007442474365234375, -0.01256561279296875, 0.01323699951171875, -0.028961181640625, -0.01526641845703125, 0.07537841796875, 0.00875091552734375, -0.041778564453125, 0.050933837890625, 0.00516510009765625, -0.0313720703125, 0.07061767578125, 0.054443359375, 0.0697021484375, -0.0243988037109375, -0.0103912353515625, 0.0303192138671875, 0.045379638671875, 0.0029773712158203125, 0.01959228515625, 0.002681732177734375, -0.028564453125, -0.01904296875, -0.057525634765625, -0.0313720703125, 0.01445770263671875, -0.054351806640625, 0.04278564453125, -0.021636962890625, -0.0197296142578125, -0.007198333740234375, -0.01184844970703125, -0.0802001953125, 0.02020263671875, -0.0006628036499023438, 0.059783935546875, -0.051025390625, 0.046417236328125, 0.046417236328125, -0.047393798828125, -0.0728759765625, -0.001964569091796875, -0.0218658447265625, -0.07757568359375, 0.03302001953125, 0.036346435546875, -0.007106781005859375, 0.015716552734375, -0.0540771484375, -0.0789794921875, 0.097900390625, 0.0297698974609375, -0.036407470703125, -0.00864410400390625, 0.01959228515625, 0.0295257568359375, -0.016143798828125, 0.036773681640625, 0.0290374755859375, 0.03924560546875, 0.00621795654296875, -0.053009033203125, 0.0149688720703125, -0.0162200927734375, 0.00201416015625, 0.01433563232421875, -0.060577392578125, 0.0775146484375, -0.02178955078125, -0.015472412109375, -0.005123138427734375, 0.0706787109375, 0.0050506591796875, 0.01385498046875, 0.02178955078125, 0.05303955078125, 0.0457763671875, -0.0216064453125, 0.07928466796875, -0.037628173828125, 0.034393310546875, 0.08135986328125, -0.007167816162109375, 0.054443359375, 0.0321044921875, -0.0118408203125, 0.024505615234375, 0.041961669921875, -0.03546142578125, 0.03802490234375, 0.0026607513427734375, 0.002986907958984375, 0.00838470458984375, 0.00447845458984375, -0.0394287109375, 0.0384521484375, 
0.021087646484375, -0.041656494140625, -0.007122039794921875, 0.0216064453125, 0.002620697021484375, -0.036041259765625, -0.024688720703125, 0.0628662109375, 0.0009279251098632812, -0.058197021484375, 0.0537109375, 0.0086669921875, 0.0533447265625, -0.043914794921875, -0.0071258544921875, -0.007335662841796875, 0.030487060546875, -0.0170440673828125, -0.04742431640625, 0.0181121826171875, -0.0029697418212890625, -0.027801513671875, 0.01421356201171875, 0.043304443359375, -0.020172119140625, -0.048370361328125, 0.0168609619140625, 0.0122833251953125, 0.021728515625, 0.0077972412109375, -0.037261962890625, 0.0266876220703125, -0.00701141357421875, -0.04437255859375, 0.0015544891357421875, 0.0189666748046875, 0.00110626220703125, 0.05059814453125, 0.037872314453125, -0.0275421142578125, 0.023681640625, 0.0093841552734375, 0.06719970703125, -0.04217529296875, -0.0241851806640625, -0.06610107421875, 0.0509033203125, 0.01056671142578125, -0.035919189453125, 0.06048583984375, 0.0311126708984375, 0.07208251953125, -0.024505615234375, 0.04339599609375, 0.0070037841796875, 0.01462554931640625, -0.033935546875, 0.045867919921875, -0.0286712646484375, 0.00878143310546875, -0.0002741813659667969, -0.08294677734375, -0.005615234375, 0.04241943359375, -0.02984619140625, 0.004627227783203125, 0.04852294921875, 0.0711669921875, -0.0282745361328125, -0.004695892333984375, 0.016265869140625, 0.03436279296875, 0.0172119140625, 0.03961181640625, 0.041717529296875, -0.05029296875, 0.04888916015625, -0.0290985107421875, -0.0197296142578125, -0.0281982421875, -0.041717529296875, -0.07269287109375, -0.032623291015625, -0.0091705322265625, -0.0274658203125, -0.01515960693359375, 0.05963134765625, 0.03167724609375, -0.053375244140625, -0.01227569580078125, 0.00482177734375, 0.0006303787231445312, -0.0250396728515625, -0.02386474609375, 0.0292816162109375, -0.021820068359375, -0.0648193359375, 0.01435089111328125, 0.0075225830078125, 0.01383209228515625, -0.033935546875, 0.00388336181640625, 
-0.0024700164794921875, 0.0180816650390625, 0.043975830078125, 0.0136871337890625, -0.060791015625, -0.005245208740234375, 0.01849365234375, -0.025634765625, 0.0167999267578125, 0.02691650390625, -0.055999755859375, 0.0147247314453125, 0.02130126953125, 0.042633056640625, 0.05224609375, -0.023681640625, 0.0234832763671875, -0.034454345703125, 0.027008056640625, 0.005344390869140625, 0.045654296875, 0.023101806640625, -0.041412353515625, 0.0369873046875, 0.00839996337890625, -0.045379638671875, -0.057037353515625, 0.00974273681640625, -0.09295654296875, 0.0019006729125976562, 0.08380126953125, -0.0037403106689453125, -0.05322265625, 0.01763916015625, -0.057342529296875, 0.0513916015625, -0.019134521484375, 0.0618896484375, 0.034698486328125, 0.005908966064453125, -0.0154571533203125, -0.03509521484375, 0.0122528076171875, 0.00882720947265625, -0.05865478515625, -0.01751708984375, 0.017822265625, 0.0165863037109375, 0.0166778564453125, 0.02752685546875, -0.005847930908203125, 0.01812744140625, 0.0013036727905273438, 0.034454345703125, -0.0256805419921875, -0.02508544921875, -0.0242767333984375, 0.0217132568359375, -0.018646240234375, -0.051727294921875 ] ]
waifu-diffusion/wd-1-5-beta3
2023-10-12T02:56:59.000Z
[ "diffusers", "license:other", "endpoints_compatible", "has_space", "diffusers:StableDiffusionPipeline", "region:us" ]
null
waifu-diffusion
null
null
waifu-diffusion/wd-1-5-beta3
112
5,820
diffusers
2023-05-14T22:59:44
--- license: other --- # WD 1.5 Beta 3 ![WD 1.5 Radiance](https://i.ibb.co/hYjgvGZ/00160-2195473148.png) For this release, we release five versions of the model: - WD 1.5 Beta3 Base - WD 1.5 Radiance - WD 1.5 Ink - WD 1.5 Mofu - WD 1.5 Illusion The WD 1.5 Base model is only intended for training use. For generation, it is recomended to create your own finetunes and loras on top of WD 1.5 Base or use one of the aesthetic models. More information and sample generations for the aesthetic models are in the release notes ### Release Notes https://saltacc.notion.site/WD-1-5-Beta-3-Release-Notes-1e35a0ed1bb24c5b93ec79c45c217f63 # VAE WD 1.5 uses the same VAE as WD 1.4, which can be found here https://huggingface.co/hakurei/waifu-diffusion-v1-4/blob/main/vae/kl-f8-anime2.ckpt ## License WD 1.5 is released under the Fair AI Public License 1.0-SD (https://freedevproject.org/faipl-1.0-sd/). If any derivative of this model is made, please share your changes accordingly. Special thanks to ronsor/undeleted (https://undeleted.ronsor.com/) for help with the license.
1,087
[ [ -0.057708740234375, -0.04022216796875, 0.0240478515625, 0.027008056640625, -0.03179931640625, -0.0295562744140625, 0.0218353271484375, -0.06005859375, 0.00595855712890625, 0.04632568359375, -0.0767822265625, -0.0540771484375, -0.0191650390625, -0.00504302978515625, -0.0198974609375, 0.0634765625, -0.007167816162109375, 0.006015777587890625, -0.022216796875, -0.031646728515625, -0.0350341796875, -0.0165252685546875, -0.032135009765625, -0.0271148681640625, 0.05718994140625, 0.018951416015625, 0.038970947265625, 0.01474761962890625, 0.05303955078125, 0.021575927734375, -0.01331329345703125, -0.01410675048828125, -0.03173828125, 0.01157379150390625, -0.0094451904296875, -0.0158233642578125, -0.06317138671875, 0.0272216796875, 0.0022602081298828125, 0.0404052734375, -0.0202484130859375, 0.0260467529296875, -0.006549835205078125, 0.0654296875, -0.052276611328125, 0.00144195556640625, 0.000316619873046875, 0.0164337158203125, -0.01184844970703125, 0.023162841796875, -0.0248565673828125, -0.0296630859375, 0.02349853515625, -0.074462890625, 0.024505615234375, -0.01554107666015625, 0.0806884765625, 0.03936767578125, -0.01146697998046875, 0.0190277099609375, -0.052276611328125, 0.029937744140625, -0.0654296875, 0.050689697265625, 0.00397491455078125, 0.048309326171875, 0.0029697418212890625, -0.086669921875, -0.024932861328125, -0.003116607666015625, 0.0230712890625, 0.0225372314453125, -0.0222625732421875, 0.01129150390625, 0.028900146484375, 0.046722412109375, -0.040924072265625, 0.003902435302734375, -0.053955078125, 0.004852294921875, 0.047821044921875, -0.0240478515625, -0.0032863616943359375, -0.00197601318359375, -0.042938232421875, -0.033782958984375, -0.06402587890625, -0.0203857421875, 0.030364990234375, 0.00432586669921875, -0.043304443359375, 0.02923583984375, -0.009185791015625, 0.0275115966796875, 0.011566162109375, -0.01152801513671875, 0.03582763671875, 0.0083770751953125, -0.0295867919921875, -0.02764892578125, 0.043792724609375, 0.0322265625, 
0.003070831298828125, 0.005207061767578125, -0.01148223876953125, -0.0015535354614257812, 0.01067352294921875, -0.0672607421875, 0.002227783203125, 0.0218963623046875, -0.05499267578125, -0.046875, 0.016082763671875, -0.033935546875, -0.0143280029296875, 0.00299072265625, 0.027587890625, -0.05145263671875, -0.03607177734375, 0.0101776123046875, -0.0256195068359375, 0.018310546875, 0.0411376953125, -0.056488037109375, 0.0262908935546875, 0.0313720703125, 0.0423583984375, 0.01032257080078125, -0.0076446533203125, 0.0275726318359375, -0.007587432861328125, -0.0244598388671875, 0.0292816162109375, -0.0015163421630859375, -0.05084228515625, -0.0012254714965820312, -0.002017974853515625, 0.0411376953125, -0.044342041015625, 0.043853759765625, -0.045135498046875, 0.0292816162109375, -0.01506805419921875, -0.04425048828125, -0.029571533203125, -0.00566864013671875, -0.064453125, 0.05364990234375, 0.05084228515625, -0.0648193359375, 0.044281005859375, -0.067138671875, 0.00095367431640625, 0.004241943359375, 0.0244903564453125, -0.03289794921875, 0.037109375, -0.0059814453125, -0.0010271072387695312, 0.00836944580078125, -0.03045654296875, -0.054718017578125, -0.0045318603515625, -0.00994110107421875, -0.038116455078125, 0.0677490234375, 0.06280517578125, -0.0080718994140625, 0.004116058349609375, -0.060302734375, 0.0018472671508789062, 0.0174713134765625, 0.006008148193359375, -0.00012731552124023438, 0.00075531005859375, 0.02911376953125, 0.030059814453125, 0.033477783203125, -0.038726806640625, 0.020538330078125, -0.0016574859619140625, 0.004642486572265625, 0.070556640625, 0.0196990966796875, 0.0025920867919921875, -0.039764404296875, 0.05523681640625, 0.038299560546875, 0.0548095703125, -0.0017948150634765625, -0.034393310546875, -0.047698974609375, -0.034149169921875, 0.024932861328125, 0.0119171142578125, -0.03955078125, 0.0247955322265625, 0.0053558349609375, -0.049591064453125, -0.051605224609375, -0.0151824951171875, 0.0306396484375, 0.0213775634765625, 
0.0079803466796875, -0.01132965087890625, -0.0399169921875, -0.08111572265625, 0.0186309814453125, -0.01031494140625, 0.0208587646484375, 0.0117645263671875, 0.030670166015625, -0.011383056640625, 0.08575439453125, -0.04608154296875, -0.006839752197265625, -0.01119232177734375, 0.0004775524139404297, 0.007648468017578125, 0.04608154296875, 0.07867431640625, -0.059326171875, -0.034393310546875, -0.01058197021484375, -0.05413818359375, -0.0080718994140625, 0.03631591796875, -0.01035308837890625, 0.021392822265625, 0.01275634765625, -0.037353515625, 0.036163330078125, 0.045135498046875, -0.04290771484375, 0.03350830078125, -0.0200653076171875, 0.0005064010620117188, -0.0814208984375, 0.003940582275390625, 0.03497314453125, -0.04095458984375, -0.0227508544921875, 0.033935546875, -0.004070281982421875, -0.018402099609375, -0.07080078125, 0.054412841796875, -0.028564453125, 0.0016202926635742188, 0.005512237548828125, -0.00930023193359375, 0.00864410400390625, 0.0157318115234375, -0.022918701171875, 0.03643798828125, 0.053955078125, -0.03839111328125, 0.0228118896484375, 0.0255126953125, -0.022186279296875, 0.02850341796875, -0.05242919921875, 0.00978851318359375, -0.03790283203125, 0.01053619384765625, -0.0301666259765625, -0.01422882080078125, 0.0267181396484375, -0.030029296875, 0.0210723876953125, 0.00004220008850097656, -0.0204010009765625, -0.04107666015625, -0.0245361328125, 0.00450897216796875, 0.07965087890625, -0.052734375, 0.00995635986328125, 0.00637054443359375, 0.016998291015625, -0.032928466796875, -0.041656494140625, -0.04852294921875, -0.02362060546875, -0.059814453125, 0.0184783935546875, -0.018707275390625, -0.040924072265625, 0.02142333984375, -0.01113128662109375, -0.00278472900390625, 0.002685546875, 0.030242919921875, 0.0325927734375, -0.020477294921875, -0.038238525390625, -0.004421234130859375, -0.008270263671875, -0.0135345458984375, -0.006694793701171875, 0.0265655517578125, 0.00576019287109375, -0.02587890625, -0.046539306640625, 
0.01497650146484375, 0.07470703125, -0.0303802490234375, 0.0411376953125, 0.03338623046875, -0.049102783203125, -0.0196990966796875, -0.0418701171875, -0.016693115234375, -0.038116455078125, 0.025146484375, -0.040985107421875, -0.03228759765625, 0.052093505859375, 0.0168914794921875, 0.0232696533203125, 0.046661376953125, 0.01062774658203125, -0.0023479461669921875, 0.101318359375, 0.07012939453125, 0.005260467529296875, 0.060394287109375, -0.048919677734375, -0.011627197265625, -0.06927490234375, -0.0224151611328125, -0.017364501953125, -0.03759765625, -0.048004150390625, -0.007633209228515625, 0.034881591796875, 0.0330810546875, -0.0231475830078125, 0.01513671875, -0.042388916015625, 0.0283050537109375, 0.0298004150390625, 0.033477783203125, 0.032958984375, 0.01508331298828125, 0.01126861572265625, -0.0243377685546875, -0.02264404296875, -0.035675048828125, 0.0775146484375, 0.039398193359375, 0.06500244140625, 0.0135040283203125, 0.057403564453125, 0.00922393798828125, 0.048858642578125, -0.0241546630859375, 0.03338623046875, -0.0240936279296875, -0.06402587890625, 0.007648468017578125, -0.0291900634765625, -0.03814697265625, 0.01727294921875, -0.0135345458984375, -0.044097900390625, 0.01983642578125, 0.0018062591552734375, 0.00012886524200439453, 0.012603759765625, -0.0645751953125, 0.08428955078125, 0.002716064453125, -0.0250396728515625, -0.0196685791015625, -0.0204315185546875, 0.042572021484375, 0.0184783935546875, -0.007061004638671875, -0.0297088623046875, 0.01116180419921875, 0.03094482421875, -0.06085205078125, 0.035919189453125, -0.046661376953125, -0.01171112060546875, 0.01392364501953125, -0.00798797607421875, 0.030792236328125, 0.002704620361328125, -0.01148223876953125, -0.001407623291015625, -0.0038928985595703125, -0.024749755859375, -0.03704833984375, 0.0517578125, -0.0731201171875, -0.0372314453125, -0.03912353515625, -0.0286407470703125, -0.0064239501953125, 0.0218353271484375, 0.06951904296875, 0.0506591796875, -0.0484619140625, 
-0.002170562744140625, 0.045867919921875, 0.0012159347534179688, 0.02899169921875, 0.038330078125, -0.0587158203125, -0.021484375, 0.047821044921875, 0.01052093505859375, 0.050567626953125, -0.005474090576171875, 0.0108642578125, -0.023468017578125, -0.040191650390625, -0.031951904296875, 0.0181121826171875, -0.051422119140625, -0.007251739501953125, -0.0189971923828125, -0.01482391357421875, -0.041778564453125, -0.0141448974609375, -0.044952392578125, -0.031707763671875, -0.028656005859375, -0.004421234130859375, 0.0589599609375, 0.0885009765625, -0.0111541748046875, 0.0028171539306640625, -0.022216796875, 0.0174102783203125, 0.0008864402770996094, 0.04193115234375, -0.021697998046875, -0.024993896484375, 0.00395965576171875, 0.00676727294921875, -0.057037353515625, -0.055450439453125, 0.029083251953125, -0.01125335693359375, 0.0215911865234375, 0.04754638671875, -0.006839752197265625, 0.0347900390625, -0.0269775390625, 0.058868408203125, 0.042388916015625, -0.03863525390625, 0.03271484375, -0.047088623046875, 0.0159912109375, 0.041534423828125, 0.0221405029296875, -0.0367431640625, -0.03997802734375, -0.086181640625, -0.0465087890625, 0.044830322265625, -0.001934051513671875, -0.00257110595703125, 0.00826263427734375, 0.0170745849609375, 0.022186279296875, 0.01035308837890625, -0.04644775390625, -0.0616455078125, -0.044769287109375, -0.01422882080078125, 0.021697998046875, 0.004901885986328125, 0.0032291412353515625, -0.027008056640625, 0.06585693359375, 0.01409912109375, 0.00026345252990722656, 0.03814697265625, 0.032989501953125, -0.0187835693359375, 0.0038661956787109375, 0.035064697265625, 0.061309814453125, -0.056732177734375, -0.0272674560546875, -0.01367950439453125, -0.04571533203125, -0.0019779205322265625, 0.004917144775390625, -0.026336669921875, 0.018035888671875, -0.0026607513427734375, 0.07476806640625, 0.007320404052734375, -0.0227203369140625, 0.046234130859375, 0.00008815526962280273, -0.019500732421875, -0.021270751953125, 0.020172119140625, 
0.0017185211181640625, 0.0186004638671875, 0.0199127197265625, 0.031585693359375, 0.013641357421875, -0.053253173828125, 0.0035800933837890625, 0.0271148681640625, -0.040924072265625, -0.02874755859375, 0.08258056640625, 0.0127410888671875, -0.01015472412109375, 0.03509521484375, -0.046661376953125, -0.0074615478515625, 0.04278564453125, 0.05511474609375, 0.07373046875, 0.0012140274047851562, 0.03369140625, 0.0350341796875, -0.005260467529296875, -0.01389312744140625, 0.038909912109375, 0.037841796875, -0.036865234375, -0.004756927490234375, -0.034332275390625, -0.027008056640625, 0.01178741455078125, -0.05633544921875, 0.042327880859375, -0.05023193359375, -0.0197601318359375, -0.01224517822265625, 0.0110931396484375, -0.046661376953125, 0.01442718505859375, 0.025146484375, 0.0989990234375, -0.04742431640625, 0.054962158203125, 0.043670654296875, -0.06805419921875, -0.0579833984375, -0.025634765625, 0.004787445068359375, -0.01543426513671875, 0.0282135009765625, -0.0215606689453125, 0.01526641845703125, 0.02874755859375, -0.07470703125, -0.0657958984375, 0.1005859375, 0.029144287109375, -0.033355712890625, 0.0009198188781738281, -0.0277252197265625, 0.02520751953125, -0.031890869140625, 0.0137939453125, 0.0212860107421875, 0.043701171875, 0.0218658447265625, -0.05523681640625, -0.0109100341796875, -0.024505615234375, 0.01442718505859375, -0.001800537109375, -0.06414794921875, 0.07440185546875, -0.031036376953125, -0.022613525390625, 0.03363037109375, 0.054443359375, 0.035125732421875, 0.0004987716674804688, 0.033172607421875, 0.0665283203125, 0.053436279296875, -0.0203857421875, 0.1036376953125, -0.00445556640625, 0.037567138671875, 0.046875, -0.024322509765625, 0.04730224609375, 0.0203094482421875, -0.0184326171875, 0.046112060546875, 0.054901123046875, -0.0066986083984375, 0.06378173828125, -0.002994537353515625, -0.03216552734375, 0.02178955078125, -0.0006437301635742188, -0.060455322265625, -0.00479888916015625, -0.0019512176513671875, -0.0069122314453125, 
-0.01508331298828125, 0.02117919921875, -0.004978179931640625, -0.0177764892578125, -0.01983642578125, 0.0433349609375, -0.0017242431640625, -0.0256195068359375, 0.0355224609375, 0.00812530517578125, 0.05682373046875, -0.0535888671875, -0.007068634033203125, -0.043731689453125, 0.02203369140625, -0.028076171875, -0.027008056640625, -0.00778961181640625, 0.001567840576171875, -0.031585693359375, -0.01302337646484375, 0.034149169921875, -0.006839752197265625, -0.05712890625, 0.01053619384765625, 0.033050537109375, -0.0004887580871582031, 0.02313232421875, -0.04913330078125, 0.022674560546875, -0.0104827880859375, -0.0377197265625, 0.004802703857421875, 0.026641845703125, 0.0024013519287109375, 0.055999755859375, 0.02593994140625, 0.02117919921875, 0.020263671875, 0.02716064453125, 0.054840087890625, -0.02874755859375, -0.0298004150390625, -0.031463623046875, 0.05303955078125, -0.004703521728515625, -0.059417724609375, 0.047119140625, 0.055816650390625, 0.0740966796875, -0.0340576171875, 0.07391357421875, -0.0267791748046875, 0.0276947021484375, -0.0297088623046875, 0.06103515625, -0.0670166015625, -0.0021915435791015625, -0.034027099609375, -0.080810546875, 0.01126861572265625, 0.054229736328125, 0.01302337646484375, 0.0167388916015625, 0.06304931640625, 0.050262451171875, -0.0204925537109375, 0.00952911376953125, 0.01751708984375, 0.0179901123046875, 0.026519775390625, 0.00963592529296875, 0.046661376953125, -0.05596923828125, 0.0014200210571289062, -0.05096435546875, -0.015228271484375, -0.004405975341796875, -0.048309326171875, -0.0273590087890625, -0.042449951171875, -0.037506103515625, -0.043304443359375, 0.0027027130126953125, 0.073974609375, 0.083984375, -0.0299530029296875, 0.00408935546875, -0.00823974609375, 0.004878997802734375, -0.02020263671875, -0.019744873046875, 0.00382232666015625, -0.0053863525390625, -0.0814208984375, 0.0123748779296875, 0.005474090576171875, 0.04034423828125, -0.048828125, -0.0170745849609375, 0.0074462890625, -0.017578125, 
0.0260467529296875, 0.0210723876953125, -0.04644775390625, -0.039398193359375, -0.004802703857421875, -0.0220794677734375, 0.003810882568359375, 0.031707763671875, -0.042510986328125, 0.005115509033203125, 0.034576416015625, -0.01200103759765625, 0.05670166015625, -0.0020465850830078125, 0.034332275390625, -0.05902099609375, 0.04412841796875, 0.007755279541015625, 0.039306640625, 0.017333984375, -0.0025539398193359375, 0.0579833984375, 0.0211029052734375, -0.0219879150390625, -0.057159423828125, 0.0330810546875, -0.0802001953125, -0.0220947265625, 0.06353759765625, 0.0216217041015625, -0.029144287109375, 0.0239410400390625, -0.0216522216796875, 0.005039215087890625, -0.014251708984375, 0.022430419921875, 0.051666259765625, 0.01227569580078125, -0.0203399658203125, -0.0635986328125, 0.0445556640625, -0.00778961181640625, -0.060882568359375, -0.0034656524658203125, 0.0570068359375, 0.039703369140625, 0.0108642578125, 0.05560302734375, -0.04412841796875, 0.0213775634765625, 0.01186370849609375, 0.041656494140625, -0.0193939208984375, -0.0175628662109375, -0.01079559326171875, -0.0066070556640625, 0.0153350830078125, -0.0179290771484375 ] ]
Mikivis/gpt2-large-lora-stf4
2023-09-06T19:47:05.000Z
[ "transformers", "pytorch", "gpt2", "text-generation", "generated_from_trainer", "dataset:customized", "endpoints_compatible", "text-generation-inference", "region:us" ]
text-generation
Mikivis
null
null
Mikivis/gpt2-large-lora-stf4
0
5,820
transformers
2023-09-06T11:47:32
--- base_model: /data1/sunhaoyu/LMFlow/output_models/gpt2-large-lora-sft3 tags: - generated_from_trainer datasets: - customized model-index: - name: gpt2-large-lora-sft4 results: [] --- <!-- This model card has been generated automatically according to the information the Trainer had access to. You should probably proofread and complete it, then remove this comment. --> # gpt2-large-lora-sft4 This model is a fine-tuned version of [/data1/sunhaoyu/LMFlow/output_models/gpt2-large-lora-sft3](https://huggingface.co//data1/sunhaoyu/LMFlow/output_models/gpt2-large-lora-sft3) on the customized dataset. ## Model description More information needed ## Intended uses & limitations More information needed ## Training and evaluation data More information needed ## Training procedure ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 8e-05 - train_batch_size: 1 - eval_batch_size: 8 - seed: 42 - distributed_type: multi-GPU - num_devices: 6 - total_train_batch_size: 6 - total_eval_batch_size: 48 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: linear - num_epochs: 1.0 ### Training results ### Framework versions - Transformers 4.32.1 - Pytorch 2.0.1+cu117 - Datasets 2.10.1 - Tokenizers 0.13.3
1,297
[ [ -0.038726806640625, -0.05908203125, 0.0248565673828125, 0.005809783935546875, -0.0384521484375, -0.0287017822265625, -0.00514984130859375, -0.0248260498046875, 0.01751708984375, 0.0341796875, -0.047515869140625, -0.026947021484375, -0.055908203125, -0.004573822021484375, -0.0208282470703125, 0.10345458984375, -0.01450347900390625, 0.016876220703125, 0.0015926361083984375, -0.0005064010620117188, -0.0217437744140625, -0.0361328125, -0.0830078125, -0.0589599609375, 0.0478515625, 0.0054473876953125, 0.05755615234375, 0.066162109375, 0.04486083984375, 0.0254058837890625, -0.014190673828125, -0.0038394927978515625, -0.04937744140625, -0.0311737060546875, -0.0038661956787109375, -0.0251922607421875, -0.063232421875, 0.005649566650390625, 0.059417724609375, 0.009521484375, -0.016204833984375, 0.035797119140625, 0.021575927734375, 0.0275115966796875, -0.029327392578125, 0.04302978515625, -0.0413818359375, 0.0276641845703125, -0.01461029052734375, -0.02813720703125, -0.01245880126953125, -0.0198822021484375, -0.01157379150390625, -0.043487548828125, 0.031219482421875, -0.0178680419921875, 0.08251953125, 0.043487548828125, -0.0182647705078125, 0.0140228271484375, -0.06378173828125, 0.0257568359375, -0.05523681640625, 0.00579833984375, 0.0207366943359375, 0.046783447265625, 0.00815582275390625, -0.0645751953125, -0.0316162109375, 0.00655364990234375, 0.0081024169921875, 0.0177764892578125, -0.0013523101806640625, 0.008636474609375, 0.061126708984375, 0.0248870849609375, -0.039886474609375, 0.0220947265625, -0.047332763671875, -0.01324462890625, 0.045867919921875, 0.031341552734375, -0.01763916015625, 0.0021514892578125, -0.033416748046875, -0.0017528533935546875, -0.037139892578125, -0.00655364990234375, 0.0435791015625, 0.02044677734375, -0.0224151611328125, 0.055084228515625, -0.018829345703125, 0.0426025390625, 0.001049041748046875, -0.0207977294921875, 0.0391845703125, -0.003437042236328125, -0.0391845703125, -0.005443572998046875, 0.06732177734375, 0.023834228515625, 
0.0173187255859375, 0.0125579833984375, -0.020782470703125, -0.004596710205078125, 0.006229400634765625, -0.08160400390625, -0.0245208740234375, 0.00894927978515625, -0.0277099609375, -0.045074462890625, 0.0070953369140625, -0.04302978515625, -0.003055572509765625, -0.02978515625, 0.045318603515625, -0.01171112060546875, -0.01032257080078125, -0.0035495758056640625, -0.0100555419921875, 0.025360107421875, 0.021697998046875, -0.061126708984375, 0.027435302734375, 0.039947509765625, 0.041961669921875, 0.0024852752685546875, -0.036407470703125, -0.036590576171875, 0.01558685302734375, -0.005859375, 0.04156494140625, 0.0018987655639648438, -0.0231170654296875, -0.0134735107421875, 0.01235198974609375, -0.007965087890625, -0.0323486328125, 0.060089111328125, -0.032958984375, 0.0240325927734375, -0.0216522216796875, -0.04052734375, -0.01053619384765625, 0.016754150390625, -0.04888916015625, 0.078369140625, 0.0187530517578125, -0.06982421875, 0.035064697265625, -0.0616455078125, -0.00983428955078125, 0.01751708984375, -0.0178375244140625, -0.06048583984375, -0.01100921630859375, -0.0017881393432617188, 0.03570556640625, -0.020904541015625, 0.0294036865234375, -0.029998779296875, -0.048309326171875, -0.0066375732421875, -0.0462646484375, 0.046478271484375, 0.0193634033203125, -0.033905029296875, 0.0218505859375, -0.06982421875, 0.023101806640625, 0.0278472900390625, -0.045745849609375, 0.0260772705078125, -0.023651123046875, 0.03228759765625, 0.020904541015625, 0.02850341796875, -0.036407470703125, 0.0214691162109375, -0.009521484375, 0.043365478515625, 0.0572509765625, 0.00003832578659057617, -0.008636474609375, -0.01349639892578125, 0.0228729248046875, 0.00665283203125, 0.033416748046875, 0.032562255859375, -0.049957275390625, -0.055877685546875, -0.0178070068359375, 0.00848388671875, 0.0323486328125, -0.043487548828125, 0.05206298828125, -0.01262664794921875, -0.046539306640625, -0.007678985595703125, 0.02008056640625, 0.0252685546875, 0.039794921875, 0.025543212890625, 
-0.01337432861328125, -0.0270538330078125, -0.0733642578125, -0.0003247261047363281, 0.0006985664367675781, 0.0185699462890625, 0.00942230224609375, 0.05999755859375, -0.0272064208984375, 0.05865478515625, -0.060882568359375, -0.0183868408203125, -0.015869140625, -0.0008955001831054688, 0.035400390625, 0.047515869140625, 0.05108642578125, -0.028961181640625, -0.0229339599609375, -0.00897979736328125, -0.0555419921875, 0.01873779296875, 0.00798797607421875, -0.0201568603515625, -0.004787445068359375, 0.024749755859375, -0.051513671875, 0.045989990234375, 0.02569580078125, -0.029632568359375, 0.0390625, -0.039276123046875, -0.01177978515625, -0.08837890625, 0.01447296142578125, 0.011688232421875, -0.01326751708984375, -0.016448974609375, 0.0126495361328125, -0.0043792724609375, -0.0175323486328125, -0.03265380859375, 0.04583740234375, -0.00005829334259033203, 0.0058441162109375, -0.0335693359375, -0.02496337890625, -0.0039043426513671875, 0.052886962890625, 0.014404296875, 0.044036865234375, 0.040802001953125, -0.037139892578125, 0.04638671875, 0.029510498046875, -0.0181427001953125, 0.026092529296875, -0.075439453125, 0.01192474365234375, 0.0018262863159179688, 0.0225982666015625, -0.05511474609375, -0.032958984375, 0.047088623046875, -0.017730712890625, 0.019775390625, -0.0206451416015625, -0.048309326171875, -0.043121337890625, 0.0016736984252929688, 0.033905029296875, 0.04803466796875, -0.052032470703125, 0.028656005859375, -0.002017974853515625, 0.0198516845703125, -0.03155517578125, -0.054412841796875, -0.0300445556640625, -0.02398681640625, -0.0282745361328125, 0.0026264190673828125, -0.0077056884765625, 0.022705078125, -0.0107574462890625, 0.0039520263671875, -0.023773193359375, -0.01064300537109375, 0.03216552734375, 0.0230865478515625, -0.0193023681640625, -0.020263671875, -0.00252532958984375, -0.02587890625, 0.03277587890625, -0.00601959228515625, 0.03839111328125, 0.000270843505859375, -0.028167724609375, -0.0543212890625, -0.00490570068359375, 
0.028961181640625, -0.0025424957275390625, 0.060089111328125, 0.09600830078125, -0.040618896484375, 0.0031890869140625, -0.0288543701171875, -0.0104827880859375, -0.0361328125, 0.049652099609375, -0.035552978515625, -0.03387451171875, 0.042724609375, 0.01207733154296875, 0.0033435821533203125, 0.0673828125, 0.04638671875, 0.0175018310546875, 0.08502197265625, 0.017425537109375, -0.01316070556640625, 0.0316162109375, -0.06109619140625, -0.00037026405334472656, -0.060699462890625, -0.0218963623046875, -0.032623291015625, -0.0195159912109375, -0.053009033203125, -0.0130157470703125, 0.0167083740234375, 0.0016164779663085938, -0.062042236328125, 0.0205078125, -0.035919189453125, 0.034576416015625, 0.048828125, 0.03289794921875, 0.0009708404541015625, 0.01678466796875, 0.0014781951904296875, 0.008209228515625, -0.05987548828125, -0.0193939208984375, 0.083740234375, 0.047515869140625, 0.049041748046875, -0.0026416778564453125, 0.055023193359375, -0.01241302490234375, 0.01165771484375, -0.044708251953125, 0.037628173828125, 0.004238128662109375, -0.056671142578125, -0.020355224609375, -0.04339599609375, -0.0567626953125, 0.0170135498046875, -0.024444580078125, -0.04071044921875, -0.0124053955078125, 0.028228759765625, -0.0248260498046875, 0.047149658203125, -0.046875, 0.08209228515625, -0.0162353515625, -0.04638671875, -0.0084381103515625, -0.0265960693359375, 0.02581787109375, -0.004199981689453125, -0.0185699462890625, -0.00006556510925292969, 0.00959014892578125, 0.06494140625, -0.06646728515625, 0.050994873046875, -0.036468505859375, -0.00010126829147338867, 0.035400390625, -0.035552978515625, 0.058868408203125, 0.02178955078125, 0.0005593299865722656, 0.006526947021484375, -0.01163482666015625, -0.052093505859375, -0.0297393798828125, 0.0703125, -0.09716796875, -0.018280029296875, -0.041595458984375, -0.0235748291015625, -0.01313018798828125, 0.0157470703125, 0.05462646484375, 0.038116455078125, -0.006336212158203125, 0.0031585693359375, 0.033203125, 
0.0074005126953125, 0.0290679931640625, -0.001617431640625, -0.004489898681640625, -0.044952392578125, 0.06884765625, -0.00899505615234375, 0.01338958740234375, -0.0027618408203125, 0.01678466796875, -0.0272674560546875, -0.0372314453125, -0.0296630859375, 0.03173828125, -0.05609130859375, -0.0164337158203125, -0.0239105224609375, -0.048583984375, -0.0193939208984375, 0.0205078125, -0.033905029296875, -0.01039886474609375, -0.042724609375, -0.0102996826171875, 0.041778564453125, 0.054901123046875, 0.005451202392578125, 0.055694580078125, -0.04620361328125, 0.010772705078125, 0.0285491943359375, 0.04541015625, -0.00038623809814453125, -0.0655517578125, -0.026702880859375, 0.002094268798828125, -0.03656005859375, -0.034423828125, 0.02783203125, 0.004730224609375, 0.038848876953125, 0.0462646484375, -0.0361328125, 0.06201171875, -0.02459716796875, 0.055450439453125, 0.0279388427734375, -0.037506103515625, 0.0277862548828125, -0.034454345703125, 0.0264892578125, 0.0300445556640625, 0.0264892578125, 0.00974273681640625, 0.005916595458984375, -0.07989501953125, -0.05389404296875, 0.055206298828125, 0.0269927978515625, 0.0091705322265625, 0.0160980224609375, 0.04302978515625, 0.002407073974609375, 0.01551055908203125, -0.06854248046875, -0.03143310546875, -0.0191802978515625, 0.0016450881958007812, -0.0301513671875, -0.0220947265625, -0.014556884765625, -0.047637939453125, 0.0777587890625, -0.01580810546875, 0.035247802734375, -0.006214141845703125, 0.025482177734375, -0.014190673828125, -0.00945281982421875, 0.042236328125, 0.048583984375, -0.036956787109375, -0.0202789306640625, 0.01552581787109375, -0.0526123046875, -0.008544921875, 0.0220947265625, -0.003101348876953125, 0.00403594970703125, 0.017791748046875, 0.0911865234375, 0.002666473388671875, 0.00017547607421875, 0.02349853515625, -0.005191802978515625, -0.040802001953125, -0.031494140625, 0.028961181640625, -0.0025234222412109375, 0.0194244384765625, -0.0026264190673828125, 0.0296783447265625, 
0.0011510848999023438, -0.0208282470703125, -0.006038665771484375, 0.0228729248046875, -0.025787353515625, -0.0186614990234375, 0.0743408203125, 0.01093292236328125, -0.006580352783203125, 0.05328369140625, -0.00778961181640625, -0.0190277099609375, 0.04693603515625, 0.041778564453125, 0.0660400390625, -0.00013554096221923828, -0.001331329345703125, 0.060455322265625, -0.0013446807861328125, -0.0256805419921875, 0.034820556640625, 0.0089569091796875, -0.038177490234375, -0.0263824462890625, -0.038665771484375, -0.01202392578125, 0.04986572265625, -0.0703125, 0.0404052734375, -0.046051025390625, -0.03680419921875, 0.012054443359375, 0.0207366943359375, -0.0772705078125, 0.03179931640625, 0.00897979736328125, 0.072265625, -0.078125, 0.078369140625, 0.042877197265625, -0.037139892578125, -0.07623291015625, -0.025054931640625, -0.0121612548828125, -0.06866455078125, 0.04266357421875, -0.0111083984375, 0.0184173583984375, 0.006622314453125, -0.04754638671875, -0.06048583984375, 0.08953857421875, 0.0230255126953125, -0.039093017578125, -0.002613067626953125, 0.0220794677734375, 0.048431396484375, -0.0183563232421875, 0.03790283203125, 0.0226593017578125, 0.0247955322265625, 0.027435302734375, -0.06793212890625, -0.00562286376953125, -0.0211334228515625, 0.008392333984375, 0.0200653076171875, -0.060791015625, 0.07769775390625, -0.01220703125, 0.03607177734375, 0.0220794677734375, 0.0408935546875, 0.02777099609375, 0.00794219970703125, 0.0213470458984375, 0.05780029296875, 0.038116455078125, -0.0199737548828125, 0.0704345703125, -0.0188446044921875, 0.055328369140625, 0.09674072265625, -0.007110595703125, 0.0526123046875, 0.03472900390625, -0.0174102783203125, 0.0075531005859375, 0.0628662109375, -0.0310516357421875, 0.027313232421875, 0.0218505859375, 0.002414703369140625, -0.0188140869140625, 0.033233642578125, -0.061126708984375, 0.02471923828125, 0.0039825439453125, -0.0562744140625, -0.0177001953125, -0.0225372314453125, -0.00431060791015625, -0.0367431640625, 
-0.03509521484375, 0.040069580078125, -0.018157958984375, -0.03448486328125, 0.056793212890625, 0.005481719970703125, 0.032562255859375, -0.048828125, -0.00653839111328125, -0.00554656982421875, 0.03607177734375, -0.030303955078125, -0.04754638671875, 0.004703521728515625, -0.0013027191162109375, -0.0028781890869140625, 0.006946563720703125, 0.032745361328125, -0.0189056396484375, -0.051727294921875, 0.00530242919921875, 0.0300140380859375, 0.0292205810546875, -0.0071258544921875, -0.0771484375, 0.0009584426879882812, 0.0003581047058105469, -0.02484130859375, 0.0268402099609375, 0.0207366943359375, 0.006481170654296875, 0.033233642578125, 0.040771484375, 0.013824462890625, 0.0013103485107421875, 0.0198516845703125, 0.06805419921875, -0.0419921875, -0.0418701171875, -0.057220458984375, 0.02276611328125, -0.0099639892578125, -0.061126708984375, 0.03656005859375, 0.078369140625, 0.059112548828125, -0.02020263671875, 0.052581787109375, 0.01483917236328125, 0.0206756591796875, -0.039794921875, 0.04150390625, -0.0280914306640625, 0.0010213851928710938, -0.0248260498046875, -0.08892822265625, 0.011260986328125, 0.049835205078125, -0.0160064697265625, 0.027313232421875, 0.043182373046875, 0.0404052734375, -0.02325439453125, 0.010711669921875, 0.0119781494140625, 0.00986480712890625, 0.0166778564453125, 0.046875, 0.03369140625, -0.06646728515625, 0.030120849609375, -0.033233642578125, -0.0072479248046875, -0.01279449462890625, -0.053558349609375, -0.07159423828125, -0.0245208740234375, -0.04803466796875, -0.04852294921875, -0.00786590576171875, 0.06439208984375, 0.055694580078125, -0.04827880859375, -0.0231170654296875, -0.006084442138671875, -0.0267486572265625, -0.012115478515625, -0.0146636962890625, 0.036834716796875, -0.002765655517578125, -0.05419921875, -0.0180206298828125, -0.026885986328125, 0.0330810546875, -0.020294189453125, -0.0230255126953125, 0.0019521713256835938, -0.02569580078125, 0.03021240234375, 0.0169219970703125, -0.04248046875, -0.0267486572265625, 
-0.028228759765625, -0.00420379638671875, 0.0163726806640625, 0.0264434814453125, -0.04107666015625, -0.0015163421630859375, 0.0232391357421875, 0.01425933837890625, 0.05987548828125, -0.00440216064453125, 0.0199737548828125, -0.0439453125, 0.0286712646484375, -0.004322052001953125, 0.039947509765625, 0.0007681846618652344, -0.035980224609375, 0.049896240234375, 0.0300445556640625, -0.0462646484375, -0.046661376953125, -0.00948333740234375, -0.08270263671875, 0.01299285888671875, 0.10064697265625, 0.0058441162109375, -0.0242919921875, 0.0355224609375, -0.02581787109375, 0.028106689453125, -0.015899658203125, 0.03375244140625, 0.03973388671875, -0.015533447265625, 0.01248931884765625, -0.05010986328125, 0.0377197265625, 0.00021696090698242188, -0.06683349609375, -0.020355224609375, 0.022918701171875, 0.0494384765625, -0.005702972412109375, 0.03253173828125, -0.002655029296875, 0.0165557861328125, 0.002292633056640625, 0.00737762451171875, -0.02923583984375, -0.00958251953125, -0.0234527587890625, 0.004322052001953125, 0.0123443603515625, -0.0291748046875 ] ]
NewstaR/Morningstar-13b-hf
2023-09-16T00:51:12.000Z
[ "transformers", "pytorch", "llama", "text-generation", "llama2", "en", "endpoints_compatible", "text-generation-inference", "region:us" ]
text-generation
NewstaR
null
null
NewstaR/Morningstar-13b-hf
0
5,820
transformers
2023-09-13T15:55:38
--- language: - en tags: - llama - llama2 --- # MorningStar ![Morningstar](Morningstar.jpg) | Model | Average ⬆️ | ARC | HellaSwag | MMLU | TruthfulQA | |----------------------------------|------------|-------|-----------|-------|------------| | NewstaR/Morningstar-13b-hf 📑 | 59.93 | 59.04 | 81.93 | 54.63 | 44.12 | ## Model Details - Model name: MorningStar - Model type: LLaMa 2 (13 billion parameters) ## Intended Use - Text generation - Content creation - Conversational agent ## Capabilities MorningStar is optimized for natural language processing tasks like text generation and dialogue. It can produce fluent, coherent text across a variety of topics. ## Limitations - May generate incorrect or nonsensical text - Lacks true language understanding - Potential for generating biased or unsafe content ## Training Data Details on MorningStar's training data are unavailable. It was likely trained on a large corpus of text data scraped from the internet. ## Ethical Considerations - Large language models like MorningStar carry risks around bias, toxicity, and misinformation. - Model outputs should be monitored and filtered before use in real applications. - Avoid harmful or unethical prompts.
1,258
[ [ -0.0203399658203125, -0.062225341796875, 0.030548095703125, 0.03656005859375, -0.0265350341796875, -0.0012531280517578125, -0.00605010986328125, -0.04534912109375, 0.00403594970703125, 0.049468994140625, -0.040435791015625, -0.05535888671875, -0.058013916015625, 0.0056304931640625, -0.03564453125, 0.10150146484375, 0.0235137939453125, -0.0201263427734375, 0.0094146728515625, 0.0011148452758789062, -0.0124969482421875, -0.046234130859375, -0.078125, -0.01299285888671875, 0.0249176025390625, 0.0294189453125, 0.07421875, 0.0322265625, 0.04608154296875, 0.023345947265625, -0.0080108642578125, 0.0016651153564453125, -0.046539306640625, 0.0020427703857421875, 0.020050048828125, -0.00439453125, -0.047821044921875, -0.0040435791015625, 0.049041748046875, 0.034942626953125, -0.010711669921875, 0.0279998779296875, 0.0007853507995605469, 0.0545654296875, -0.026031494140625, 0.035430908203125, -0.0167083740234375, -0.0046539306640625, -0.016845703125, 0.004085540771484375, -0.01165008544921875, -0.04656982421875, 0.007049560546875, -0.051239013671875, -0.0016870498657226562, 0.0302276611328125, 0.09918212890625, 0.0215301513671875, -0.0271148681640625, -0.011077880859375, -0.054595947265625, 0.059539794921875, -0.07183837890625, 0.0218658447265625, 0.03277587890625, 0.0537109375, 0.0120697021484375, -0.04534912109375, -0.037811279296875, -0.0229644775390625, -0.004535675048828125, -0.0092010498046875, -0.035400390625, -0.0013132095336914062, 0.01302337646484375, 0.0287628173828125, -0.03277587890625, 0.00989532470703125, -0.0297088623046875, -0.0229644775390625, 0.049346923828125, 0.028106689453125, 0.0027446746826171875, -0.005191802978515625, -0.0164642333984375, -0.01033782958984375, -0.049224853515625, 0.006107330322265625, 0.052459716796875, 0.028045654296875, -0.027801513671875, 0.0443115234375, -0.04302978515625, 0.036773681640625, 0.00728607177734375, -0.0213623046875, 0.02886962890625, -0.039825439453125, -0.0296478271484375, -0.006397247314453125, 
0.0689697265625, 0.05126953125, 0.0168914794921875, 0.00223541259765625, -0.0258331298828125, -0.0081329345703125, 0.025848388671875, -0.0841064453125, -0.01081085205078125, 0.021270751953125, -0.03753662109375, -0.05523681640625, -0.0101776123046875, -0.0565185546875, -0.011932373046875, -0.0029468536376953125, 0.0121002197265625, -0.02301025390625, -0.0352783203125, 0.00919342041015625, 0.01336669921875, 0.01395416259765625, 0.0200042724609375, -0.057586669921875, 0.0196990966796875, 0.0163421630859375, 0.04840087890625, 0.0209197998046875, -0.0011005401611328125, -0.0027484893798828125, -0.0012683868408203125, -0.0187835693359375, 0.05908203125, -0.036590576171875, -0.038970947265625, -0.005401611328125, -0.011444091796875, 0.00702667236328125, -0.0225830078125, 0.06414794921875, -0.03289794921875, 0.034576416015625, -0.027099609375, -0.0157318115234375, -0.0404052734375, 0.020050048828125, -0.046966552734375, 0.0908203125, 0.00765228271484375, -0.0650634765625, 0.00783538818359375, -0.066162109375, 0.0005626678466796875, -0.005519866943359375, -0.0212554931640625, -0.033782958984375, -0.035430908203125, 0.013214111328125, 0.0517578125, -0.0275115966796875, 0.032958984375, 0.004230499267578125, -0.01551055908203125, 0.0240325927734375, -0.0389404296875, 0.05120849609375, 0.03369140625, -0.023162841796875, 0.01291656494140625, -0.0711669921875, -0.0113067626953125, 0.0310211181640625, -0.0238800048828125, -0.005611419677734375, -0.015960693359375, 0.033599853515625, 0.0093536376953125, 0.04168701171875, -0.037109375, -0.0165252685546875, -0.022552490234375, 0.004947662353515625, 0.048004150390625, 0.0096282958984375, 0.0030879974365234375, -0.046783447265625, 0.041778564453125, 0.00858306884765625, 0.00466156005859375, 0.015716552734375, -0.043060302734375, -0.07196044921875, -0.0059356689453125, 0.00955963134765625, 0.037017822265625, -0.027801513671875, 0.048431396484375, -0.0205078125, -0.05908203125, -0.0222625732421875, 0.006988525390625, 0.054931640625, 
0.055419921875, 0.011749267578125, 0.01224517822265625, -0.04541015625, -0.0966796875, -0.01181793212890625, -0.0183868408203125, 0.008575439453125, 0.0202178955078125, 0.037689208984375, -0.0310211181640625, 0.051422119140625, -0.04718017578125, -0.0013189315795898438, -0.03338623046875, 0.004230499267578125, 0.0248870849609375, 0.0195159912109375, 0.056488037109375, -0.045318603515625, -0.037841796875, -0.0086822509765625, -0.066650390625, -0.0121917724609375, -0.007129669189453125, -0.01312255859375, 0.006366729736328125, 0.0252227783203125, -0.041259765625, 0.01332855224609375, 0.0487060546875, -0.0531005859375, 0.08001708984375, 0.0036525726318359375, 0.0036773681640625, -0.09356689453125, 0.0275726318359375, 0.0311279296875, -0.0198974609375, -0.0435791015625, 0.0017747879028320312, -0.019073486328125, 0.0007176399230957031, -0.04351806640625, 0.04034423828125, -0.0015230178833007812, 0.0177154541015625, -0.01898193359375, -0.002941131591796875, 0.00975799560546875, 0.041290283203125, -0.01114654541015625, 0.089111328125, 0.0167388916015625, -0.055145263671875, 0.0158843994140625, 0.043060302734375, -0.0382080078125, 0.024627685546875, -0.06365966796875, -0.0021839141845703125, -0.00087738037109375, 0.0069122314453125, -0.05548095703125, -0.019012451171875, 0.0281524658203125, -0.032135009765625, -0.00977325439453125, 0.022735595703125, -0.0257720947265625, -0.005916595458984375, -0.0236053466796875, 0.01336669921875, 0.02801513671875, -0.0186004638671875, -0.00640869140625, 0.01654052734375, -0.033416748046875, -0.059844970703125, -0.036590576171875, 0.021148681640625, -0.01461029052734375, -0.053253173828125, 0.027862548828125, -0.00565338134765625, -0.0400390625, -0.004116058349609375, 0.023040771484375, -0.006031036376953125, -0.005779266357421875, 0.035980224609375, 0.0205535888671875, -0.033599853515625, -0.01184844970703125, 0.006694793701171875, -0.0234832763671875, 0.012237548828125, 0.044677734375, 0.03924560546875, -0.03875732421875, 
0.006145477294921875, -0.049041748046875, 0.012451171875, 0.041839599609375, 0.00818634033203125, 0.06304931640625, 0.02496337890625, -0.0287017822265625, 0.00882720947265625, -0.0457763671875, -0.01082611083984375, -0.04095458984375, 0.034912109375, -0.0183563232421875, -0.047210693359375, 0.046234130859375, 0.01457977294921875, 0.0223388671875, 0.06597900390625, 0.0386962890625, 0.0021495819091796875, 0.06707763671875, 0.0650634765625, -0.006683349609375, 0.031341552734375, -0.0275726318359375, 0.01352691650390625, -0.06353759765625, -0.0278472900390625, -0.049224853515625, -0.01410675048828125, -0.0298004150390625, -0.004886627197265625, -0.00728607177734375, -0.015960693359375, -0.053497314453125, 0.04058837890625, -0.03448486328125, 0.032958984375, 0.038909912109375, 0.00173187255859375, 0.038299560546875, -0.012298583984375, -0.0162811279296875, -0.0090484619140625, -0.0189971923828125, -0.08990478515625, 0.09039306640625, 0.0404052734375, 0.044281005859375, 0.0174713134765625, 0.0182952880859375, 0.0257415771484375, 0.01247406005859375, -0.0343017578125, 0.047943115234375, 0.0011396408081054688, -0.06842041015625, -0.01311492919921875, -0.0244140625, -0.04742431640625, 0.0221099853515625, -0.022247314453125, -0.03240966796875, -0.0102081298828125, 0.0017747879028320312, -0.04931640625, 0.03289794921875, -0.03704833984375, 0.06182861328125, -0.00885009765625, 0.006622314453125, -0.0024051666259765625, -0.0299224853515625, 0.045562744140625, -0.002910614013671875, 0.006511688232421875, -0.0323486328125, 0.00307464599609375, 0.06488037109375, -0.0275726318359375, 0.07861328125, -0.0037078857421875, 0.0022945404052734375, 0.0111236572265625, -0.002178192138671875, 0.047760009765625, 0.0036449432373046875, -0.0166015625, 0.0191192626953125, 0.0048065185546875, -0.0246734619140625, -0.0112762451171875, 0.044921875, -0.09051513671875, -0.037994384765625, -0.05230712890625, -0.0237274169921875, -0.01220703125, 0.0154571533203125, 0.03533935546875, 0.021331787109375, 
-0.03759765625, 0.020355224609375, 0.021759033203125, -0.0205535888671875, 0.034210205078125, 0.04241943359375, -0.032196044921875, -0.0212554931640625, 0.0638427734375, -0.005558013916015625, 0.00621795654296875, 0.00563812255859375, -0.00984954833984375, -0.0362548828125, -0.05950927734375, -0.02801513671875, 0.0156707763671875, -0.04241943359375, -0.0163726806640625, -0.057586669921875, -0.052490234375, -0.0126953125, -0.003910064697265625, -0.03521728515625, -0.03857421875, -0.052398681640625, -0.032562255859375, 0.0283660888671875, 0.059539794921875, 0.0232391357421875, 0.059814453125, -0.0478515625, 0.00942230224609375, 0.0335693359375, 0.0234222412109375, -0.0163421630859375, -0.05377197265625, -0.01629638671875, 0.0101470947265625, -0.058837890625, -0.046356201171875, 0.05523681640625, 0.01430511474609375, 0.036407470703125, 0.0474853515625, 0.01038360595703125, 0.0217132568359375, -0.01959228515625, 0.0648193359375, 0.0127105712890625, -0.04083251953125, 0.035400390625, -0.042510986328125, 0.013214111328125, 0.05804443359375, 0.028900146484375, -0.038848876953125, -0.045989990234375, -0.059600830078125, -0.040435791015625, 0.028594970703125, 0.05194091796875, 0.006481170654296875, 0.01422882080078125, 0.01348876953125, 0.01611328125, 0.026397705078125, -0.0733642578125, -0.0244903564453125, -0.028411865234375, -0.0253753662109375, -0.0066375732421875, -0.0310516357421875, -0.010589599609375, -0.0034885406494140625, 0.071044921875, 0.005115509033203125, 0.044219970703125, -0.01171112060546875, 0.0006537437438964844, -0.0066375732421875, 0.01488494873046875, 0.0633544921875, 0.0506591796875, -0.04534912109375, 0.0001386404037475586, 0.00399017333984375, -0.055999755859375, 0.00164031982421875, 0.002071380615234375, -0.006473541259765625, 0.0015459060668945312, 0.0220489501953125, 0.059417724609375, 0.018218994140625, -0.048187255859375, 0.0311279296875, -0.019989013671875, -0.01155853271484375, -0.04034423828125, 0.022369384765625, 0.0011091232299804688, 
0.028411865234375, 0.038726806640625, 0.0208740234375, 0.01605224609375, -0.00977325439453125, -0.0093994140625, 0.02362060546875, -0.0222320556640625, -0.037445068359375, 0.05169677734375, 0.03424072265625, -0.0258331298828125, 0.05242919921875, -0.0140533447265625, -0.018646240234375, 0.0582275390625, 0.0439453125, 0.0692138671875, 0.0049285888671875, 0.0286407470703125, 0.0400390625, 0.031341552734375, -0.00760650634765625, 0.0271148681640625, 0.033935546875, -0.068359375, -0.02972412109375, -0.062042236328125, -0.05718994140625, 0.039031982421875, -0.031402587890625, 0.01317596435546875, -0.062255859375, -0.03082275390625, 0.0154571533203125, 0.02520751953125, -0.046417236328125, 0.0364990234375, -0.004276275634765625, 0.07415771484375, -0.057952880859375, 0.063720703125, 0.0281524658203125, -0.048492431640625, -0.04205322265625, -0.01739501953125, 0.01296234130859375, -0.06988525390625, 0.039794921875, 0.03558349609375, -0.00926971435546875, -0.0033779144287109375, -0.041168212890625, -0.0703125, 0.08758544921875, 0.03497314453125, -0.0443115234375, -0.007537841796875, 0.01192474365234375, 0.0300750732421875, -0.0316162109375, 0.0296478271484375, 0.024871826171875, 0.041412353515625, 0.00945281982421875, -0.0587158203125, 0.0144500732421875, -0.038421630859375, -0.00202178955078125, -0.0027980804443359375, -0.06610107421875, 0.06353759765625, -0.006336212158203125, -0.0095062255859375, 0.01030731201171875, 0.05023193359375, 0.0350341796875, 0.005664825439453125, 0.01995849609375, 0.0372314453125, 0.039398193359375, -0.004009246826171875, 0.07879638671875, -0.025238037109375, 0.01495361328125, 0.06396484375, 0.00005614757537841797, 0.06689453125, 0.00931549072265625, -0.01500701904296875, 0.058746337890625, 0.06341552734375, 0.002285003662109375, 0.032470703125, -0.034332275390625, -0.0195770263671875, -0.00618743896484375, 0.00463104248046875, -0.04132080078125, 0.030548095703125, 0.021514892578125, -0.033721923828125, -0.019378662109375, -0.0322265625, 
0.02099609375, 0.007518768310546875, 0.0011186599731445312, 0.04449462890625, 0.0006690025329589844, -0.01611328125, 0.044342041015625, 0.019287109375, 0.056396484375, -0.03729248046875, 0.01314544677734375, -0.025299072265625, 0.006587982177734375, -0.004322052001953125, -0.05078125, 0.0396728515625, 0.0183563232421875, -0.007415771484375, -0.01690673828125, 0.0426025390625, 0.0109405517578125, -0.06842041015625, -0.00702667236328125, 0.023284912109375, 0.019134521484375, -0.03363037109375, -0.07342529296875, 0.01229095458984375, -0.0018129348754882812, -0.029754638671875, 0.01397705078125, 0.01052093505859375, 0.0209503173828125, 0.06353759765625, 0.059417724609375, -0.00342559814453125, -0.0093536376953125, -0.0162811279296875, 0.0650634765625, -0.080810546875, -0.04901123046875, -0.05792236328125, 0.03277587890625, -0.020477294921875, -0.0535888671875, 0.0645751953125, 0.051605224609375, 0.022735595703125, 0.006778717041015625, 0.054718017578125, 0.01038360595703125, 0.0433349609375, -0.03192138671875, 0.07684326171875, -0.019866943359375, 0.017730712890625, -0.00640106201171875, -0.08404541015625, -0.01558685302734375, 0.06365966796875, -0.018890380859375, 0.0201416015625, 0.04571533203125, 0.068115234375, 0.0015716552734375, 0.0269775390625, 0.028594970703125, 0.034912109375, 0.034942626953125, 0.032928466796875, 0.04473876953125, -0.044952392578125, 0.049102783203125, -0.0219268798828125, -0.01448822021484375, -0.037322998046875, -0.0650634765625, -0.07965087890625, -0.0304718017578125, -0.029144287109375, -0.0271759033203125, -0.0008511543273925781, 0.062469482421875, 0.054046630859375, -0.06878662109375, -0.0275726318359375, -0.00041866302490234375, -0.0022296905517578125, -0.004306793212890625, -0.01496124267578125, 0.01027679443359375, -0.01410675048828125, -0.04644775390625, 0.0226593017578125, 0.00738525390625, 0.0281524658203125, -0.046234130859375, -0.0175933837890625, -0.01302337646484375, 0.0178985595703125, 0.04150390625, 0.021026611328125, 
-0.047821044921875, -0.0264892578125, -0.0009660720825195312, -0.032623291015625, 0.0009713172912597656, 0.0027713775634765625, -0.0291595458984375, -0.01010894775390625, 0.00785064697265625, 0.03204345703125, 0.0288543701171875, -0.0104827880859375, 0.038055419921875, -0.057403564453125, 0.0218658447265625, 0.022613525390625, 0.01233673095703125, 0.02960205078125, -0.043243408203125, 0.0261077880859375, -0.00016021728515625, -0.0465087890625, -0.0499267578125, 0.0027179718017578125, -0.07586669921875, -0.01800537109375, 0.09307861328125, 0.0217742919921875, -0.016448974609375, -0.0008969306945800781, -0.0311737060546875, 0.0041656494140625, -0.061431884765625, 0.058319091796875, 0.0574951171875, -0.0023708343505859375, -0.00666046142578125, -0.0645751953125, 0.040008544921875, -0.0031528472900390625, -0.06292724609375, -0.004241943359375, 0.033447265625, 0.02215576171875, 0.00830078125, 0.04730224609375, -0.00428009033203125, 0.00942230224609375, 0.013580322265625, 0.011444091796875, -0.01280975341796875, -0.022857666015625, -0.0005130767822265625, 0.006275177001953125, 0.015380859375, -0.023529052734375 ] ]
ehartford/dolphin-llama2-7b
2023-08-10T05:42:09.000Z
[ "transformers", "pytorch", "llama", "text-generation", "en", "dataset:ehartford/dolphin", "license:llama2", "endpoints_compatible", "has_space", "text-generation-inference", "region:us" ]
text-generation
ehartford
null
null
ehartford/dolphin-llama2-7b
65
5,819
transformers
2023-08-06T23:24:01
--- license: llama2 datasets: - ehartford/dolphin language: - en --- Dolphin 🐬 https://erichartford.com/dolphin Dolphin-Llama2-7b's training was sponsored by [preemo](https://www.preemo.io/). This model is based on llama2, so it is suitable for commercial or non-commercial use. This model is uncensored. I have filtered the dataset to remove alignment and bias. This makes the model more compliant. You are advised to implement your own alignment layer before exposing the model as a service. It will be highly compliant to any requests, even unethical ones. Please read my blog post about uncensored models. https://erichartford.com/uncensored-models You are responsible for any content you create using this model. Enjoy responsibly. ## Dataset This dataset is an open-source implementation of [Microsoft's Orca](https://www.microsoft.com/en-us/research/publication/orca-progressive-learning-from-complex-explanation-traces-of-gpt-4/) After uncensoring, deduping, and cleaning, our dataset consists of: - 842,610 instructions of FLANv2 augmented with GPT-4 completions - 2,625,353 instructions of FLANv2 augmented with GPT-3.5 completions We followed the submix and system prompt distribution outlined in the Orca paper. With a few exceptions. We included all 75k of CoT in the FLAN-1m dataset rather than sampling that. Also, we found that many items were duplicated, so we removed duplicates. Then we filtered out instances of alignment, refusal, avoidance, and bias, in order to produce an uncensored model upon which can be layered your personalized alignment LoRA. We also filtered out duplicates and cleaned the data. ## Training We trained with the flan5m (gpt3.5 completions) dataset in its entirety for 3 epochs at a learning rate of 2e-5 before we stopped training to avoid overfit. We trained with the flan1m (gpt4 completions) dataset in its entirety for 2.5 epochs at a learning rate of 1e-5 before we stopped training to avoid overfit. 
It took about 400 hours to train on 8x A100s We used a prompt format similar to Vicuna, but we added the SYSTEM: field. Prompt format: ``` SYSTEM: You are an autoregressive language model that has been fine-tuned with instruction-tuning and RLHF. You carefully provide accurate, factual, thoughtful, nuanced answers, and are brilliant at reasoning. If you think there might not be a correct answer, you say so. Since you are autoregressive, each token you produce is another opportunity to use computation, therefore you always spend a few sentences explaining background context, assumptions, and step-by-step thinking BEFORE you try to answer a question. USER: {prompt} ASSISTANT: ``` Example: ``` SYSTEM: you are an expert dolphin trainer USER: What is the best way to train a dolphin to obey me? Please answer step by step. ASSISTANT: ``` ## Team The core Dolphin Team includes: - Eric "Faldore" Hartford - Pankaj Mathur - Rob "Rohan" O'Callahan - Tom "TheBloke" Jobbins ## Gratitude - This model was made possible by the generous sponsorship of preemo.io. - Thank you to Microsoft for authoring the Orca paper and inspiring this work. - Special thanks to WingLian, NanoBit, Teknium for helpful advice - Thank you to all the other people in the Open Source AI community who have taught me and helped me along the way. ## Example Output ``` SYSTEM: you are an expert mechanic USER: Please tell me step by step how to fix my car that won't start. ASSISTANT: To help your vehicle start, I will guide you through a step-by-step process on what could be causing the issue and how to resolve it. 1. Check for obvious issues: First, inspect the exterior of the car to identify any visible problems such as damage or obstructions which might affect its ability to start. Make sure there is no debris in the air intakes or exhaust system. 2. Inspect the battery: The most common reason a car may not start is due to a dead or faulty battery. 
Use a battery charger or jump starter to give the battery a boost if needed. If the battery is indeed dead, replace it with a new one. 3. Check the ignition switch: The ignition switch must be turned on to provide power to the spark plugs. Ensure it is in the "Start" position. 4. Test the spark plugs: One of the spark plugs should produce a strong spark when touched together. If all spark plugs fail to create a spark, they need to be replaced. 5. Examine the fuel system: Fuel can become contaminated or low, preventing the engine from starting. Check the fuel level and quality by looking at the dipstick or using a fuel sensor scan tool. Also, examine the fuel pump for proper functioning. 6. Assess the electrical system: A malfunction within the vehicle's electrical system can also cause difficulty in starting the car. Perform a diagnostic scan to check for any error codes related to the starter motor, alternator, or other components. 7. Inspect the starter motor: The starter motor should operate smoothly when you turn the key to the "start" position. If you hear a sizzling sound while pressing the button, the starter motor may have a problem. Replace it if necessary. 8. Evaluate the timing belt/chain: Timing belts or chains that are worn out or damaged can lead to poor performance and difficulty starting the engine. Have a mechanic inspect this component and replace it if required. 9. Consider checking for a mechanical failure: In rare cases, a significant mechanical failure could prevent the vehicle from starting. Consult a professional mechanic if you suspect this might be the case. By following these steps, you should be able to diagnose and potentially fix the issue causing your car to not start. However, if after going through these checks and still having trouble, it is recommended to seek assistance from a qualified mechanic. ``` [Buy me a coffee](https://www.buymeacoffee.com/ehartford)
5,852
[ [ -0.030914306640625, -0.045196533203125, 0.0411376953125, 0.0137176513671875, -0.024261474609375, 0.004817962646484375, 0.00689697265625, -0.055023193359375, -0.0029811859130859375, 0.0296630859375, -0.056243896484375, -0.00384521484375, -0.05462646484375, 0.00518035888671875, -0.006916046142578125, 0.07965087890625, -0.01369476318359375, 0.01800537109375, 0.0200347900390625, -0.0046844482421875, -0.043304443359375, -0.004528045654296875, -0.10137939453125, -0.0019130706787109375, 0.0298004150390625, 0.031982421875, 0.04345703125, 0.0692138671875, 0.0184326171875, 0.029693603515625, 0.0008625984191894531, 0.02447509765625, -0.031494140625, 0.005615234375, -0.01206207275390625, -0.028472900390625, -0.040618896484375, 0.011138916015625, 0.00284576416015625, 0.035552978515625, -0.01189422607421875, 0.007701873779296875, 0.006591796875, 0.040374755859375, -0.0367431640625, 0.031097412109375, -0.0166473388671875, -0.001312255859375, -0.01263427734375, -0.0047760009765625, 0.00516510009765625, -0.03131103515625, -0.012359619140625, -0.06939697265625, 0.013336181640625, 0.0012044906616210938, 0.0721435546875, 0.025726318359375, -0.0275115966796875, -0.03240966796875, -0.04638671875, 0.058258056640625, -0.051666259765625, 0.02825927734375, 0.03887939453125, 0.01849365234375, -0.03314208984375, -0.0281829833984375, -0.0423583984375, -0.0347900390625, 0.0190887451171875, 0.0154876708984375, 0.0166778564453125, -0.0195465087890625, 0.0296478271484375, 0.0333251953125, -0.053070068359375, 0.00836181640625, -0.01456451416015625, -0.01580810546875, 0.042816162109375, 0.006618499755859375, 0.033233642578125, 0.0126190185546875, -0.0396728515625, -0.032623291015625, -0.038818359375, 0.01383209228515625, 0.044342041015625, 0.034759521484375, -0.01123046875, 0.05560302734375, -0.002750396728515625, 0.052398681640625, 0.0191802978515625, -0.032989501953125, 0.00597381591796875, -0.0235137939453125, -0.0289764404296875, 0.0242462158203125, 0.0504150390625, 0.0213470458984375, 
0.03240966796875, 0.006610870361328125, -0.0254669189453125, -0.022857666015625, 0.0199432373046875, -0.028472900390625, -0.00034737586975097656, 0.0369873046875, -0.03369140625, -0.030517578125, 0.0024127960205078125, -0.040863037109375, -0.021942138671875, -0.036529541015625, 0.041046142578125, -0.0506591796875, -0.0171661376953125, 0.01212310791015625, -0.0032215118408203125, 0.0281219482421875, 0.0198974609375, -0.0819091796875, 0.01486968994140625, 0.03167724609375, 0.05572509765625, 0.01763916015625, -0.0272369384765625, -0.022003173828125, 0.0240631103515625, -0.02703857421875, 0.048004150390625, 0.0136260986328125, -0.0248565673828125, -0.01189422607421875, 0.0247955322265625, -0.00380706787109375, -0.03448486328125, 0.0178985595703125, -0.03924560546875, 0.0251007080078125, -0.039306640625, -0.041290283203125, -0.005413055419921875, 0.00614166259765625, -0.051177978515625, 0.07232666015625, 0.005702972412109375, -0.060150146484375, 0.0254669189453125, -0.0478515625, -0.050140380859375, -0.01016998291015625, -0.01059722900390625, -0.039825439453125, 0.0029468536376953125, 0.0202178955078125, 0.0180511474609375, -0.02178955078125, 0.0043487548828125, -0.0146331787109375, -0.006992340087890625, -0.0014905929565429688, -0.0013418197631835938, 0.105224609375, 0.010833740234375, -0.042938232421875, -0.0189056396484375, -0.08538818359375, -0.00384521484375, 0.040374755859375, -0.0092620849609375, -0.0211334228515625, -0.0029544830322265625, -0.019805908203125, 0.007732391357421875, 0.04168701171875, -0.046112060546875, 0.0290069580078125, -0.030792236328125, 0.0139923095703125, 0.0640869140625, 0.00992584228515625, 0.0157012939453125, -0.036956787109375, 0.04248046875, 0.006008148193359375, 0.014862060546875, 0.0031681060791015625, -0.064208984375, -0.0677490234375, 0.001667022705078125, -0.01152801513671875, 0.05133056640625, -0.04461669921875, 0.042816162109375, 0.0269012451171875, -0.037628173828125, -0.0244293212890625, -0.005748748779296875, 
0.039337158203125, 0.056365966796875, 0.04290771484375, -0.03363037109375, -0.043304443359375, -0.064697265625, 0.00269317626953125, -0.0162811279296875, 0.002559661865234375, 0.021697998046875, 0.0330810546875, 0.0142822265625, 0.062225341796875, -0.0333251953125, -0.0426025390625, -0.0182647705078125, 0.003917694091796875, 0.0246734619140625, 0.056610107421875, 0.038299560546875, -0.03564453125, -0.022796630859375, 0.00754547119140625, -0.0792236328125, 0.00395965576171875, -0.007106781005859375, -0.00031685829162597656, 0.0196075439453125, 0.03515625, -0.033477783203125, 0.030975341796875, 0.01540374755859375, -0.027984619140625, 0.034332275390625, -0.0396728515625, 0.0078277587890625, -0.06573486328125, 0.017852783203125, -0.0139617919921875, -0.0115203857421875, -0.018402099609375, -0.0059967041015625, -0.0192718505859375, -0.018341064453125, -0.033294677734375, 0.0155792236328125, -0.042022705078125, -0.01384735107421875, -0.027435302734375, -0.0016107559204101562, -0.0091552734375, 0.01580810546875, 0.007434844970703125, 0.07147216796875, 0.0386962890625, -0.068115234375, 0.0118560791015625, 0.0254058837890625, -0.034393310546875, 0.019775390625, -0.06903076171875, 0.0237274169921875, -0.0181121826171875, -0.00720977783203125, -0.07672119140625, -0.0262298583984375, 0.037689208984375, -0.01509857177734375, 0.0511474609375, 0.0042572021484375, -0.031097412109375, -0.0048065185546875, 0.0140533447265625, 0.0225982666015625, 0.025909423828125, -0.031494140625, 0.0225372314453125, 0.02716064453125, -0.00701904296875, -0.04754638671875, -0.038177490234375, -0.00875091552734375, -0.00025916099548339844, -0.034912109375, 0.0276031494140625, 0.00787353515625, -0.0031986236572265625, -0.0218658447265625, -0.00293731689453125, -0.006595611572265625, 0.03863525390625, 0.00711822509765625, 0.0153656005859375, -0.0102386474609375, 0.0016489028930664062, -0.000050187110900878906, -0.028076171875, 0.0043487548828125, -0.03143310546875, 0.040313720703125, 
-0.0143890380859375, -0.0267181396484375, -0.04638671875, 0.01029205322265625, 0.0489501953125, -0.006313323974609375, 0.043487548828125, 0.02679443359375, -0.0177764892578125, 0.0135955810546875, -0.035491943359375, -0.0164031982421875, -0.032562255859375, 0.01035308837890625, -0.0129547119140625, -0.032470703125, 0.03350830078125, 0.015716552734375, 0.0042266845703125, 0.0310516357421875, 0.04840087890625, -0.0204010009765625, 0.0836181640625, 0.059722900390625, -0.00904083251953125, 0.03167724609375, -0.02642822265625, 0.017242431640625, -0.03387451171875, -0.058013916015625, -0.03143310546875, -0.01395416259765625, -0.04541015625, 0.00887298583984375, 0.0191802978515625, 0.00487518310546875, -0.0491943359375, 0.04852294921875, -0.039703369140625, 0.0299224853515625, 0.025848388671875, 0.00943756103515625, 0.0203094482421875, -0.003856658935546875, -0.0214996337890625, -0.0009312629699707031, -0.04901123046875, -0.04754638671875, 0.0806884765625, 0.02978515625, 0.0631103515625, 0.017578125, 0.056243896484375, 0.017578125, 0.01490020751953125, -0.043701171875, 0.0360107421875, -0.00004851818084716797, -0.06231689453125, -0.035980224609375, -0.00870513916015625, -0.08795166015625, -0.01328277587890625, 0.0013628005981445312, -0.05145263671875, 0.036529541015625, 0.00821685791015625, -0.0261993408203125, 0.004993438720703125, -0.045166015625, 0.07012939453125, -0.054901123046875, -0.024078369140625, 0.02667236328125, -0.0543212890625, 0.053070068359375, -0.0013036727905273438, 0.0155487060546875, -0.0204620361328125, -0.0178375244140625, 0.0462646484375, -0.0675048828125, 0.06427001953125, -0.029571533203125, -0.0181121826171875, 0.0287322998046875, -0.004558563232421875, 0.029266357421875, 0.005016326904296875, 0.00771331787109375, 0.00019991397857666016, -0.007801055908203125, -0.034454345703125, -0.032623291015625, 0.037353515625, -0.08734130859375, -0.0215301513671875, -0.056060791015625, 0.005825042724609375, -0.00896453857421875, 0.03515625, 
0.042694091796875, 0.01436614990234375, -0.0017852783203125, -0.0268402099609375, 0.053314208984375, -0.007640838623046875, 0.046875, 0.040740966796875, -0.027008056640625, -0.033233642578125, 0.054901123046875, 0.005794525146484375, 0.00815582275390625, 0.00685882568359375, 0.01111602783203125, -0.03472900390625, -0.0294342041015625, -0.054931640625, 0.0247344970703125, -0.036865234375, -0.02178955078125, -0.02618408203125, -0.006847381591796875, -0.031158447265625, 0.0229949951171875, -0.05731201171875, -0.0233154296875, -0.04388427734375, 0.01122283935546875, 0.0291290283203125, 0.06805419921875, -0.006214141845703125, 0.028533935546875, -0.047210693359375, 0.0140380859375, 0.02239990234375, 0.001087188720703125, 0.0230712890625, -0.0421142578125, -0.00511932373046875, -0.0016374588012695312, -0.03668212890625, -0.06494140625, 0.0310516357421875, 0.00290679931640625, 0.046112060546875, 0.0565185546875, -0.00787353515625, 0.06671142578125, -0.033050537109375, 0.0623779296875, 0.0113983154296875, -0.0670166015625, 0.056396484375, -0.0208740234375, -0.0107269287109375, 0.05419921875, 0.0599365234375, -0.00970458984375, -0.027923583984375, -0.058197021484375, -0.05621337890625, 0.04730224609375, 0.0280609130859375, -0.0116729736328125, -0.0151214599609375, 0.04132080078125, 0.00391387939453125, 0.01512908935546875, -0.056854248046875, -0.03582763671875, -0.045562744140625, 0.02301025390625, -0.0213470458984375, 0.011688232421875, 0.013580322265625, -0.0192718505859375, 0.07855224609375, -0.0015630722045898438, 0.036041259765625, 0.03399658203125, -0.007297515869140625, -0.006641387939453125, -0.00818634033203125, 0.064208984375, 0.05267333984375, -0.0247650146484375, -0.01102447509765625, 0.0010271072387695312, -0.033233642578125, 0.042816162109375, -0.00106048583984375, -0.0011301040649414062, -0.0103607177734375, 0.0272064208984375, 0.044097900390625, 0.0113525390625, -0.042266845703125, 0.02471923828125, -0.0017852783203125, 0.01070404052734375, 
-0.038360595703125, 0.020843505859375, -0.0018396377563476562, -0.0019445419311523438, 0.020599365234375, -0.01062774658203125, -0.00827789306640625, -0.058929443359375, -0.00954437255859375, 0.024566650390625, -0.02850341796875, -0.0271148681640625, 0.0577392578125, -0.01377105712890625, -0.045745849609375, 0.06231689453125, -0.00571441650390625, -0.03692626953125, 0.0736083984375, 0.053436279296875, 0.0648193359375, -0.027587890625, 0.00551605224609375, 0.035797119140625, 0.0217132568359375, -0.0172119140625, 0.0518798828125, 0.005901336669921875, -0.06219482421875, -0.0195770263671875, -0.039825439453125, -0.0297393798828125, 0.049835205078125, -0.058013916015625, 0.0182647705078125, -0.06378173828125, -0.016876220703125, -0.003047943115234375, 0.0074462890625, -0.040252685546875, 0.036224365234375, 0.0026798248291015625, 0.07208251953125, -0.07574462890625, 0.053955078125, 0.06146240234375, -0.08453369140625, -0.07611083984375, -0.0048675537109375, -0.0087127685546875, -0.07965087890625, 0.047454833984375, 0.00435638427734375, 0.003154754638671875, -0.0035762786865234375, -0.054412841796875, -0.08038330078125, 0.08306884765625, 0.02227783203125, -0.03741455078125, -0.0012035369873046875, 0.0104217529296875, 0.044219970703125, -0.03302001953125, 0.024566650390625, 0.041748046875, 0.05438232421875, -0.002094268798828125, -0.06695556640625, 0.01433563232421875, 0.005161285400390625, -0.0002410411834716797, -0.00435638427734375, -0.07135009765625, 0.08123779296875, -0.02667236328125, 0.008697509765625, 0.0303955078125, 0.03729248046875, 0.0335693359375, 0.019439697265625, 0.038360595703125, 0.052215576171875, 0.06536865234375, 0.0152587890625, 0.08953857421875, -0.037567138671875, 0.030364990234375, 0.0828857421875, -0.0229034423828125, 0.0462646484375, 0.034576416015625, -0.03228759765625, 0.02996826171875, 0.0645751953125, -0.016510009765625, 0.043548583984375, -0.0034999847412109375, -0.0107574462890625, -0.0312042236328125, 0.00521087646484375, -0.04541015625, 
0.03973388671875, 0.00890350341796875, -0.04425048828125, -0.04541015625, 0.01148223876953125, -0.01152801513671875, -0.0227813720703125, -0.01010894775390625, 0.0648193359375, 0.0104522705078125, -0.06268310546875, 0.053955078125, 0.001773834228515625, 0.0014019012451171875, -0.048004150390625, 0.004718780517578125, -0.031036376953125, 0.01015472412109375, -0.0177459716796875, -0.03741455078125, 0.0165252685546875, -0.01294708251953125, -0.0340576171875, -0.018646240234375, 0.049224853515625, -0.00612640380859375, -0.00995635986328125, 0.027069091796875, 0.0426025390625, 0.04864501953125, 0.0017528533935546875, -0.0736083984375, -0.01000213623046875, 0.0013284683227539062, -0.0164031982421875, 0.03741455078125, 0.01068878173828125, -0.00905609130859375, 0.047821044921875, 0.057220458984375, 0.00608062744140625, -0.006626129150390625, -0.00594329833984375, 0.0562744140625, -0.041595458984375, -0.0229339599609375, -0.055267333984375, 0.02838134765625, 0.0092315673828125, -0.040496826171875, 0.05792236328125, 0.058624267578125, 0.063720703125, -0.02716064453125, 0.05938720703125, -0.01201629638671875, 0.03515625, -0.048248291015625, 0.063232421875, -0.05413818359375, 0.02239990234375, 0.011627197265625, -0.0826416015625, 0.004093170166015625, 0.070556640625, -0.009246826171875, 0.0184173583984375, 0.03375244140625, 0.06146240234375, -0.0152130126953125, 0.007511138916015625, 0.01316070556640625, 0.0255889892578125, 0.0188751220703125, 0.037994384765625, 0.06549072265625, -0.03924560546875, 0.02691650390625, -0.02685546875, -0.034423828125, -0.01416778564453125, -0.08575439453125, -0.044464111328125, -0.0224761962890625, -0.0323486328125, -0.045318603515625, 0.02398681640625, 0.07684326171875, 0.0433349609375, -0.07281494140625, -0.03466796875, -0.044677734375, 0.0079345703125, -0.0229339599609375, -0.01522064208984375, 0.0203704833984375, -0.049591064453125, -0.050506591796875, 0.048858642578125, 0.0036220550537109375, 0.048065185546875, -0.0294952392578125, 
0.0117340087890625, -0.023162841796875, -0.01436614990234375, 0.011199951171875, 0.07379150390625, -0.04095458984375, -0.0237274169921875, -0.023406982421875, -0.01459503173828125, 0.004962921142578125, 0.04840087890625, -0.046905517578125, 0.028656005859375, 0.016845703125, 0.0283203125, 0.05267333984375, -0.0021495819091796875, 0.0311126708984375, -0.0260467529296875, 0.00008749961853027344, 0.01409912109375, 0.0306396484375, 0.006679534912109375, -0.0269012451171875, 0.025238037109375, 0.04132080078125, -0.05133056640625, -0.03338623046875, 0.016815185546875, -0.08123779296875, -0.013092041015625, 0.07366943359375, -0.0254364013671875, -0.024383544921875, -0.042266845703125, -0.0196685791015625, 0.0241241455078125, -0.0202789306640625, 0.06494140625, 0.03515625, -0.03570556640625, 0.00807952880859375, -0.03912353515625, 0.046478271484375, 0.0178375244140625, -0.0540771484375, -0.0253753662109375, 0.0004019737243652344, 0.0537109375, 0.01849365234375, 0.040130615234375, 0.00698089599609375, 0.02081298828125, -0.005702972412109375, 0.00913238525390625, -0.036163330078125, -0.0111541748046875, -0.0299224853515625, 0.004119873046875, -0.003021240234375, -0.0303802490234375 ] ]
pankajmathur/model_009
2023-08-24T14:41:28.000Z
[ "transformers", "pytorch", "llama", "text-generation", "en", "license:llama2", "endpoints_compatible", "text-generation-inference", "region:us" ]
text-generation
pankajmathur
null
null
pankajmathur/model_009
1
5,818
transformers
2023-08-10T14:23:47
--- language: - en library_name: transformers license: llama2 --- LlaMA-2 License, more details coming soon...
110
[ [ -0.0268707275390625, -0.0227813720703125, 0.025634765625, 0.0499267578125, -0.0496826171875, 0.00948333740234375, 0.0211029052734375, -0.07977294921875, 0.01242828369140625, 0.06561279296875, -0.0212860107421875, -0.015380859375, -0.048492431640625, -0.01611328125, -0.058197021484375, 0.05694580078125, -0.00643157958984375, -0.023101806640625, 0.0033054351806640625, -0.006504058837890625, -0.051788330078125, -0.055023193359375, -0.0214385986328125, -0.0328369140625, 0.055816650390625, 0.0279083251953125, 0.0416259765625, 0.050140380859375, 0.037567138671875, 0.01508331298828125, -0.0173797607421875, -0.01151275634765625, -0.005428314208984375, -0.002475738525390625, 0.023681640625, -0.06695556640625, -0.048187255859375, -0.0008611679077148438, 0.03326416015625, 0.01401519775390625, -0.023681640625, 0.0016613006591796875, -0.01336669921875, 0.03363037109375, -0.014862060546875, 0.009124755859375, 0.0018825531005859375, 0.01290130615234375, -0.03173828125, 0.002376556396484375, -0.023406982421875, -0.0133209228515625, 0.0047149658203125, -0.09136962890625, -0.023681640625, 0.0147705078125, 0.09625244140625, 0.043426513671875, -0.05517578125, -0.0104217529296875, -0.014892578125, 0.0233001708984375, -0.006641387939453125, 0.0158233642578125, 0.047637939453125, 0.056976318359375, -0.0177154541015625, -0.07464599609375, -0.0152130126953125, -0.0108489990234375, -0.0260467529296875, -0.0130462646484375, 0.005828857421875, -0.0167999267578125, 0.017181396484375, 0.00867462158203125, 0.00958251953125, 0.01265716552734375, -0.057647705078125, 0.00698089599609375, 0.0689697265625, -0.0009927749633789062, -0.005092620849609375, 0.028839111328125, -0.052490234375, 0.0184783935546875, -0.06072998046875, 0.010955810546875, 0.04632568359375, -0.032867431640625, -0.06390380859375, 0.059051513671875, -0.01280975341796875, 0.011993408203125, 0.0025691986083984375, 0.001766204833984375, 0.052337646484375, -0.0235748291015625, -0.00899505615234375, 0.01163482666015625, 
0.0217132568359375, 0.04132080078125, 0.00638580322265625, 0.0019931793212890625, -0.00970458984375, -0.007781982421875, -0.005535125732421875, -0.058197021484375, -0.0279388427734375, 0.0113372802734375, -0.042724609375, -0.0504150390625, -0.0242156982421875, -0.044952392578125, -0.0263214111328125, -0.0113677978515625, 0.02740478515625, 0.00246429443359375, -0.052581787109375, 0.0084381103515625, -0.04736328125, 0.04364013671875, 0.04931640625, -0.034210205078125, 0.004184722900390625, 0.05694580078125, 0.054107666015625, -0.01026153564453125, -0.01264190673828125, -0.010650634765625, 0.029022216796875, -0.00693511962890625, 0.052032470703125, -0.03472900390625, -0.0244293212890625, -0.00031065940856933594, 0.01558685302734375, 0.054534912109375, -0.035736083984375, 0.0008759498596191406, -0.0723876953125, -0.0272369384765625, -0.034759521484375, -0.00452423095703125, -0.01139068603515625, 0.00592041015625, -0.041534423828125, 0.0599365234375, 0.033935546875, -0.0293731689453125, 0.0186614990234375, -0.0184478759765625, -0.05145263671875, 0.008392333984375, -0.00019657611846923828, -0.005016326904296875, 0.006603240966796875, -0.01235198974609375, 0.01253509521484375, -0.02349853515625, 0.0165863037109375, -0.01387786865234375, -0.022857666015625, 0.03271484375, 0.0015697479248046875, 0.07366943359375, 0.00946044921875, -0.003566741943359375, -0.0162200927734375, -0.03985595703125, -0.0258636474609375, 0.003025054931640625, -0.05438232421875, 0.01873779296875, -0.0010013580322265625, 0.00955963134765625, 0.0088348388671875, 0.05303955078125, -0.03680419921875, 0.0194091796875, 0.005123138427734375, 0.0195159912109375, 0.041473388671875, -0.01171112060546875, 0.0204620361328125, -0.01369476318359375, 0.044097900390625, 0.0034847259521484375, 0.038787841796875, -0.0167083740234375, -0.05377197265625, -0.061004638671875, -0.004894256591796875, 0.00843048095703125, 0.04156494140625, -0.009613037109375, 0.02020263671875, 0.04583740234375, -0.061767578125, 
-0.01073455810546875, -0.016693115234375, 0.029022216796875, 0.0240631103515625, 0.00531005859375, -0.03271484375, -0.043365478515625, -0.0460205078125, 0.02264404296875, -0.0171661376953125, 0.02337646484375, 0.0232696533203125, 0.044891357421875, -0.045684814453125, 0.039764404296875, -0.04608154296875, -0.05279541015625, -0.0103759765625, -0.0022106170654296875, 0.0017108917236328125, 0.01035308837890625, 0.057373046875, -0.040557861328125, -0.016937255859375, 0.0275726318359375, -0.03265380859375, 0.006984710693359375, 0.01068115234375, 0.0014867782592773438, 0.007038116455078125, 0.016845703125, -0.05810546875, 0.03997802734375, 0.0196533203125, 0.009918212890625, 0.044830322265625, -0.01885986328125, -0.0288848876953125, -0.04217529296875, -0.0275115966796875, -0.04595947265625, -0.00527191162109375, 0.00211334228515625, -0.0033130645751953125, -0.033355712890625, 0.0026416778564453125, -0.0276947021484375, 0.001708984375, -0.034698486328125, -0.04766845703125, -0.0309295654296875, -0.0016498565673828125, -0.00806427001953125, 0.00689697265625, 0.0286102294921875, 0.07647705078125, 0.0919189453125, -0.043304443359375, 0.059661865234375, 0.016357421875, -0.00751495361328125, 0.028778076171875, -0.05902099609375, -0.0063018798828125, 0.005123138427734375, 0.053131103515625, -0.052520751953125, -0.06561279296875, 0.01387786865234375, -0.021514892578125, 0.0037288665771484375, -0.0273590087890625, -0.03436279296875, -0.024993896484375, -0.006984710693359375, 0.0259246826171875, 0.0435791015625, -0.056854248046875, 0.029449462890625, 0.08929443359375, 0.00814056396484375, -0.043121337890625, -0.07098388671875, 0.0255126953125, -0.00872039794921875, -0.0059661865234375, 0.00562286376953125, 0.0250396728515625, -0.03082275390625, -0.00521087646484375, 0.040679931640625, -0.03155517578125, 0.03350830078125, 0.03173828125, 0.002498626708984375, -0.0261077880859375, 0.0008416175842285156, 0.021026611328125, -0.019683837890625, 0.04327392578125, 0.0213470458984375, 
0.045684814453125, -0.0226898193359375, 0.002666473388671875, -0.054962158203125, 0.01218414306640625, -0.01190185546875, -0.032440185546875, 0.005947113037109375, 0.01468658447265625, -0.0281219482421875, 0.0087127685546875, -0.032196044921875, -0.02581787109375, -0.04791259765625, 0.0352783203125, -0.01141357421875, -0.053009033203125, 0.0132293701171875, -0.023681640625, -0.0050506591796875, 0.0323486328125, 0.05419921875, -0.0238189697265625, 0.0540771484375, 0.040802001953125, 0.0247955322265625, 0.035491943359375, -0.01157379150390625, -0.01235198974609375, -0.056304931640625, -0.0709228515625, -0.0152130126953125, -0.0156707763671875, -0.034759521484375, -0.0295562744140625, 0.00009000301361083984, 0.00730133056640625, -0.0101776123046875, 0.057952880859375, -0.040283203125, 0.053466796875, 0.0252227783203125, -0.021087646484375, 0.01432037353515625, -0.0035457611083984375, 0.0131072998046875, 0.023040771484375, -0.005985260009765625, -0.040618896484375, 0.10272216796875, 0.02874755859375, 0.034912109375, 0.022613525390625, 0.03546142578125, 0.027587890625, 0.028533935546875, -0.0313720703125, 0.042083740234375, 0.0003337860107421875, -0.0780029296875, 0.004856109619140625, 0.035430908203125, -0.06488037109375, 0.02899169921875, 0.012481689453125, -0.025482177734375, 0.0122528076171875, -0.0020198822021484375, -0.01222991943359375, 0.0059051513671875, -0.043182373046875, 0.00446319580078125, -0.045379638671875, 0.007015228271484375, -0.04620361328125, -0.09039306640625, 0.033935546875, -0.023651123046875, 0.0167236328125, -0.034332275390625, -0.051177978515625, 0.036773681640625, -0.03179931640625, 0.034759521484375, -0.030029296875, -0.016754150390625, 0.0377197265625, -0.0244903564453125, 0.0166015625, 0.048370361328125, -0.0071258544921875, 0.0098724365234375, 0.0023708343505859375, 0.0144500732421875, -0.0009002685546875, 0.034027099609375, -0.069091796875, -0.0290374755859375, -0.06512451171875, -0.04010009765625, 0.0164642333984375, -0.009307861328125, 
0.0245513916015625, -0.0016164779663085938, 0.004673004150390625, -0.004058837890625, 0.035736083984375, -0.010009765625, 0.04522705078125, 0.041534423828125, -0.0237274169921875, -0.05267333984375, 0.04229736328125, 0.0022945404052734375, -0.00753021240234375, 0.0002636909484863281, -0.001758575439453125, -0.028350830078125, -0.024139404296875, -0.04998779296875, 0.02880859375, -0.02313232421875, -0.01511383056640625, -0.0186309814453125, -0.0126190185546875, -0.0069427490234375, -0.010650634765625, -0.03277587890625, -0.0606689453125, -0.063232421875, -0.0047607421875, 0.06805419921875, 0.0989990234375, -0.01224517822265625, 0.0784912109375, -0.031036376953125, 0.0216064453125, 0.0235443115234375, 0.0170135498046875, -0.00368499755859375, -0.05364990234375, -0.01465606689453125, -0.0157928466796875, -0.04461669921875, -0.057281494140625, 0.0124359130859375, 0.0013666152954101562, 0.007656097412109375, 0.05047607421875, -0.00799560546875, 0.0496826171875, -0.03857421875, 0.055633544921875, 0.05059814453125, -0.033050537109375, 0.05328369140625, -0.011077880859375, -0.04168701171875, 0.01348876953125, -0.01499176025390625, -0.004058837890625, -0.02685546875, -0.01568603515625, -0.0670166015625, 0.0399169921875, -0.0029048919677734375, 0.050811767578125, 0.0153961181640625, 0.046295166015625, 0.0213775634765625, 0.0240631103515625, -0.057952880859375, -0.0487060546875, 0.00890350341796875, 0.0272369384765625, 0.014373779296875, -0.05438232421875, -0.0117034912109375, -0.060028076171875, 0.037384033203125, 0.047515869140625, 0.03155517578125, 0.0009241104125976562, 0.01363372802734375, -0.0015287399291992188, 0.0145263671875, 0.08544921875, 0.040008544921875, -0.0081024169921875, 0.021728515625, 0.032440185546875, -0.054931640625, 0.0352783203125, 0.019622802734375, 0.01367950439453125, 0.00018453598022460938, 0.0222015380859375, 0.039642333984375, -0.02288818359375, -0.040435791015625, 0.05657958984375, 0.015655517578125, -0.0237579345703125, -0.07366943359375, 
0.01461029052734375, 0.019927978515625, 0.037933349609375, 0.027618408203125, 0.0005006790161132812, 0.00785064697265625, -0.0159149169921875, 0.01280975341796875, 0.0121002197265625, -0.019866943359375, -0.0276947021484375, 0.0709228515625, 0.04083251953125, 0.0045318603515625, 0.0192108154296875, -0.03021240234375, -0.043304443359375, 0.0709228515625, 0.0634765625, 0.005084991455078125, -0.01142120361328125, 0.0098724365234375, 0.0195770263671875, 0.020843505859375, -0.019927978515625, 0.045135498046875, -0.0085296630859375, 0.0017423629760742188, -0.0035037994384765625, -0.008575439453125, -0.0430908203125, -0.02716064453125, -0.040924072265625, 0.0182647705078125, -0.052520751953125, -0.0262451171875, -0.03076171875, 0.02447509765625, -0.0083770751953125, 0.019622802734375, 0.01922607421875, 0.0751953125, -0.047943115234375, 0.1058349609375, 0.040283203125, -0.036712646484375, -0.054534912109375, -0.0406494140625, -0.01256561279296875, -0.0703125, 0.06396484375, 0.03814697265625, -0.040557861328125, 0.01488494873046875, -0.034393310546875, -0.0794677734375, 0.119384765625, 0.040924072265625, -0.08795166015625, 0.019195556640625, 0.01556396484375, 0.0209197998046875, -0.032196044921875, -0.004718780517578125, 0.06732177734375, 0.06158447265625, 0.01201629638671875, -0.0665283203125, -0.0240020751953125, 0.015655517578125, -0.039764404296875, 0.0029430389404296875, -0.06982421875, 0.0672607421875, -0.06884765625, 0.00205230712890625, 0.0263214111328125, 0.06005859375, 0.033203125, 0.031585693359375, 0.0186004638671875, 0.065673828125, 0.0770263671875, 0.0021820068359375, 0.06976318359375, 0.00557708740234375, 0.0006923675537109375, 0.08782958984375, -0.041412353515625, 0.08026123046875, 0.04498291015625, -0.039215087890625, 0.04095458984375, 0.06121826171875, -0.0261383056640625, 0.04693603515625, -0.006439208984375, -0.02716064453125, -0.003543853759765625, -0.0771484375, -0.0684814453125, 0.018646240234375, 0.0175628662109375, -0.0340576171875, 
0.0117034912109375, -0.025360107421875, -0.00307464599609375, -0.034942626953125, -0.031646728515625, 0.0343017578125, 0.0242767333984375, -0.01052093505859375, 0.0104522705078125, 0.01146697998046875, 0.0306243896484375, -0.0227203369140625, 0.0037860870361328125, 0.006591796875, 0.0157928466796875, -0.017913818359375, -0.095703125, 0.0159912109375, 0.009857177734375, 0.0251617431640625, 0.01016998291015625, 0.043731689453125, -0.026824951171875, -0.024444580078125, 0.05218505859375, -0.0186920166015625, 0.0283050537109375, 0.00724029541015625, -0.0223541259765625, 0.0225982666015625, -0.03253173828125, -0.00624847412109375, 0.0007977485656738281, -0.021575927734375, 0.0057373046875, 0.052734375, 0.0273895263671875, 0.010101318359375, 0.03204345703125, -0.01031494140625, 0.0753173828125, -0.01122283935546875, -0.01476287841796875, -0.048004150390625, 0.0191802978515625, -0.01239013671875, -0.046783447265625, -0.0015859603881835938, 0.07562255859375, 0.0628662109375, -0.0021495819091796875, 0.056243896484375, -0.006542205810546875, -0.010284423828125, -0.00914764404296875, 0.044708251953125, -0.07183837890625, -0.00006699562072753906, 0.058685302734375, -0.033355712890625, -0.0235443115234375, 0.0340576171875, 0.0180816650390625, -0.00978851318359375, 0.037261962890625, 0.03643798828125, 0.013153076171875, -0.0083770751953125, 0.0018224716186523438, -0.0266876220703125, 0.0100250244140625, 0.048858642578125, 0.0863037109375, -0.045196533203125, 0.043792724609375, -0.05126953125, 0.0024471282958984375, -0.044464111328125, -0.0379638671875, -0.058868408203125, -0.0197296142578125, -0.01265716552734375, -0.00983428955078125, -0.0223236083984375, 0.052154541015625, 0.046844482421875, -0.0210723876953125, -0.03253173828125, 0.0083465576171875, 0.02294921875, 0.0295562744140625, 0.0007643699645996094, 0.0269622802734375, 0.00661468505859375, -0.00072479248046875, 0.0045013427734375, 0.0364990234375, 0.034393310546875, -0.0106353759765625, -0.04595947265625, 
0.021026611328125, 0.0034885406494140625, 0.048858642578125, 0.006072998046875, -0.06793212890625, -0.0172119140625, -0.04583740234375, -0.036895751953125, 0.029205322265625, 0.0235137939453125, -0.0180206298828125, -0.004619598388671875, 0.02105712890625, 0.011505126953125, 0.0229644775390625, 0.0023651123046875, 0.01953125, -0.017242431640625, 0.049530029296875, -0.001857757568359375, -0.0032596588134765625, -0.00666046142578125, -0.0357666015625, 0.03631591796875, 0.0164337158203125, -0.04962158203125, -0.06182861328125, 0.0423583984375, -0.1075439453125, 0.00736236572265625, 0.0772705078125, -0.00922393798828125, -0.0182952880859375, -0.0191650390625, -0.037109375, 0.0217132568359375, -0.0203704833984375, 0.01251983642578125, 0.0243988037109375, -0.0110321044921875, -0.00391387939453125, -0.06005859375, 0.024658203125, -0.01971435546875, -0.09136962890625, -0.008758544921875, 0.032440185546875, 0.0302581787109375, -0.0122833251953125, 0.027862548828125, -0.0003459453582763672, 0.0640869140625, -0.00772857666015625, -0.00823974609375, -0.03155517578125, -0.043212890625, -0.00624847412109375, 0.01027679443359375, 0.0335693359375, -0.049591064453125 ] ]
RWKV/rwkv-raven-14b
2023-05-15T10:09:50.000Z
[ "transformers", "pytorch", "rwkv", "text-generation", "dataset:EleutherAI/pile", "endpoints_compatible", "has_space", "region:us" ]
text-generation
RWKV
null
null
RWKV/rwkv-raven-14b
49
5,817
transformers
2023-05-05T12:24:03
--- datasets: - EleutherAI/pile --- ![RWKlogo.png](https://s3.amazonaws.com/moonup/production/uploads/62441d1d9fdefb55a0b7d12c/UWpP-lGRZJJDaEx_uUlDv.png) # Model card for RWKV-4 | 14B parameters chat version (Raven) RWKV is a project led by [Bo Peng](https://github.com/BlinkDL). Learn more about the model architecture in the blogposts from Johan Wind [here](https://johanwind.github.io/2023/03/23/rwkv_overview.html) and [here](https://johanwind.github.io/2023/03/23/rwkv_details.html). Learn more about the project by joining the [RWKV discord server](https://discordapp.com/users/468093332535640064). # Table of contents 0. [TL;DR](#TL;DR) 1. [Model Details](#model-details) 2. [Usage](#usage) 3. [Citation](#citation) ## TL;DR Below is the description from the [original repository](https://github.com/BlinkDL/RWKV-LM) > RWKV is an RNN with transformer-level LLM performance. It can be directly trained like a GPT (parallelizable). It's combining the best of RNN and transformer - great performance, fast inference, saves VRAM, fast training, "infinite" ctx_len, and free sentence embedding. ## Model Details The details of the architecture can be found on the blogpost mentioned above and the Hugging Face blogpost of the integration. ## Usage ### Convert the raw weights to the HF format You can use the [`convert_rwkv_checkpoint_to_hf.py`](https://github.com/huggingface/transformers/tree/main/src/transformers/models/rwkv/convert_rwkv_checkpoint_to_hf.py) script by specifying the repo_id of the original weights, the filename and the output directory. You can also optionally directly push the converted model on the Hub by passing `--push_to_hub` flag and `--model_name` argument to specify where to push the converted weights. 
```bash python convert_rwkv_checkpoint_to_hf.py --repo_id RAW_HUB_REPO --checkpoint_file RAW_FILE --output_dir OUTPUT_DIR --push_to_hub --model_name dummy_user/converted-rwkv ``` ### Generate text You can use the `AutoModelForCausalLM` and `AutoTokenizer` classes to generate texts from the model. Expand the sections below to understand how to run the model in different scenarios: The "Raven" models needs to be prompted in a specific way, learn more about that [in the integration blogpost](https://huggingface.co/blog/rwkv). ### Running the model on a CPU <details> <summary> Click to expand </summary> ```python from transformers import AutoModelForCausalLM, AutoTokenizer model = AutoModelForCausalLM.from_pretrained("RWKV/rwkv-raven-14b") tokenizer = AutoTokenizer.from_pretrained("RWKV/rwkv-raven-14b") prompt = "\nIn a shocking finding, scientist discovered a herd of dragons living in a remote, previously unexplored valley, in Tibet. Even more surprising to the researchers was the fact that the dragons spoke perfect Chinese." inputs = tokenizer(prompt, return_tensors="pt") output = model.generate(inputs["input_ids"], max_new_tokens=40) print(tokenizer.decode(output[0].tolist(), skip_special_tokens=True)) ``` ### Running the model on a single GPU <details> <summary> Click to expand </summary> ```python from transformers import AutoModelForCausalLM, AutoTokenizer model = AutoModelForCausalLM.from_pretrained("RWKV/rwkv-raven-14b").to(0) tokenizer = AutoTokenizer.from_pretrained("RWKV/rwkv-raven-14b") prompt = "\nIn a shocking finding, scientist discovered a herd of dragons living in a remote, previously unexplored valley, in Tibet. Even more surprising to the researchers was the fact that the dragons spoke perfect Chinese." 
inputs = tokenizer(prompt, return_tensors="pt").to(0) output = model.generate(inputs["input_ids"], max_new_tokens=40) print(tokenizer.decode(output[0].tolist(), skip_special_tokens=True)) ``` </details> </details> ### Running the model in half-precision, on GPU <details> <summary> Click to expand </summary> ```python import torch from transformers import AutoModelForCausalLM, AutoTokenizer model = AutoModelForCausalLM.from_pretrained("RWKV/rwkv-raven-14b", torch_dtype=torch.float16).to(0) tokenizer = AutoTokenizer.from_pretrained("RWKV/rwkv-raven-14b") prompt = "\nIn a shocking finding, scientist discovered a herd of dragons living in a remote, previously unexplored valley, in Tibet. Even more surprising to the researchers was the fact that the dragons spoke perfect Chinese." inputs = tokenizer(prompt, return_tensors="pt").to(0) output = model.generate(inputs["input_ids"], max_new_tokens=40) print(tokenizer.decode(output[0].tolist(), skip_special_tokens=True)) ``` </details> ### Running the model multiple GPUs <details> <summary> Click to expand </summary> ```python # pip install accelerate from transformers import AutoModelForCausalLM, AutoTokenizer model = AutoModelForCausalLM.from_pretrained("RWKV/rwkv-raven-14b", device_map="auto") tokenizer = AutoTokenizer.from_pretrained("RWKV/rwkv-raven-14b") prompt = "\nIn a shocking finding, scientist discovered a herd of dragons living in a remote, previously unexplored valley, in Tibet. Even more surprising to the researchers was the fact that the dragons spoke perfect Chinese." inputs = tokenizer(prompt, return_tensors="pt").to(0) output = model.generate(inputs["input_ids"], max_new_tokens=40) print(tokenizer.decode(output[0].tolist(), skip_special_tokens=True)) ``` </details> ## Citation If you use this model, please consider citing the original work, from the original repo [here](https://github.com/BlinkDL/ChatRWKV/)
5,429
[ [ -0.0263214111328125, -0.040740966796875, -0.0026912689208984375, 0.01800537109375, -0.01015472412109375, -0.0280914306640625, 0.0026035308837890625, -0.0267486572265625, 0.0041656494140625, 0.0157623291015625, -0.043792724609375, -0.0234832763671875, -0.0343017578125, -0.0013828277587890625, -0.032958984375, 0.06317138671875, 0.0013742446899414062, -0.00015914440155029297, 0.0205230712890625, -0.0004012584686279297, -0.0065460205078125, -0.023956298828125, -0.044403076171875, -0.045440673828125, 0.03802490234375, -0.0252227783203125, 0.048065185546875, 0.08306884765625, 0.0257720947265625, 0.0274810791015625, -0.01117706298828125, 0.011932373046875, -0.0213775634765625, -0.0099639892578125, 0.0027675628662109375, -0.00873565673828125, -0.0263519287109375, 0.0091705322265625, 0.057220458984375, 0.0236053466796875, -0.0189208984375, 0.0189208984375, 0.0081024169921875, 0.0161895751953125, -0.02618408203125, 0.0223388671875, -0.028167724609375, 0.0212249755859375, -0.00376129150390625, -0.004638671875, -0.02398681640625, -0.0002435445785522461, 0.0008859634399414062, -0.0787353515625, 0.0308685302734375, 0.00745391845703125, 0.09375, 0.037384033203125, -0.0177154541015625, 0.00589752197265625, -0.035736083984375, 0.062164306640625, -0.07550048828125, 0.0279083251953125, 0.0016956329345703125, 0.003448486328125, -0.0203857421875, -0.08203125, -0.054962158203125, -0.01324462890625, -0.01166534423828125, 0.018157958984375, -0.014312744140625, 0.003139495849609375, 0.044219970703125, 0.034759521484375, -0.04046630859375, 0.002765655517578125, -0.034820556640625, -0.0288848876953125, 0.041412353515625, 0.029052734375, 0.03729248046875, -0.03497314453125, -0.02239990234375, -0.044158935546875, -0.03125, 0.013671875, 0.0250244140625, 0.03521728515625, -0.02569580078125, 0.040771484375, -0.01447296142578125, 0.054901123046875, 0.0247650146484375, 0.002880096435546875, 0.0170440673828125, -0.0217437744140625, -0.033447265625, -0.0171661376953125, 0.08209228515625, 
0.011993408203125, -0.00823211669921875, -0.01076507568359375, -0.00748443603515625, -0.020294189453125, 0.007137298583984375, -0.0760498046875, -0.03466796875, 0.0159759521484375, -0.061248779296875, -0.02410888671875, -0.0038280487060546875, -0.049713134765625, -0.0216522216796875, 0.00487518310546875, 0.043121337890625, -0.0236968994140625, -0.052337646484375, -0.0017137527465820312, -0.0268402099609375, 0.049713134765625, 0.0015354156494140625, -0.0845947265625, -0.0076751708984375, 0.04156494140625, 0.061187744140625, -0.0037708282470703125, -0.05499267578125, -0.0303497314453125, 0.0027866363525390625, -0.023162841796875, 0.0338134765625, -0.00565338134765625, -0.043182373046875, -0.0152435302734375, 0.024627685546875, -0.0177764892578125, -0.02679443359375, 0.0418701171875, -0.0229949951171875, 0.0282440185546875, -0.03387451171875, -0.03521728515625, -0.0216064453125, 0.017547607421875, -0.036834716796875, 0.0936279296875, 0.0016870498657226562, -0.07952880859375, 0.01490020751953125, -0.044708251953125, -0.0222625732421875, 0.007427215576171875, 0.0063323974609375, -0.03515625, -0.004711151123046875, 0.0227203369140625, 0.032745361328125, -0.01032257080078125, 0.004550933837890625, -0.0266571044921875, -0.037933349609375, 0.01033782958984375, -0.03369140625, 0.0867919921875, 0.02850341796875, -0.036651611328125, 0.018310546875, -0.04168701171875, 0.00847625732421875, 0.00798797607421875, -0.031951904296875, 0.005352020263671875, -0.0036487579345703125, 0.010162353515625, 0.00609588623046875, 0.014984130859375, -0.042266845703125, 0.016326904296875, -0.045013427734375, 0.053314208984375, 0.04449462890625, -0.0176544189453125, 0.020782470703125, -0.0275726318359375, 0.01453399658203125, -0.003154754638671875, 0.02618408203125, -0.015655517578125, -0.045318603515625, -0.07916259765625, -0.0160369873046875, 0.01464080810546875, 0.0290679931640625, -0.055450439453125, 0.0322265625, -0.0119781494140625, -0.048583984375, -0.045501708984375, -0.0234832763671875, 
0.01180267333984375, 0.0457763671875, 0.0322265625, 0.0027904510498046875, -0.0309600830078125, -0.04669189453125, -0.0202484130859375, -0.02862548828125, -0.0046539306640625, 0.0186614990234375, 0.0435791015625, -0.02679443359375, 0.05889892578125, -0.0198822021484375, -0.00664520263671875, -0.0156707763671875, 0.0205230712890625, 0.02862548828125, 0.058135986328125, 0.0271759033203125, -0.04339599609375, -0.0261383056640625, 0.0078582763671875, -0.07000732421875, 0.010711669921875, -0.014404296875, -0.00476837158203125, -0.0007605552673339844, 0.0220794677734375, -0.054656982421875, 0.03564453125, 0.03216552734375, -0.0174407958984375, 0.05023193359375, -0.025482177734375, 0.01140594482421875, -0.085693359375, 0.019927978515625, -0.00867462158203125, 0.0025768280029296875, -0.034759521484375, 0.008758544921875, 0.00958251953125, -0.01247406005859375, -0.04010009765625, 0.0693359375, -0.0290679931640625, -0.0009813308715820312, -0.01544952392578125, -0.020599365234375, 0.001708984375, 0.05010986328125, 0.009246826171875, 0.055023193359375, 0.057769775390625, -0.051055908203125, 0.04632568359375, 0.0254058837890625, -0.0174560546875, -0.006984710693359375, -0.06964111328125, 0.0024566650390625, 0.0126953125, 0.014404296875, -0.0579833984375, -0.019927978515625, 0.03656005859375, -0.054351806640625, 0.026214599609375, -0.0195770263671875, -0.0267486572265625, -0.0377197265625, -0.0032100677490234375, 0.036834716796875, 0.047332763671875, -0.0628662109375, 0.06671142578125, 0.017791748046875, 0.0195770263671875, -0.0643310546875, -0.07342529296875, 0.00453948974609375, -0.0217132568359375, -0.0477294921875, 0.03466796875, 0.0011510848999023438, 0.0051727294921875, 0.0027179718017578125, 0.01436614990234375, -0.0027561187744140625, -0.010467529296875, 0.0290985107421875, 0.032684326171875, -0.01194000244140625, -0.0040283203125, -0.0178375244140625, -0.017852783203125, 0.0185699462890625, -0.034393310546875, 0.03216552734375, -0.01418304443359375, -0.0232086181640625, 
-0.0648193359375, 0.00748443603515625, 0.045806884765625, -0.01025390625, 0.05810546875, 0.07513427734375, -0.0278472900390625, -0.0175018310546875, -0.036651611328125, -0.030670166015625, -0.038848876953125, 0.041778564453125, -0.021209716796875, -0.029998779296875, 0.0587158203125, 0.01253509521484375, 0.0150299072265625, 0.057830810546875, 0.043914794921875, -0.00033855438232421875, 0.08502197265625, 0.04791259765625, 0.003505706787109375, 0.036651611328125, -0.04815673828125, 0.017822265625, -0.060821533203125, -0.024505615234375, -0.0288848876953125, 0.003925323486328125, -0.0447998046875, -0.027618408203125, 0.01280975341796875, 0.00189208984375, -0.045318603515625, 0.0205230712890625, -0.0675048828125, 0.00916290283203125, 0.037872314453125, -0.00225830078125, -0.00789642333984375, 0.001422882080078125, -0.02520751953125, 0.000274658203125, -0.07666015625, -0.011932373046875, 0.06842041015625, 0.037261962890625, 0.05389404296875, -0.0168914794921875, 0.03704833984375, 0.0094757080078125, 0.0272216796875, -0.044403076171875, 0.032684326171875, -0.00799560546875, -0.05010986328125, -0.0219879150390625, -0.037322998046875, -0.047027587890625, 0.03070068359375, -0.01959228515625, -0.0298004150390625, 0.01934814453125, 0.006626129150390625, -0.044952392578125, 0.046417236328125, -0.04052734375, 0.07879638671875, 0.0006709098815917969, -0.02984619140625, 0.003757476806640625, -0.026611328125, 0.0394287109375, 0.0197601318359375, 0.0008006095886230469, 0.0073089599609375, 0.0194244384765625, 0.07275390625, -0.04864501953125, 0.06280517578125, -0.0186920166015625, 0.01198577880859375, 0.025848388671875, -0.0221099853515625, 0.042999267578125, -0.00618743896484375, -0.01385498046875, 0.02197265625, 0.01270294189453125, -0.0222320556640625, -0.0271759033203125, 0.060272216796875, -0.0853271484375, -0.030792236328125, -0.039703369140625, -0.0465087890625, 0.0292205810546875, 0.018218994140625, 0.043975830078125, 0.034637451171875, -0.0007996559143066406, 
-0.0011577606201171875, 0.042236328125, -0.03826904296875, 0.057952880859375, 0.0203857421875, -0.019561767578125, -0.04119873046875, 0.060638427734375, 0.0009479522705078125, 0.004535675048828125, 0.0005917549133300781, 0.01763916015625, -0.044525146484375, -0.03131103515625, -0.053192138671875, 0.0328369140625, -0.0577392578125, -0.005924224853515625, -0.060821533203125, -0.0447998046875, -0.0506591796875, 0.00896453857421875, -0.041229248046875, -0.01358795166015625, -0.039794921875, 0.01552581787109375, 0.024383544921875, 0.044708251953125, -0.025115966796875, 0.0208892822265625, -0.05462646484375, 0.0183868408203125, 0.042938232421875, 0.00493621826171875, 0.0215911865234375, -0.069091796875, -0.0155181884765625, 0.01641845703125, -0.0106658935546875, -0.0440673828125, 0.053924560546875, -0.00008946657180786133, 0.0518798828125, 0.023162841796875, 0.011749267578125, 0.07183837890625, -0.0119171142578125, 0.06927490234375, 0.01543426513671875, -0.06640625, 0.0112457275390625, -0.031646728515625, 0.0215911865234375, -0.00026726722717285156, 0.00246429443359375, -0.042999267578125, -0.007488250732421875, -0.035125732421875, -0.048492431640625, 0.04803466796875, 0.01335906982421875, 0.01122283935546875, 0.00922393798828125, 0.043243408203125, -0.016845703125, -0.0182037353515625, -0.07989501953125, -0.0382080078125, -0.05706787109375, 0.0014085769653320312, 0.01409149169921875, -0.0021610260009765625, -0.004444122314453125, -0.048583984375, 0.06671142578125, 0.0008807182312011719, 0.03680419921875, 0.0297088623046875, 0.0033702850341796875, -0.004207611083984375, -0.010986328125, 0.0306854248046875, 0.032684326171875, -0.002986907958984375, -0.0118255615234375, 0.0296173095703125, -0.042694091796875, -0.01201629638671875, 0.0242462158203125, -0.019775390625, -0.00014579296112060547, 0.0279541015625, 0.068359375, -0.01096343994140625, -0.01087188720703125, 0.0271759033203125, -0.02630615234375, -0.0220794677734375, -0.029327392578125, 0.009674072265625, 
0.020477294921875, 0.03192138671875, 0.039703369140625, -0.00017511844635009766, -0.011383056640625, -0.01490020751953125, 0.006927490234375, 0.034210205078125, -0.0244140625, -0.01549530029296875, 0.08282470703125, 0.0198974609375, -0.0201263427734375, 0.07421875, -0.0157012939453125, -0.038848876953125, 0.05889892578125, 0.03680419921875, 0.0745849609375, -0.004547119140625, 0.00705718994140625, 0.06353759765625, 0.0227203369140625, -0.020111083984375, -0.0017709732055664062, -0.007244110107421875, -0.055877685546875, -0.040771484375, -0.06689453125, -0.012237548828125, 0.01114654541015625, -0.049957275390625, 0.03204345703125, -0.02178955078125, -0.00958251953125, 0.0006494522094726562, 0.00646209716796875, -0.03924560546875, 0.00872802734375, 0.0095672607421875, 0.0738525390625, -0.056976318359375, 0.08978271484375, 0.03369140625, -0.0288543701171875, -0.08819580078125, -0.01012420654296875, -0.023345947265625, -0.06829833984375, 0.058563232421875, 0.0162811279296875, -0.00637054443359375, 0.0279388427734375, -0.037109375, -0.05389404296875, 0.08880615234375, 0.003498077392578125, -0.024322509765625, -0.006374359130859375, 0.00351715087890625, 0.03192138671875, -0.0162506103515625, 0.04046630859375, 0.0171966552734375, 0.040771484375, 0.026580810546875, -0.062164306640625, 0.00862884521484375, -0.032196044921875, -0.00701141357421875, -0.0040740966796875, -0.050811767578125, 0.10198974609375, -0.0278778076171875, -0.0284576416015625, 0.0160369873046875, 0.0745849609375, 0.022705078125, -0.004673004150390625, 0.031951904296875, 0.04534912109375, 0.044525146484375, -0.01511383056640625, 0.0736083984375, -0.04302978515625, 0.058929443359375, 0.04046630859375, 0.007076263427734375, 0.044586181640625, 0.02178955078125, -0.01010894775390625, 0.02880859375, 0.068115234375, -0.027435302734375, 0.029693603515625, 0.00870513916015625, -0.01526641845703125, -0.023529052734375, 0.005321502685546875, -0.054534912109375, 0.0193023681640625, 0.01154327392578125, 
-0.0222015380859375, -0.01107025146484375, 0.00035881996154785156, -0.0012998580932617188, -0.0328369140625, -0.019378662109375, 0.032379150390625, -0.0009851455688476562, -0.051513671875, 0.0714111328125, 0.0019054412841796875, 0.07012939453125, -0.053436279296875, -0.004730224609375, -0.01067352294921875, 0.0263519287109375, -0.0183563232421875, -0.039215087890625, 0.0256805419921875, -0.01515960693359375, -0.015655517578125, -0.0199737548828125, 0.0360107421875, -0.03289794921875, -0.04351806640625, 0.0201263427734375, 0.007808685302734375, 0.026702880859375, 0.0011720657348632812, -0.075927734375, 0.0135650634765625, 0.01375579833984375, -0.0300445556640625, 0.0157470703125, 0.014434814453125, 0.0288543701171875, 0.058197021484375, 0.065185546875, 0.0086822509765625, 0.026702880859375, -0.01366424560546875, 0.061981201171875, -0.058685302734375, -0.0360107421875, -0.06158447265625, 0.0384521484375, -0.0006556510925292969, -0.037384033203125, 0.07269287109375, 0.04132080078125, 0.045806884765625, 0.0028438568115234375, 0.058380126953125, -0.0172119140625, 0.020751953125, -0.0172882080078125, 0.0823974609375, -0.0455322265625, 0.01129150390625, 0.006153106689453125, -0.045989990234375, -0.0011138916015625, 0.062744140625, -0.0009565353393554688, 0.006832122802734375, 0.0384521484375, 0.07940673828125, -0.0016832351684570312, 0.001232147216796875, 0.0182952880859375, 0.03216552734375, 0.032928466796875, 0.0277557373046875, 0.055908203125, -0.05889892578125, 0.048492431640625, -0.0312347412109375, -0.01080322265625, 0.0196075439453125, -0.07061767578125, -0.072509765625, -0.042266845703125, -0.02935791015625, -0.043426513671875, -0.003185272216796875, 0.0462646484375, 0.0638427734375, -0.045196533203125, -0.0223388671875, -0.01194000244140625, 0.00017535686492919922, -0.019989013671875, -0.0180206298828125, 0.039794921875, -0.0234375, -0.06427001953125, 0.01910400390625, 0.0026226043701171875, 0.0206451416015625, -0.044647216796875, -0.0220184326171875, 
-0.00849151611328125, -0.01189422607421875, 0.005847930908203125, 0.042572021484375, -0.06488037109375, -0.01238250732421875, 0.005859375, -0.0157012939453125, -0.0146636962890625, 0.035308837890625, -0.056915283203125, 0.03240966796875, 0.0440673828125, 0.0328369140625, 0.0673828125, -0.01157379150390625, 0.04046630859375, -0.012725830078125, 0.0234375, -0.00246429443359375, 0.0211181640625, 0.02734375, -0.025482177734375, 0.00962066650390625, 0.034576416015625, -0.059814453125, -0.072021484375, -0.013092041015625, -0.060455322265625, -0.026702880859375, 0.08685302734375, -0.0272064208984375, -0.0338134765625, -0.0059967041015625, 0.0014286041259765625, 0.04718017578125, -0.0032253265380859375, 0.0614013671875, 0.038482666015625, -0.0091400146484375, -0.01556396484375, -0.03924560546875, 0.05450439453125, 0.019775390625, -0.031646728515625, 0.017303466796875, 0.0006842613220214844, 0.042236328125, 0.010650634765625, 0.031768798828125, 0.00433349609375, 0.01332855224609375, 0.0219268798828125, 0.0308074951171875, -0.03851318359375, 0.0136260986328125, -0.0202178955078125, -0.004047393798828125, -0.0286712646484375, -0.0333251953125 ] ]
digiplay/Photon_v1
2023-07-21T21:05:01.000Z
[ "diffusers", "stable-diffusion", "stable-diffusion-diffusers", "text-to-image", "license:other", "endpoints_compatible", "has_space", "diffusers:StableDiffusionPipeline", "region:us" ]
text-to-image
digiplay
null
null
digiplay/Photon_v1
13
5,817
diffusers
2023-06-08T19:02:37
--- license: other tags: - stable-diffusion - stable-diffusion-diffusers - text-to-image - diffusers inference: true --- Model info: https://civitai.com/models/84728/photon ![下載 - 2023-06-09T072756.715.png](https://cdn-uploads.huggingface.co/production/uploads/646c83c871d0c8a6e4455854/nK5-XX5jPaHEnzySaUNer.png)
313
[ [ -0.0240478515625, -0.0108795166015625, 0.0246734619140625, 0.00931549072265625, -0.042022705078125, -0.0078582763671875, 0.039306640625, -0.0241241455078125, 0.032745361328125, 0.03338623046875, -0.052520751953125, -0.01202392578125, -0.01459503173828125, -0.0176239013671875, -0.0189666748046875, 0.0240631103515625, 0.04339599609375, 0.039031982421875, 0.031463623046875, 0.0150604248046875, -0.02471923828125, -0.010040283203125, -0.0506591796875, -0.0200958251953125, 0.042572021484375, 0.0179595947265625, 0.06707763671875, 0.02239990234375, 0.036773681640625, 0.0281524658203125, -0.0020275115966796875, -0.010894775390625, -0.01898193359375, -0.035888671875, -0.0015840530395507812, -0.0416259765625, -0.06951904296875, -0.0131072998046875, 0.043060302734375, 0.0291748046875, 0.0225830078125, -0.004123687744140625, 0.00618743896484375, 0.01971435546875, -0.0257568359375, 0.0264739990234375, -0.0014123916625976562, 0.0172882080078125, -0.0268707275390625, -0.040679931640625, -0.0278167724609375, -0.0266571044921875, -0.025665283203125, -0.0692138671875, 0.031646728515625, 0.0165252685546875, 0.08917236328125, 0.005626678466796875, -0.0692138671875, -0.0031566619873046875, -0.043975830078125, 0.04144287109375, -0.0262451171875, 0.062164306640625, 0.0102386474609375, 0.034576416015625, -0.0250396728515625, -0.047149658203125, -0.016326904296875, 0.0208740234375, -0.0192108154296875, 0.0250701904296875, -0.0233001708984375, 0.00046944618225097656, 0.015380859375, 0.026458740234375, -0.049346923828125, -0.003925323486328125, -0.0675048828125, -0.01534271240234375, 0.028778076171875, -0.0003924369812011719, 0.04205322265625, -0.0015382766723632812, -0.042083740234375, 0.0185546875, -0.03399658203125, -0.0139007568359375, 0.0148468017578125, 0.01013946533203125, -0.028656005859375, 0.04376220703125, -0.010223388671875, 0.04803466796875, 0.00812530517578125, 0.0233001708984375, 0.0186309814453125, -0.03955078125, -0.03143310546875, -0.0256500244140625, 0.0304412841796875, 
0.05511474609375, -0.0260162353515625, 0.0007243156433105469, -0.0031490325927734375, -0.0242462158203125, -0.004123687744140625, -0.0816650390625, -0.0232086181640625, 0.01172637939453125, -0.036376953125, -0.0235443115234375, 0.0241241455078125, -0.0753173828125, -0.00659942626953125, 0.0170135498046875, 0.031158447265625, -0.035675048828125, -0.042633056640625, 0.019012451171875, -0.00876617431640625, 0.0367431640625, 0.039093017578125, -0.05389404296875, -0.00849151611328125, 0.01235198974609375, 0.05078125, 0.051116943359375, 0.02423095703125, -0.0165252685546875, 0.01751708984375, -0.00591278076171875, 0.026580810546875, -0.0157928466796875, -0.042266845703125, -0.006076812744140625, 0.018707275390625, 0.00852203369140625, -0.004627227783203125, 0.06103515625, -0.060546875, 0.021209716796875, -0.0274810791015625, -0.03277587890625, -0.01898193359375, -0.007099151611328125, -0.062347412109375, 0.028472900390625, 0.0290985107421875, -0.064208984375, 0.03375244140625, -0.056640625, 0.014495849609375, 0.0709228515625, -0.01202392578125, -0.0181121826171875, 0.0245208740234375, -0.0128326416015625, 0.0245513916015625, -0.0276947021484375, -0.01076507568359375, -0.035369873046875, -0.01397705078125, 0.0335693359375, -0.02471923828125, 0.097900390625, 0.054107666015625, -0.0139007568359375, -0.004657745361328125, -0.04998779296875, -0.002819061279296875, 0.032257080078125, 0.0005650520324707031, 0.020416259765625, -0.01078033447265625, 0.0213623046875, 0.033294677734375, 0.04083251953125, -0.03466796875, 0.006969451904296875, 0.01229095458984375, 0.026336669921875, 0.02899169921875, 0.0146331787109375, -0.01253509521484375, -0.0352783203125, 0.06072998046875, -0.0012607574462890625, 0.05865478515625, -0.00493621826171875, -0.0308990478515625, -0.0782470703125, -0.0296630859375, -0.004138946533203125, -0.00312042236328125, -0.07598876953125, 0.05023193359375, 0.007358551025390625, -0.0687255859375, -0.007610321044921875, -0.0309906005859375, 0.0196533203125, 
0.039031982421875, -0.011199951171875, -0.02398681640625, -0.02239990234375, -0.078369140625, -0.004589080810546875, -0.00255584716796875, -0.0154571533203125, 0.0246429443359375, 0.0382080078125, -0.005588531494140625, 0.045806884765625, -0.04052734375, -0.0115203857421875, -0.0218505859375, -0.0058441162109375, 0.034820556640625, 0.042938232421875, 0.06549072265625, -0.0985107421875, -0.00977325439453125, -0.022308349609375, -0.045654296875, -0.00994110107421875, 0.012359619140625, -0.016448974609375, -0.013427734375, 0.0213165283203125, -0.049774169921875, 0.044952392578125, 0.0303955078125, -0.07745361328125, 0.038726806640625, -0.03631591796875, 0.0499267578125, -0.0771484375, 0.0374755859375, 0.0306243896484375, -0.0293426513671875, -0.038787841796875, 0.03369140625, 0.0240936279296875, -0.0010166168212890625, -0.07684326171875, 0.01873779296875, -0.057861328125, 0.0023593902587890625, 0.006504058837890625, 0.00421142578125, 0.00807952880859375, 0.01317596435546875, -0.00302886962890625, 0.043670654296875, 0.03662109375, -0.05096435546875, 0.050384521484375, 0.0231781005859375, -0.050689697265625, 0.0225982666015625, -0.07415771484375, -0.013031005859375, 0.0152740478515625, 0.01251983642578125, -0.049652099609375, -0.044891357421875, 0.021392822265625, -0.041229248046875, 0.002002716064453125, -0.00301361083984375, -0.06695556640625, -0.032440185546875, -0.0164642333984375, 0.060638427734375, 0.03216552734375, -0.032562255859375, 0.0152435302734375, 0.01849365234375, -0.0005831718444824219, 0.0170135498046875, -0.052734375, -0.0184478759765625, -0.0240478515625, -0.049102783203125, 0.039825439453125, 0.0009298324584960938, -0.01236724853515625, -0.02362060546875, 0.0279998779296875, -0.035125732421875, 0.01300811767578125, 0.004032135009765625, 0.057708740234375, -0.007427215576171875, 0.0115509033203125, -0.00962066650390625, -0.0268096923828125, 0.00304412841796875, 0.0178070068359375, 0.0631103515625, -0.03094482421875, -0.01488494873046875, 
-0.0767822265625, -0.00543975830078125, 0.0643310546875, -0.009246826171875, 0.037872314453125, 0.035858154296875, -0.05206298828125, 0.0143890380859375, -0.030914306640625, -0.0199127197265625, -0.04144287109375, 0.0293426513671875, -0.0270538330078125, -0.0098724365234375, 0.02685546875, 0.01074981689453125, -0.033233642578125, 0.055816650390625, 0.0264434814453125, 0.01049041748046875, 0.0968017578125, 0.0565185546875, 0.021820068359375, 0.043670654296875, -0.049346923828125, 0.011260986328125, -0.06744384765625, -0.04296875, -0.0240936279296875, -0.01313018798828125, -0.041015625, -0.027374267578125, 0.01383209228515625, 0.00688934326171875, -0.04571533203125, 0.06329345703125, -0.0222320556640625, 0.0232086181640625, 0.031707763671875, 0.0272979736328125, 0.0193939208984375, -0.054107666015625, 0.024810791015625, -0.01432037353515625, -0.04315185546875, -0.01336669921875, 0.046661376953125, 0.0219879150390625, 0.0190277099609375, 0.0299224853515625, 0.032073974609375, -0.007843017578125, -0.01334381103515625, -0.0276336669921875, 0.037841796875, 0.0188446044921875, -0.0811767578125, 0.017791748046875, -0.0286712646484375, -0.05560302734375, 0.03802490234375, -0.0181732177734375, -0.03857421875, 0.0201873779296875, 0.01806640625, -0.03369140625, 0.0308837890625, -0.041534423828125, 0.052215576171875, -0.060577392578125, -0.035552978515625, -0.012176513671875, -0.02008056640625, 0.032257080078125, 0.01995849609375, 0.030303955078125, 0.0036144256591796875, -0.01178741455078125, 0.03302001953125, -0.0465087890625, 0.03985595703125, -0.02093505859375, 0.0171356201171875, -0.020416259765625, -0.01617431640625, 0.022674560546875, 0.0213775634765625, -0.004932403564453125, -0.036224365234375, -0.0096435546875, -0.047882080078125, -0.0265960693359375, 0.0361328125, -0.04620361328125, -0.0239410400390625, -0.06866455078125, -0.0156707763671875, -0.004261016845703125, 0.03302001953125, 0.085205078125, 0.04241943359375, -0.02093505859375, 0.026702880859375, 
0.037261962890625, -0.0104827880859375, 0.049285888671875, 0.033294677734375, -0.046600341796875, -0.03533935546875, 0.050079345703125, 0.00923919677734375, 0.025299072265625, 0.0185394287109375, 0.00955963134765625, -0.01727294921875, -0.0148468017578125, -0.01251983642578125, 0.041778564453125, -0.01103973388671875, -0.0157928466796875, -0.00046563148498535156, -0.02325439453125, -0.03314208984375, -0.0338134765625, -0.035430908203125, -0.01800537109375, -0.0626220703125, -0.005096435546875, 0.0196380615234375, 0.0701904296875, 0.04730224609375, 0.0303192138671875, -0.0305328369140625, 0.00251007080078125, 0.0295562744140625, 0.039398193359375, -0.0194854736328125, -0.053924560546875, 0.00565338134765625, 0.0235443115234375, -0.0604248046875, -0.053009033203125, 0.06573486328125, 0.00801849365234375, 0.0269622802734375, 0.0460205078125, 0.007709503173828125, 0.05035400390625, -0.00433349609375, 0.057830810546875, 0.051177978515625, -0.06781005859375, 0.045013427734375, -0.0418701171875, 0.04119873046875, 0.0138702392578125, 0.01372528076171875, -0.01055908203125, -0.03350830078125, -0.0838623046875, -0.048614501953125, 0.051666259765625, 0.036651611328125, -0.01186370849609375, 0.04351806640625, 0.01338958740234375, 0.006072998046875, 0.0148773193359375, -0.0611572265625, -0.036163330078125, -0.027435302734375, -0.00824737548828125, -0.00826263427734375, -0.01404571533203125, 0.00910186767578125, -0.049591064453125, 0.06524658203125, -0.01690673828125, 0.039642333984375, 0.0112762451171875, -0.00807952880859375, 0.0024814605712890625, 0.0005755424499511719, 0.0699462890625, 0.057281494140625, -0.041229248046875, 0.0012950897216796875, -0.01534271240234375, -0.0124664306640625, -0.027557373046875, -0.007450103759765625, 0.00531005859375, 0.01079559326171875, 0.01800537109375, 0.052215576171875, 0.055999755859375, -0.0248870849609375, 0.059478759765625, -0.0196685791015625, 0.017333984375, -0.037841796875, 0.0096282958984375, 0.012908935546875, 0.028350830078125, 
0.033050537109375, 0.00811004638671875, 0.0321044921875, -0.042633056640625, 0.017181396484375, 0.027191162109375, -0.0367431640625, -0.067626953125, 0.08416748046875, 0.029541015625, -0.035064697265625, 0.041748046875, -0.01488494873046875, -0.03875732421875, 0.0556640625, 0.043670654296875, 0.07025146484375, -0.013763427734375, 0.0291595458984375, 0.023162841796875, -0.0026607513427734375, 0.026580810546875, 0.048309326171875, 0.005245208740234375, -0.0244140625, 0.046234130859375, -0.017303466796875, -0.0306854248046875, 0.006011962890625, -0.061920166015625, 0.06658935546875, -0.039642333984375, -0.0093536376953125, 0.0230712890625, 0.023834228515625, -0.06890869140625, 0.049346923828125, -0.0004954338073730469, 0.09228515625, -0.052032470703125, 0.09429931640625, 0.00710296630859375, -0.0506591796875, -0.01352691650390625, -0.030120849609375, 0.006504058837890625, -0.072021484375, 0.03167724609375, 0.0250701904296875, -0.0071868896484375, -0.008880615234375, -0.049957275390625, -0.0643310546875, 0.11865234375, 0.047515869140625, -0.055816650390625, 0.01117706298828125, -0.018707275390625, 0.027099609375, -0.040618896484375, 0.0416259765625, -0.006649017333984375, 0.0283203125, 0.029449462890625, -0.05133056640625, -0.01357269287109375, -0.054473876953125, 0.006633758544921875, 0.00992584228515625, -0.09991455078125, 0.07403564453125, -0.01043701171875, -0.0164947509765625, 0.0085601806640625, 0.05096435546875, -0.0102081298828125, 0.00012958049774169922, 0.04571533203125, 0.01800537109375, 0.035797119140625, -0.0255584716796875, 0.0814208984375, -0.027191162109375, 0.044097900390625, 0.04754638671875, -0.017364501953125, 0.0244903564453125, 0.01415252685546875, -0.001255035400390625, 0.009185791015625, 0.06292724609375, -0.028656005859375, 0.0289306640625, -0.0271453857421875, -0.0271759033203125, -0.006572723388671875, -0.005794525146484375, -0.03485107421875, 0.0215301513671875, 0.015106201171875, -0.034515380859375, 0.0011587142944335938, 
-0.01340484619140625, -0.00914764404296875, -0.018768310546875, -0.0447998046875, 0.0223541259765625, -0.01488494873046875, -0.0169219970703125, 0.007038116455078125, -0.0234375, 0.0257415771484375, -0.037994384765625, 0.01526641845703125, 0.003398895263671875, 0.020416259765625, -0.0288238525390625, -0.041259765625, 0.00829315185546875, -0.005950927734375, 0.00001817941665649414, -0.021881103515625, 0.033172607421875, -0.01226043701171875, -0.07940673828125, 0.031768798828125, -0.026885986328125, 0.003604888916015625, 0.027130126953125, -0.059722900390625, 0.0186614990234375, 0.006938934326171875, -0.00024068355560302734, 0.0002378225326538086, -0.006317138671875, 0.0266876220703125, 0.07196044921875, 0.0011444091796875, 0.01203155517578125, 0.031036376953125, -0.024017333984375, 0.0416259765625, -0.06060791015625, -0.033538818359375, -0.042449951171875, 0.05352783203125, -0.02752685546875, -0.07110595703125, 0.0601806640625, 0.087646484375, 0.051544189453125, -0.0203704833984375, 0.037750244140625, -0.005321502685546875, 0.01425933837890625, -0.0361328125, 0.0631103515625, -0.04669189453125, 0.002590179443359375, 0.00897216796875, -0.050018310546875, -0.0205535888671875, 0.032318115234375, 0.01544189453125, 0.031341552734375, 0.03179931640625, 0.041534423828125, -0.0015230178833007812, 0.0250701904296875, 0.040771484375, 0.01812744140625, 0.04754638671875, 0.02911376953125, 0.03094482421875, -0.0721435546875, -0.0166168212890625, -0.04437255859375, -0.04730224609375, -0.046661376953125, -0.0465087890625, -0.03326416015625, -0.063720703125, -0.04754638671875, -0.00830078125, -0.0211334228515625, 0.05743408203125, 0.067138671875, -0.045806884765625, -0.01407623291015625, -0.0172119140625, -0.0003516674041748047, 0.0157318115234375, -0.0211181640625, 0.008056640625, 0.021636962890625, -0.0589599609375, -0.0070648193359375, 0.0107269287109375, 0.047393798828125, -0.0157928466796875, 0.01038360595703125, -0.0208587646484375, 0.019256591796875, 0.00765228271484375, 
0.018829345703125, -0.04107666015625, -0.034759521484375, -0.01506805419921875, -0.01288604736328125, 0.005161285400390625, 0.0379638671875, -0.0148162841796875, 0.013214111328125, 0.040496826171875, -0.0267791748046875, 0.0266876220703125, -0.0174560546875, 0.048797607421875, -0.01751708984375, 0.028839111328125, 0.03338623046875, 0.07427978515625, 0.0048980712890625, -0.022186279296875, 0.022674560546875, 0.025634765625, -0.055328369140625, -0.074951171875, 0.0212860107421875, -0.0968017578125, -0.01153564453125, 0.048309326171875, 0.01297760009765625, -0.041534423828125, 0.0200958251953125, -0.0248870849609375, 0.018463134765625, -0.00475311279296875, 0.0247955322265625, 0.046661376953125, -0.0038318634033203125, -0.0237884521484375, -0.02740478515625, 0.017608642578125, -0.0037860870361328125, -0.039093017578125, -0.0184783935546875, 0.02301025390625, 0.019744873046875, 0.025177001953125, 0.0246734619140625, -0.0235443115234375, 0.05426025390625, 0.020416259765625, 0.024749755859375, 0.01384735107421875, -0.0130615234375, -0.0213165283203125, 0.01146697998046875, 0.0016164779663085938, -0.051971435546875 ] ]
TheBloke/OpenOrcaxOpenChat-Preview2-13B-GPTQ
2023-09-27T12:45:18.000Z
[ "transformers", "safetensors", "llama", "text-generation", "en", "dataset:Open-Orca/OpenOrca", "arxiv:2306.02707", "arxiv:2301.13688", "arxiv:2307.09288", "license:llama2", "has_space", "text-generation-inference", "region:us" ]
text-generation
TheBloke
null
null
TheBloke/OpenOrcaxOpenChat-Preview2-13B-GPTQ
21
5,817
transformers
2023-08-03T09:03:29
--- language: - en license: llama2 library_name: transformers datasets: - Open-Orca/OpenOrca model_name: OpenOrca x OpenChat - Preview2 - 13B base_model: Open-Orca/OpenOrcaxOpenChat-Preview2-13B inference: false model_creator: Open-Orca model_type: llama pipeline_tag: text-generation prompt_template: 'GPT4 User: {prompt}<|end_of_turn|>GPT4 Assistant: ' quantized_by: TheBloke --- <!-- header start --> <!-- 200823 --> <div style="width: auto; margin-left: auto; margin-right: auto"> <img src="https://i.imgur.com/EBdldam.jpg" alt="TheBlokeAI" style="width: 100%; min-width: 400px; display: block; margin: auto;"> </div> <div style="display: flex; justify-content: space-between; width: 100%;"> <div style="display: flex; flex-direction: column; align-items: flex-start;"> <p style="margin-top: 0.5em; margin-bottom: 0em;"><a href="https://discord.gg/theblokeai">Chat & support: TheBloke's Discord server</a></p> </div> <div style="display: flex; flex-direction: column; align-items: flex-end;"> <p style="margin-top: 0.5em; margin-bottom: 0em;"><a href="https://www.patreon.com/TheBlokeAI">Want to contribute? TheBloke's Patreon page</a></p> </div> </div> <div style="text-align:center; margin-top: 0em; margin-bottom: 0em"><p style="margin-top: 0.25em; margin-bottom: 0em;">TheBloke's LLM work is generously supported by a grant from <a href="https://a16z.com">andreessen horowitz (a16z)</a></p></div> <hr style="margin-top: 1.0em; margin-bottom: 1.0em;"> <!-- header end --> # OpenOrca x OpenChat - Preview2 - 13B - GPTQ - Model creator: [Open-Orca](https://huggingface.co/Open-Orca) - Original model: [OpenOrca x OpenChat - Preview2 - 13B](https://huggingface.co/Open-Orca/OpenOrcaxOpenChat-Preview2-13B) <!-- description start --> ## Description This repo contains GPTQ model files for [Open-Orca's OpenOrca x OpenChat - Preview2 - 13B](https://huggingface.co/Open-Orca/OpenOrcaxOpenChat-Preview2-13B). 
Multiple GPTQ parameter permutations are provided; see Provided Files below for details of the options provided, their parameters, and the software used to create them. <!-- description end --> <!-- repositories-available start --> ## Repositories available * [AWQ model(s) for GPU inference.](https://huggingface.co/TheBloke/OpenOrcaxOpenChat-Preview2-13B-AWQ) * [GPTQ models for GPU inference, with multiple quantisation parameter options.](https://huggingface.co/TheBloke/OpenOrcaxOpenChat-Preview2-13B-GPTQ) * [Open-Orca's original unquantised fp16 model in pytorch format, for GPU inference and for further conversions](https://huggingface.co/Open-Orca/OpenOrcaxOpenChat-Preview2-13B) <!-- repositories-available end --> <!-- prompt-template start --> ## Prompt template: OpenChat ``` GPT4 User: {prompt}<|end_of_turn|>GPT4 Assistant: ``` <!-- prompt-template end --> <!-- README_GPTQ.md-provided-files start --> ## Provided files and GPTQ parameters Multiple quantisation parameters are provided, to allow you to choose the best one for your hardware and requirements. Each separate quant is in a different branch. See below for instructions on fetching from different branches. All recent GPTQ files are made with AutoGPTQ, and all files in non-main branches are made with AutoGPTQ. Files in the `main` branch which were uploaded before August 2023 were made with GPTQ-for-LLaMa. <details> <summary>Explanation of GPTQ parameters</summary> - Bits: The bit size of the quantised model. - GS: GPTQ group size. Higher numbers use less VRAM, but have lower quantisation accuracy. "None" is the lowest possible value. - Act Order: True or False. Also known as `desc_act`. True results in better quantisation accuracy. Some GPTQ clients have had issues with models that use Act Order plus Group Size, but this is generally resolved now. - Damp %: A GPTQ parameter that affects how samples are processed for quantisation. 0.01 is default, but 0.1 results in slightly better accuracy. 
- GPTQ dataset: The dataset used for quantisation. Using a dataset more appropriate to the model's training can improve quantisation accuracy. Note that the GPTQ dataset is not the same as the dataset used to train the model - please refer to the original model repo for details of the training dataset(s). - Sequence Length: The length of the dataset sequences used for quantisation. Ideally this is the same as the model sequence length. For some very long sequence models (16+K), a lower sequence length may have to be used. Note that a lower sequence length does not limit the sequence length of the quantised model. It only impacts the quantisation accuracy on longer inference sequences. - ExLlama Compatibility: Whether this file can be loaded with ExLlama, which currently only supports Llama models in 4-bit. </details> | Branch | Bits | GS | Act Order | Damp % | GPTQ Dataset | Seq Len | Size | ExLlama | Desc | | ------ | ---- | -- | --------- | ------ | ------------ | ------- | ---- | ------- | ---- | | [main](https://huggingface.co/TheBloke/OpenOrcaxOpenChat-Preview2-13B-GPTQ/tree/main) | 4 | 128 | No | 0.1 | [wikitext](https://huggingface.co/datasets/wikitext/viewer/wikitext-2-v1/test) | 4096 | 7.26 GB | Yes | 4-bit, without Act Order and group size 128g. | | [gptq-4bit-32g-actorder_True](https://huggingface.co/TheBloke/OpenOrcaxOpenChat-Preview2-13B-GPTQ/tree/gptq-4bit-32g-actorder_True) | 4 | 32 | Yes | 0.1 | [wikitext](https://huggingface.co/datasets/wikitext/viewer/wikitext-2-v1/test) | 4096 | 8.00 GB | Yes | 4-bit, with Act Order and group size 32g. Gives highest possible inference quality, with maximum VRAM usage. | | [gptq-4bit-64g-actorder_True](https://huggingface.co/TheBloke/OpenOrcaxOpenChat-Preview2-13B-GPTQ/tree/gptq-4bit-64g-actorder_True) | 4 | 64 | Yes | 0.1 | [wikitext](https://huggingface.co/datasets/wikitext/viewer/wikitext-2-v1/test) | 4096 | 7.51 GB | Yes | 4-bit, with Act Order and group size 64g. 
Uses less VRAM than 32g, but with slightly lower accuracy. | | [gptq-4bit-128g-actorder_True](https://huggingface.co/TheBloke/OpenOrcaxOpenChat-Preview2-13B-GPTQ/tree/gptq-4bit-128g-actorder_True) | 4 | 128 | Yes | 0.1 | [wikitext](https://huggingface.co/datasets/wikitext/viewer/wikitext-2-v1/test) | 4096 | 7.26 GB | Yes | 4-bit, with Act Order and group size 128g. Uses even less VRAM than 64g, but with slightly lower accuracy. | | [gptq-8bit--1g-actorder_True](https://huggingface.co/TheBloke/OpenOrcaxOpenChat-Preview2-13B-GPTQ/tree/gptq-8bit--1g-actorder_True) | 8 | None | Yes | 0.1 | [wikitext](https://huggingface.co/datasets/wikitext/viewer/wikitext-2-v1/test) | 4096 | 13.36 GB | No | 8-bit, with Act Order. No group size, to lower VRAM requirements. | | [gptq-8bit-128g-actorder_True](https://huggingface.co/TheBloke/OpenOrcaxOpenChat-Preview2-13B-GPTQ/tree/gptq-8bit-128g-actorder_True) | 8 | 128 | Yes | 0.1 | [wikitext](https://huggingface.co/datasets/wikitext/viewer/wikitext-2-v1/test) | 4096 | 13.65 GB | No | 8-bit, with group size 128g for higher inference quality and with Act Order for even higher accuracy. | <!-- README_GPTQ.md-provided-files end --> <!-- README_GPTQ.md-download-from-branches start --> ## How to download from branches - In text-generation-webui, you can add `:branch` to the end of the download name, eg `TheBloke/OpenOrcaxOpenChat-Preview2-13B-GPTQ:main` - With Git, you can clone a branch with: ``` git clone --single-branch --branch main https://huggingface.co/TheBloke/OpenOrcaxOpenChat-Preview2-13B-GPTQ ``` - In Python Transformers code, the branch is the `revision` parameter; see below. <!-- README_GPTQ.md-download-from-branches end --> <!-- README_GPTQ.md-text-generation-webui start --> ## How to easily download and use this model in [text-generation-webui](https://github.com/oobabooga/text-generation-webui). Please make sure you're using the latest version of [text-generation-webui](https://github.com/oobabooga/text-generation-webui). 
It is strongly recommended to use the text-generation-webui one-click-installers unless you're sure you know how to make a manual install. 1. Click the **Model tab**. 2. Under **Download custom model or LoRA**, enter `TheBloke/OpenOrcaxOpenChat-Preview2-13B-GPTQ`. - To download from a specific branch, enter for example `TheBloke/OpenOrcaxOpenChat-Preview2-13B-GPTQ:main` - see Provided Files above for the list of branches for each option. 3. Click **Download**. 4. The model will start downloading. Once it's finished it will say "Done". 5. In the top left, click the refresh icon next to **Model**. 6. In the **Model** dropdown, choose the model you just downloaded: `OpenOrcaxOpenChat-Preview2-13B-GPTQ` 7. The model will automatically load, and is now ready for use! 8. If you want any custom settings, set them and then click **Save settings for this model** followed by **Reload the Model** in the top right. * Note that you do not need to and should not set manual GPTQ parameters any more. These are set automatically from the file `quantize_config.json`. 9. Once you're ready, click the **Text Generation tab** and enter a prompt to get started! <!-- README_GPTQ.md-text-generation-webui end --> <!-- README_GPTQ.md-use-from-python start --> ## How to use this GPTQ model from Python code ### Install the necessary packages Requires: Transformers 4.32.0 or later, Optimum 1.12.0 or later, and AutoGPTQ 0.4.2 or later. ```shell pip3 install transformers>=4.32.0 optimum>=1.12.0 pip3 install auto-gptq --extra-index-url https://huggingface.github.io/autogptq-index/whl/cu118/ # Use cu117 if on CUDA 11.7 ``` If you have problems installing AutoGPTQ using the pre-built wheels, install it from source instead: ```shell pip3 uninstall -y auto-gptq git clone https://github.com/PanQiWei/AutoGPTQ cd AutoGPTQ pip3 install . ``` ### For CodeLlama models only: you must use Transformers 4.33.0 or later. 
If 4.33.0 is not yet released when you read this, you will need to install Transformers from source: ```shell pip3 uninstall -y transformers pip3 install git+https://github.com/huggingface/transformers.git ``` ### You can then use the following code ```python from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline model_name_or_path = "TheBloke/OpenOrcaxOpenChat-Preview2-13B-GPTQ" # To use a different branch, change revision # For example: revision="main" model = AutoModelForCausalLM.from_pretrained(model_name_or_path, device_map="auto", trust_remote_code=False, revision="main") tokenizer = AutoTokenizer.from_pretrained(model_name_or_path, use_fast=True) prompt = "Tell me about AI" prompt_template=f'''GPT4 User: {prompt}<|end_of_turn|>GPT4 Assistant: ''' print("\n\n*** Generate:") input_ids = tokenizer(prompt_template, return_tensors='pt').input_ids.cuda() output = model.generate(inputs=input_ids, temperature=0.7, do_sample=True, top_p=0.95, top_k=40, max_new_tokens=512) print(tokenizer.decode(output[0])) # Inference can also be done using transformers' pipeline print("*** Pipeline:") pipe = pipeline( "text-generation", model=model, tokenizer=tokenizer, max_new_tokens=512, do_sample=True, temperature=0.7, top_p=0.95, top_k=40, repetition_penalty=1.1 ) print(pipe(prompt_template)[0]['generated_text']) ``` <!-- README_GPTQ.md-use-from-python end --> <!-- README_GPTQ.md-compatibility start --> ## Compatibility The files provided are tested to work with AutoGPTQ, both via Transformers and using AutoGPTQ directly. They should also work with [Occ4m's GPTQ-for-LLaMa fork](https://github.com/0cc4m/KoboldAI). [ExLlama](https://github.com/turboderp/exllama) is compatible with Llama models in 4-bit. Please see the Provided Files table above for per-file compatibility. [Huggingface Text Generation Inference (TGI)](https://github.com/huggingface/text-generation-inference) is compatible with all GPTQ models. 
<!-- README_GPTQ.md-compatibility end --> <!-- footer start --> <!-- 200823 --> ## Discord For further support, and discussions on these models and AI in general, join us at: [TheBloke AI's Discord server](https://discord.gg/theblokeai) ## Thanks, and how to contribute Thanks to the [chirper.ai](https://chirper.ai) team! Thanks to Clay from [gpus.llm-utils.org](llm-utils)! I've had a lot of people ask if they can contribute. I enjoy providing models and helping people, and would love to be able to spend even more time doing it, as well as expanding into new projects like fine tuning/training. If you're able and willing to contribute it will be most gratefully received and will help me to keep providing more models, and to start work on new AI projects. Donaters will get priority support on any and all AI/LLM/model questions and requests, access to a private Discord room, plus other benefits. * Patreon: https://patreon.com/TheBlokeAI * Ko-Fi: https://ko-fi.com/TheBlokeAI **Special thanks to**: Aemon Algiz. **Patreon special mentions**: Alicia Loh, Stephen Murray, K, Ajan Kanaga, RoA, Magnesian, Deo Leter, Olakabola, Eugene Pentland, zynix, Deep Realms, Raymond Fosdick, Elijah Stavena, Iucharbius, Erik Bjäreholt, Luis Javier Navarrete Lozano, Nicholas, theTransient, John Detwiler, alfie_i, knownsqashed, Mano Prime, Willem Michiel, Enrico Ros, LangChain4j, OG, Michael Dempsey, Pierre Kircher, Pedro Madruga, James Bentley, Thomas Belote, Luke @flexchar, Leonard Tan, Johann-Peter Hartmann, Illia Dulskyi, Fen Risland, Chadd, S_X, Jeff Scroggin, Ken Nordquist, Sean Connelly, Artur Olbinski, Swaroop Kallakuri, Jack West, Ai Maven, David Ziegler, Russ Johnson, transmissions 11, John Villwock, Alps Aficionado, Clay Pascal, Viktor Bowallius, Subspace Studios, Rainer Wilmers, Trenton Dambrowitz, vamX, Michael Levine, 준교 김, Brandon Frisco, Kalila, Trailburnt, Randy H, Talal Aujan, Nathan Dryer, Vadim, 阿明, ReadyPlayerEmma, Tiffany J. 
Kim, George Stoitzev, Spencer Kim, Jerry Meng, Gabriel Tamborski, Cory Kujawski, Jeffrey Morgan, Spiking Neurons AB, Edmond Seymore, Alexandros Triantafyllidis, Lone Striker, Cap'n Zoog, Nikolai Manek, danny, ya boyyy, Derek Yates, usrbinkat, Mandus, TL, Nathan LeClaire, subjectnull, Imad Khwaja, webtim, Raven Klaugh, Asp the Wyvern, Gabriel Puliatti, Caitlyn Gatomon, Joseph William Delisle, Jonathan Leane, Luke Pendergrass, SuperWojo, Sebastain Graf, Will Dee, Fred von Graf, Andrey, Dan Guido, Daniel P. Andersen, Nitin Borwankar, Elle, Vitor Caleffi, biorpg, jjj, NimbleBox.ai, Pieter, Matthew Berman, terasurfer, Michael Davis, Alex, Stanislav Ovsiannikov Thank you to all my generous patrons and donaters! And thank you again to a16z for their generous grant. <!-- footer end --> # Original model card: Open-Orca's OpenOrca x OpenChat - Preview2 - 13B <p><h1>🐋 The Second OpenOrca Model Preview! 🐋</h1></p> ![OpenOrca Logo](https://huggingface.co/datasets/Open-Orca/OpenOrca/resolve/main/OpenOrcaLogo.png "OpenOrca Logo") # OpenOrca x OpenChat - Preview2 - 13B We have used our own [OpenOrca dataset](https://huggingface.co/datasets/Open-Orca/OpenOrca) to fine-tune Llama2-13B using [OpenChat](https://huggingface.co/openchat) packing. This dataset is our attempt to reproduce the dataset generated for Microsoft Research's [Orca Paper](https://arxiv.org/abs/2306.02707). This second preview release is trained on a curated filtered subset of most of our GPT-4 augmented data. This release highlights that our dataset and training methods have surpassed performance parity with the Orca paper. We measured this with BigBench-Hard and AGIEval results with the same methods as used in the Orca paper, finding **~103%** of original Orca's performance on average. As well, this is done with <1/10th the compute requirement and using <20% of the dataset size from the original Orca paper. 
We have run extensive evaluations internally and expect this model to **place number 1** on both the HuggingFaceH4 Open LLM Leaderboard and the GPT4ALL Leaderboard for 13B models. "One" of [OpenChat](https://huggingface.co/openchat) has joined our team, and we'd like to provide special thanks for their training of this model! We have utilized OpenChat [MultiPack algorithm](https://github.com/imoneoi/multipack_sampler) which achieves 99.85% bin-packing efficiency on our dataset. This has significantly reduced training time, with efficiency improvement of 3-10X over traditional methods. <img src="https://raw.githubusercontent.com/imoneoi/openchat/master/assets/logo_new.png" style="width: 40%"> Want to visualize our full (pre-filtering) dataset? Check out our [Nomic Atlas Map](https://atlas.nomic.ai/map/c1b88b47-2d9b-47e0-9002-b80766792582/2560fd25-52fe-42f1-a58f-ff5eccc890d2). [<img src="https://huggingface.co/Open-Orca/OpenOrca-Preview1-13B/resolve/main/OpenOrca%20Nomic%20Atlas.png" alt="Atlas Nomic Dataset Map" width="400" height="400" />](https://atlas.nomic.ai/map/c1b88b47-2d9b-47e0-9002-b80766792582/2560fd25-52fe-42f1-a58f-ff5eccc890d2) We are in-process with training more models, so keep a look out on our org for releases coming soon with exciting partners. We will also give sneak-peak announcements on our Discord, which you can find here: https://AlignmentLab.ai # Prompt Template We use our own prompt template which we call "`OpenChat Llama2 V1`". The model is heavily conditioned to work using this format only and will likely encounter issues such as run-on output which emulates a chat between a user and assistant if this format is not properly followed. 
Examples: ``` # Single-turn `OpenChat Llama2 V1` tokenize("You are OpenOrcaChat.<|end_of_turn|>User: Hello<|end_of_turn|>Assistant:") # [1, 887, 526, 4673, 2816, 1113, 1451, 271, 29889, 32000, 4911, 29901, 15043, 32000, 4007, 22137, 29901] # Multi-turn `OpenChat Llama2 V1` tokenize("You are OpenOrcaChat.<|end_of_turn|>User: Hello<|end_of_turn|>Assistant: Hi<|end_of_turn|>User: How are you today?<|end_of_turn|>Assistant:") # [1, 887, 526, 4673, 2816, 1113, 1451, 271, 29889, 32000, 4911, 29901, 15043, 32000, 4007, 22137, 29901, 6324, 32000, 4911, 29901, 1128, 526, 366, 9826, 29973, 32000, 4007, 22137, 29901] ``` For UIs with Prefix and Suffix fields, these will likely work: Prefix (include a space after colon): ``` User: ``` Suffix (space after colon): ``` <|end_of_turn|>\nAssistant: ``` **Oobabooga's text-generation-webui instructions can be found [further down the page](https://huggingface.co/Open-Orca/OpenOrcaxOpenChat-Preview2-13B#serving-with-oobabooga--text-generation-webui).** # Evaluation We have evaluated **OpenOrcaxOpenChat-Preview2-13B** on hard reasoning tasks from BigBench-Hard and AGIEval as outlined in the Orca paper. Our average performance for BigBench-Hard: 0.488 Average for AGIEval: 0.447 We find our score averages to **~103%** of the total performance that was shown in the Orca paper, using the same evaluation methods as outlined in the paper. So we are surpassing Orca performance with <20% of the dataset size and <1/10th the training budget! As well, we have evaluated using the methodology and tools for the HuggingFace Leaderboard and GPT4ALL Leaderboard, and find that we place #1 on both for all 13B models at release time! ## AGIEval Performance We present our results in two columns. The column for "`(Orca Paper eval)`" uses the methods outlined in the Orca paper, so as to be a direct apples-to-apples comparison with the results from the paper. 
The column for "`(HF Leaderboard eval)`" uses EleutherAI's LM Evaluation Harness with settings outlined by HuggingFace. These results are not comparable to the other columns, as the methods are different. ![OpenOrca Preview2 AGIEval Performance](https://huggingface.co/Open-Orca/OpenOrcaxOpenChat-Preview2-13B/resolve/main/Images/OpenOrcaP2AGIEval.png "AGIEval Performance") ## BigBench-Hard Performance We present our results in two columns. The column for "`(Orca Paper eval)`" uses the methods outlined in the Orca paper, so as to be a direct apples-to-apples comparison with the results from the paper. The column for "`(HF Leaderboard eval)`" uses EleutherAI's LM Evaluation Harness with settings outlined by HuggingFace. These results are not comparable to the other columns, as the methods are different. ![OpenOrca Preview2 BigBench-Hard Performance](https://huggingface.co/Open-Orca/OpenOrcaxOpenChat-Preview2-13B/resolve/main/Images/OpenOrcaP2BigBenchHardEval.png "BigBench-Hard Performance") ## HuggingFaceH4 Open LLM Leaderboard Performance We have run our own tests using parameters matching the [HuggingFaceH4 Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard) evals. We place #1 for all 13B models at release time! ![OpenOrca Preview2 HuggingFace Leaderboard Internal Performance](https://huggingface.co/Open-Orca/OpenOrcaxOpenChat-Preview2-13B/resolve/main/Images/OpenOrcaP2HuggingFaceLeaderboard.png "HuggingFace Leaderboard Internal Performance") **Update Aug 10th:** The official results on the leaderboard are below. ![OpenOrca Preview2 HuggingFace Leaderboard Performance](https://huggingface.co/Open-Orca/OpenOrcaxOpenChat-Preview2-13B/resolve/main/Images/OpenOrcaP2HFLeaderboardOfficial.png "HuggingFace Leaderboard Performance") Since our release, a new model which merges an Orca-style model with a Platypus (trained on STEM and logic) model places narrowly above ours, but we were #1 at release time. 
Below we also highlight how our model fits relative to models of all sizes on the current (as of Aug 10th, 2023) leaderboard. ![OpenOrca Preview2 HuggingFace Leaderboard Performance](https://huggingface.co/Open-Orca/OpenOrcaxOpenChat-Preview2-13B/resolve/main/Images/OpenOrcaP2HFLeaderboardFull.png "HuggingFace Full Leaderboard") Notably, performance is beyond falcon-40b-instruct, and close to LLaMA1-65B base. ## GPT4ALL Leaderboard Performance We have tested using parameters matching the GPT4ALL Benchmark Suite and report our results and placement vs their official reporting below. We place #1 for all open models and come within comparison of `text-davinci-003`, a proprietary OpenAI model an order of magnitude larger. ![OpenOrca Preview2 GPT4ALL Performance](https://huggingface.co/Open-Orca/OpenOrcaxOpenChat-Preview2-13B/resolve/main/Images/OpenOrcaP2GPT4ALL_Leaderboard.png "GPT4ALL Performance") # Dataset We used a curated, filtered selection of most of the GPT-4 augmented data from our OpenOrca dataset, which aims to reproduce the Orca Research Paper dataset. Further details of our curation practices will be forthcoming with our full model releases. # Training We trained with 8x A100-80G GPUs for 46 hours, completing 5 epochs of full fine tuning on our dataset in one training run. This contrasts with the 20x A100-80G GPUs for 200 hours used in the Orca paper, for only 3 epochs, and requiring stacked training (which is known to suffer catastrophic forgetting). Our compute requirement was <1/10th that of the original Orca. Commodity cost was ~$600. Please await our full releases for further training details. # Serving This model is most easily served with [OpenChat's](https://github.com/imoneoi/openchat) customized vLLM OpenAI-compatible API server. This is highly recommended as it is by far the fastest in terms of inference speed and is a quick and easy option for setup. We also illustrate setup of Oobabooga/text-generation-webui below. 
The settings outlined there will also apply to other uses of `Transformers`. ## Serving Quantized Pre-quantized models are now available courtesy of our friend TheBloke: * **GGML**: https://huggingface.co/TheBloke/OpenOrcaxOpenChat-Preview2-13B-GGML * **GPTQ**: https://huggingface.co/TheBloke/OpenOrcaxOpenChat-Preview2-13B-GPTQ The serving instructions below only apply to the unquantized model being presented in the repository you are viewing here. There are some notes, such as on use of the prompt format, that will still apply to the quantized models though. ## Serving with OpenChat [Install OpenChat](https://github.com/imoneoi/openchat/#installation) After installation, run: ```bash python -m ochat.serving.openai_api_server \ --model-type openchat_llama2 \ --model Open-Orca/OpenOrcaxOpenChat-Preview2-13B \ --engine-use-ray --worker-use-ray --max-num-batched-tokens 5120 ``` Follow the OpenChat documentation to use features such as tensor parallelism on consumer GPUs, API keys, and logging. You may then connect to the OpenAI-compatible API endpoint with tools such as [BetterGPT.chat](https://bettergpt.chat). ## Serving with Oobabooga / text-generation-webui The model may also be loaded via [oobabooga/text-generation-webui](https://github.com/oobabooga/text-generation-webui/) in a similar manner to other models. See the requirements below. Note that inference with just the Transformers library is significantly slower than using the recommended OpenChat vLLM server. ### Oobabooga Key Requirements * You will first need to download the model as you normally do to the "`models/`" folder of your `text-generation-webui` installation. * To use the unquantized model presented here, select "`Transformers`"" in the webui's "`Model`" tab "`Model loader`" dropdown. * You will likely want to tick "`auto-devices`". The model will require >40GB VRAM after loading in context for inference. * The model was trained in bf16, so tick the "`bf16`" box for best performance. 
* It will run safely on single GPUs with VRAM >=48GB (e.g. A6000) * If using consumer GPUs, e.g. 2x RTX3090 24GB, you will likely want to enter "18,17" under "`tensor_split`" to split the model across both GPUs * The model will perform significantly better if you use the appropriate prompting template * We will submit a PR to include our prompting template into text-generation-webui soon * For now, manually enter the settings described in the following sections: ### Oobabooga Chat Settings In the "`Chat settings`" tab, select the following settings: For "`User String`" ... ``` User: ``` For "`Bot string`" ... ``` Assistant: ``` For "`Context`", this is analogous to system prompt. It is not necessary, but we have found good results with the below example. System prompts used in the Orca training also work well. ... ``` You are a helpful assistant. Please answer truthfully and write out your thinking step by step to be sure you get the right answer. If you make a mistake or encounter an error in your thinking, say so out loud and attempt to correct it. If you don't know or aren't sure about something, say so clearly. You will act as a professional logician, mathematician, and physicist. You will also act as the most appropriate type of expert to answer any particular question or solve the relevant problem; state which expert type your are, if so. Also think of any particular named expert that would be ideal to answer the relevant question or solve the relevant problem; name and act as them, if appropriate. ``` For "`Turn template`", this is absolutely essential to have. You will get poor, mixed up output without this template ... 
``` <|user|> <|user-message|><|end_of_turn|>\n<|bot|> <|bot-message|>\n ``` When done, it should look as below: <img src="https://huggingface.co/Open-Orca/OpenOrcaxOpenChat-Preview2-13B/resolve/main/Images/OpenOrcaLlama2OobaboogaChatInstructionTemplate.png" style="width: 40%"> You may then save this as a named template preset by clicking the "Floppy" icon and giving it an appropriate name in the popup, e.g. "`OpenOrcaxOpenChat Llama2`". ### Oobabooga Text Generation Mode In the "`Text generation`" tab, select "`instruct`" as the mode: #### Mode Illustration It should look as below: <img src="https://huggingface.co/Open-Orca/OpenOrcaxOpenChat-Preview2-13B/resolve/main/Images/OpenOrcaLlama2OobaboogaInstructMode.png" style="width: 40%"> Then you should be ready to generate! # Citation ```bibtex @software{OpenOrcaxOpenChatPreview2, title = {OpenOrcaxOpenChatPreview2: Llama2-13B Model Instruct-tuned on Filtered OpenOrcaV1 GPT-4 Dataset}, author = {Guan Wang and Bleys Goodson and Wing Lian and Eugene Pentland and Austin Cook and Chanvichet Vong and "Teknium"}, year = {2023}, publisher = {HuggingFace}, journal = {HuggingFace repository}, howpublished = {\url{https://https://huggingface.co/Open-Orca/OpenOrcaxOpenChat-Preview2-13B}, } @software{openchat, title = {{OpenChat: Advancing Open-source Language Models with Imperfect Data}}, author = {Wang, Guan and Cheng, Sijie and Yu, Qiying and Liu, Changling}, doi = {10.5281/zenodo.8105775}, url = {https://github.com/imoneoi/openchat}, version = {pre-release}, year = {2023}, month = {7}, } @misc{mukherjee2023orca, title={Orca: Progressive Learning from Complex Explanation Traces of GPT-4}, author={Subhabrata Mukherjee and Arindam Mitra and Ganesh Jawahar and Sahaj Agarwal and Hamid Palangi and Ahmed Awadallah}, year={2023}, eprint={2306.02707}, archivePrefix={arXiv}, primaryClass={cs.CL} } @misc{longpre2023flan, title={The Flan Collection: Designing Data and Methods for Effective Instruction Tuning}, author={Shayne Longpre and 
Le Hou and Tu Vu and Albert Webson and Hyung Won Chung and Yi Tay and Denny Zhou and Quoc V. Le and Barret Zoph and Jason Wei and Adam Roberts}, year={2023}, eprint={2301.13688}, archivePrefix={arXiv}, primaryClass={cs.AI} } @misc{touvron2023llama, title={Llama 2: Open Foundation and Fine-Tuned Chat Models}, author={Hugo Touvron and Louis Martin and Kevin Stone and Peter Albert and Amjad Almahairi and Yasmine Babaei and Nikolay Bashlykov and Soumya Batra and Prajjwal Bhargava and Shruti Bhosale and Dan Bikel and Lukas Blecher and Cristian Canton Ferrer and Moya Chen and Guillem Cucurull and David Esiobu and Jude Fernandes and Jeremy Fu and Wenyin Fu and Brian Fuller and Cynthia Gao and Vedanuj Goswami and Naman Goyal and Anthony Hartshorn and Saghar Hosseini and Rui Hou and Hakan Inan and Marcin Kardas and Viktor Kerkez and Madian Khabsa and Isabel Kloumann and Artem Korenev and Punit Singh Koura and Marie-Anne Lachaux and Thibaut Lavril and Jenya Lee and Diana Liskovich and Yinghai Lu and Yuning Mao and Xavier Martinet and Todor Mihaylov and Pushkar Mishra and Igor Molybog and Yixin Nie and Andrew Poulton and Jeremy Reizenstein and Rashi Rungta and Kalyan Saladi and Alan Schelten and Ruan Silva and Eric Michael Smith and Ranjan Subramanian and Xiaoqing Ellen Tan and Binh Tang and Ross Taylor and Adina Williams and Jian Xiang Kuan and Puxin Xu and Zheng Yan and Iliyan Zarov and Yuchen Zhang and Angela Fan and Melanie Kambadur and Sharan Narang and Aurelien Rodriguez and Robert Stojnic and Sergey Edunov and Thomas Scialom}, year={2023}, eprint={2307.09288}, archivePrefix={arXiv}, } ```
30,837
[ [ -0.040130615234375, -0.06640625, 0.0017547607421875, 0.0206451416015625, -0.0168914794921875, -0.0112457275390625, 0.00418853759765625, -0.0499267578125, 0.0235137939453125, 0.02783203125, -0.045166015625, -0.040985107421875, -0.01654052734375, -0.0082855224609375, -0.0251922607421875, 0.077880859375, 0.00608062744140625, -0.022857666015625, 0.0004355907440185547, -0.0220489501953125, -0.0244140625, -0.02972412109375, -0.06646728515625, -0.00978851318359375, 0.0322265625, 0.022979736328125, 0.060882568359375, 0.036285400390625, 0.01522064208984375, 0.024200439453125, -0.00943756103515625, 0.004680633544921875, -0.05267333984375, -0.01081085205078125, 0.0116729736328125, -0.0255279541015625, -0.050262451171875, 0.006664276123046875, 0.0270233154296875, 0.0170745849609375, -0.027313232421875, 0.01430511474609375, 0.0081939697265625, 0.046478271484375, -0.043670654296875, 0.0207366943359375, -0.028228759765625, -0.0035457611083984375, -0.01447296142578125, 0.0102386474609375, -0.01190948486328125, -0.041595458984375, 0.01141357421875, -0.06304931640625, 0.0005650520324707031, 0.004276275634765625, 0.08953857421875, -0.007152557373046875, -0.042633056640625, 0.00017189979553222656, -0.038238525390625, 0.040191650390625, -0.06866455078125, 0.02349853515625, 0.0362548828125, 0.022979736328125, -0.0261993408203125, -0.0635986328125, -0.04730224609375, -0.004413604736328125, -0.008026123046875, 0.0243682861328125, -0.032867431640625, 0.004230499267578125, 0.024261474609375, 0.05499267578125, -0.06671142578125, -0.0050048828125, -0.0311431884765625, -0.0161895751953125, 0.05712890625, 0.0156402587890625, 0.0285491943359375, -0.005401611328125, -0.015167236328125, -0.036407470703125, -0.044281005859375, 0.014404296875, 0.02984619140625, 0.011871337890625, -0.0556640625, 0.03778076171875, -0.025787353515625, 0.0325927734375, 0.00728607177734375, -0.0074310302734375, 0.027801513671875, -0.04486083984375, -0.0328369140625, -0.0270233154296875, 0.1004638671875, 
0.0238800048828125, -0.01336669921875, 0.026824951171875, 0.007305145263671875, -0.01015472412109375, -0.0006918907165527344, -0.0718994140625, -0.0280609130859375, 0.033477783203125, -0.040924072265625, -0.022430419921875, 0.0007567405700683594, -0.05413818359375, 0.001964569091796875, 0.00861358642578125, 0.02923583984375, -0.04296875, -0.03350830078125, 0.01505279541015625, -0.0207977294921875, 0.031768798828125, 0.0323486328125, -0.05645751953125, 0.039703369140625, 0.0174713134765625, 0.060760498046875, 0.0113525390625, -0.00936126708984375, -0.00970458984375, -0.0036907196044921875, -0.01509857177734375, 0.035491943359375, -0.00844573974609375, -0.03802490234375, -0.0204620361328125, 0.0199127197265625, 0.004459381103515625, -0.0138702392578125, 0.04571533203125, -0.01910400390625, 0.0325927734375, -0.0391845703125, -0.028076171875, -0.0231475830078125, 0.005596160888671875, -0.052276611328125, 0.09783935546875, 0.027496337890625, -0.0689697265625, 0.01319122314453125, -0.054718017578125, -0.01337432861328125, -0.00865936279296875, 0.00293731689453125, -0.032867431640625, -0.01373291015625, 0.0203399658203125, 0.0211029052734375, -0.025238037109375, -0.01157379150390625, -0.034881591796875, -0.01910400390625, 0.0139007568359375, -0.03277587890625, 0.0980224609375, 0.014984130859375, -0.0213623046875, -0.0037975311279296875, -0.048614501953125, 0.0077972412109375, 0.038543701171875, -0.01800537109375, -0.01445770263671875, -0.01348876953125, 0.0025005340576171875, 0.006214141845703125, 0.0175323486328125, -0.040985107421875, 0.0310821533203125, -0.0296783447265625, 0.04974365234375, 0.04486083984375, -0.0006499290466308594, 0.0243682861328125, -0.02984619140625, 0.032379150390625, 0.00940704345703125, 0.05078125, -0.00579833984375, -0.057830810546875, -0.058197021484375, -0.0137786865234375, 0.023162841796875, 0.044586181640625, -0.05108642578125, 0.041900634765625, -0.01373291015625, -0.062744140625, -0.0294647216796875, -0.0175933837890625, 0.024169921875, 
0.0194854736328125, 0.0243377685546875, -0.04656982421875, -0.02392578125, -0.05633544921875, 0.01546478271484375, -0.03985595703125, 0.002960205078125, 0.048431396484375, 0.05828857421875, -0.01300048828125, 0.06927490234375, -0.05255126953125, -0.00933837890625, 0.00121307373046875, 0.00728607177734375, 0.015380859375, 0.04400634765625, 0.065673828125, -0.054534912109375, -0.044403076171875, -0.0007672309875488281, -0.04522705078125, -0.000621795654296875, 0.002834320068359375, -0.033721923828125, 0.0211639404296875, 0.002597808837890625, -0.08258056640625, 0.059539794921875, 0.046478271484375, -0.042938232421875, 0.057586669921875, -0.0172882080078125, 0.011016845703125, -0.0826416015625, 0.01082611083984375, 0.0062255859375, -0.022430419921875, -0.0323486328125, -0.0034694671630859375, -0.0019159317016601562, 0.006847381591796875, -0.034210205078125, 0.0570068359375, -0.0419921875, 0.0009441375732421875, 0.01012420654296875, 0.00075531005859375, 0.0167083740234375, 0.040252685546875, -0.02203369140625, 0.057952880859375, 0.037078857421875, -0.0309295654296875, 0.039337158203125, 0.03863525390625, -0.007419586181640625, 0.015777587890625, -0.06683349609375, 0.018218994140625, 0.011871337890625, 0.0374755859375, -0.0860595703125, -0.01497650146484375, 0.044219970703125, -0.050537109375, 0.0241241455078125, -0.016510009765625, -0.0305938720703125, -0.039154052734375, -0.043731689453125, 0.027191162109375, 0.049285888671875, -0.033233642578125, 0.0290374755859375, 0.03216552734375, 0.00196075439453125, -0.049285888671875, -0.047821044921875, -0.00998687744140625, -0.010650634765625, -0.05322265625, 0.033416748046875, -0.006725311279296875, 0.0018157958984375, 0.0022296905517578125, -0.0004432201385498047, -0.006359100341796875, -0.00949859619140625, 0.0279541015625, 0.0186004638671875, -0.01068878173828125, -0.016265869140625, 0.0128936767578125, 0.0032806396484375, -0.0034027099609375, -0.0217132568359375, 0.042816162109375, -0.0137481689453125, 
-0.005931854248046875, -0.03033447265625, 0.0188140869140625, 0.04107666015625, -0.006412506103515625, 0.06573486328125, 0.058197021484375, -0.022735595703125, 0.01003265380859375, -0.0323486328125, -0.0127410888671875, -0.03826904296875, 0.01104736328125, -0.01617431640625, -0.0609130859375, 0.05157470703125, 0.039947509765625, 0.0247039794921875, 0.04498291015625, 0.033599853515625, 0.006683349609375, 0.07489013671875, 0.034881591796875, -0.021392822265625, 0.041595458984375, -0.03936767578125, -0.007320404052734375, -0.052276611328125, -0.01629638671875, -0.039154052734375, -0.01129913330078125, -0.060150146484375, -0.032867431640625, 0.02252197265625, 0.01473236083984375, -0.04974365234375, 0.048736572265625, -0.046142578125, 0.0122528076171875, 0.042633056640625, 0.024383544921875, 0.0141143798828125, -0.002902984619140625, -0.01519012451171875, 0.01073455810546875, -0.037811279296875, -0.0283660888671875, 0.0841064453125, 0.03143310546875, 0.056121826171875, 0.0181121826171875, 0.03082275390625, 0.0029468536376953125, 0.0210418701171875, -0.037353515625, 0.044677734375, 0.01070404052734375, -0.050079345703125, -0.0272216796875, -0.049163818359375, -0.076416015625, 0.01093292236328125, -0.006549835205078125, -0.06939697265625, 0.026031494140625, -0.0011920928955078125, -0.0183868408203125, 0.0168609619140625, -0.052276611328125, 0.08087158203125, -0.0014619827270507812, -0.0192413330078125, -0.0018987655639648438, -0.055633544921875, 0.0304412841796875, 0.0199127197265625, -0.0038471221923828125, -0.00931549072265625, -0.0167999267578125, 0.056243896484375, -0.069091796875, 0.0618896484375, -0.0130157470703125, -0.007709503173828125, 0.04193115234375, -0.006565093994140625, 0.039764404296875, 0.004756927490234375, 0.007503509521484375, 0.0313720703125, 0.027740478515625, -0.0294647216796875, -0.03167724609375, 0.043121337890625, -0.0762939453125, -0.0258331298828125, -0.0258026123046875, -0.0305938720703125, 0.0029582977294921875, 0.005603790283203125, 
0.035003662109375, 0.034393310546875, -0.0180511474609375, 0.00656890869140625, 0.041778564453125, -0.036285400390625, 0.03033447265625, 0.0271453857421875, -0.0290374755859375, -0.0458984375, 0.054718017578125, 0.007488250732421875, 0.022857666015625, 0.0259246826171875, 0.006351470947265625, -0.03857421875, -0.03082275390625, -0.042449951171875, 0.0258636474609375, -0.035064697265625, -0.023468017578125, -0.053253173828125, -0.024078369140625, -0.038421630859375, 0.0233917236328125, -0.03302001953125, -0.04693603515625, -0.04058837890625, 0.004180908203125, 0.064697265625, 0.03082275390625, -0.007549285888671875, 0.0217742919921875, -0.06378173828125, 0.01264190673828125, 0.0260009765625, 0.0181427001953125, 0.005584716796875, -0.047637939453125, -0.00833892822265625, 0.02838134765625, -0.050445556640625, -0.068115234375, 0.04388427734375, 0.01343536376953125, 0.027740478515625, 0.038421630859375, 0.0173492431640625, 0.063232421875, -0.01192474365234375, 0.07891845703125, 0.0189666748046875, -0.0662841796875, 0.040802001953125, -0.04705810546875, 0.0126800537109375, 0.02984619140625, 0.042816162109375, -0.02813720703125, -0.0272216796875, -0.05548095703125, -0.0557861328125, 0.04583740234375, 0.040313720703125, 0.0117950439453125, 0.0025730133056640625, 0.044189453125, -0.002964019775390625, 0.00598907470703125, -0.0633544921875, -0.046661376953125, -0.0294036865234375, -0.0098419189453125, 0.01203155517578125, -0.00691986083984375, -0.0066986083984375, -0.042724609375, 0.059783935546875, -0.01047515869140625, 0.0576171875, 0.0187530517578125, 0.0211944580078125, -0.0038928985595703125, 0.00806427001953125, 0.034576416015625, 0.04327392578125, -0.01849365234375, -0.02838134765625, 0.01032257080078125, -0.06573486328125, 0.002071380615234375, 0.0199127197265625, -0.0024547576904296875, -0.01107025146484375, 0.005462646484375, 0.062744140625, -0.00876617431640625, -0.03070068359375, 0.043731689453125, -0.0311431884765625, -0.0251617431640625, -0.0190887451171875, 
0.016387939453125, 0.0193939208984375, 0.0253143310546875, 0.0267333984375, -0.0229034423828125, 0.014892578125, -0.04925537109375, -0.0014057159423828125, 0.0362548828125, -0.0184326171875, -0.0306549072265625, 0.058563232421875, 0.0037975311279296875, 0.001827239990234375, 0.053741455078125, -0.0206756591796875, -0.0244598388671875, 0.057464599609375, 0.03265380859375, 0.0538330078125, -0.0169830322265625, 0.0168914794921875, 0.040679931640625, 0.0106353759765625, -0.00936126708984375, 0.0335693359375, 0.00009137392044067383, -0.053955078125, -0.021514892578125, -0.036102294921875, -0.02825927734375, 0.024200439453125, -0.059906005859375, 0.0187530517578125, -0.0272369384765625, -0.0306854248046875, -0.00867462158203125, 0.0181732177734375, -0.035308837890625, 0.007595062255859375, -0.007694244384765625, 0.08056640625, -0.04840087890625, 0.0653076171875, 0.0458984375, -0.034942626953125, -0.07672119140625, -0.01885986328125, 0.01055908203125, -0.04254150390625, 0.01236724853515625, 0.0070953369140625, 0.02667236328125, 0.0010881423950195312, -0.0577392578125, -0.0638427734375, 0.10693359375, 0.02545166015625, -0.0369873046875, -0.0086212158203125, 0.001239776611328125, 0.027252197265625, -0.0103607177734375, 0.0653076171875, 0.037689208984375, 0.0241241455078125, 0.0182037353515625, -0.0889892578125, 0.03118896484375, -0.036895751953125, 0.0023136138916015625, 0.0178375244140625, -0.07769775390625, 0.07525634765625, -0.005428314208984375, -0.0190582275390625, 0.0209197998046875, 0.0478515625, 0.0218505859375, -0.0030689239501953125, 0.03143310546875, 0.05645751953125, 0.04656982421875, -0.0264892578125, 0.078125, -0.00716400146484375, 0.041656494140625, 0.058074951171875, -0.0013017654418945312, 0.057220458984375, 0.0117950439453125, -0.045562744140625, 0.042633056640625, 0.0755615234375, 0.0021495819091796875, 0.029144287109375, -0.00652313232421875, -0.0169525146484375, 0.005832672119140625, 0.006439208984375, -0.049407958984375, 0.017242431640625, 
0.0311126708984375, -0.01543426513671875, -0.0015659332275390625, -0.006771087646484375, 0.007488250732421875, -0.049774169921875, -0.00836944580078125, 0.047943115234375, 0.0189971923828125, -0.0211334228515625, 0.0679931640625, -0.003368377685546875, 0.049957275390625, -0.042633056640625, -0.01065826416015625, -0.0289764404296875, -0.0014925003051757812, -0.0179595947265625, -0.058197021484375, 0.014404296875, -0.0196533203125, 0.0007338523864746094, 0.0031642913818359375, 0.048126220703125, -0.0123748779296875, -0.01406097412109375, 0.0277557373046875, 0.03179931640625, 0.0256500244140625, -0.01251220703125, -0.08135986328125, 0.0228271484375, 0.007785797119140625, -0.052001953125, 0.0323486328125, 0.034210205078125, 0.005992889404296875, 0.0572509765625, 0.049224853515625, -0.007488250732421875, -0.0046539306640625, -0.00821685791015625, 0.08355712890625, -0.054931640625, -0.0287322998046875, -0.059539794921875, 0.045318603515625, -0.01203155517578125, -0.038330078125, 0.060211181640625, 0.0445556640625, 0.057098388671875, 0.010162353515625, 0.04620361328125, -0.040679931640625, 0.0207061767578125, -0.0226593017578125, 0.0576171875, -0.049591064453125, 0.00665283203125, -0.034332275390625, -0.052978515625, 0.004909515380859375, 0.0518798828125, -0.00983428955078125, 0.0192718505859375, 0.034942626953125, 0.075439453125, -0.004589080810546875, 0.023956298828125, 0.010772705078125, 0.0298614501953125, 0.0201263427734375, 0.0645751953125, 0.057220458984375, -0.07183837890625, 0.053375244140625, -0.0236358642578125, -0.0227508544921875, -0.0086212158203125, -0.05694580078125, -0.06890869140625, -0.040985107421875, -0.035919189453125, -0.046478271484375, -0.006175994873046875, 0.06298828125, 0.06573486328125, -0.040283203125, -0.0270233154296875, -0.00830841064453125, -0.001800537109375, -0.01800537109375, -0.0245513916015625, 0.0204620361328125, 0.0210723876953125, -0.051361083984375, 0.00832366943359375, 0.003986358642578125, 0.034820556640625, 
-0.0160675048828125, -0.021728515625, -0.01290130615234375, 0.0038127899169921875, 0.04522705078125, 0.047637939453125, -0.0340576171875, -0.00844573974609375, -0.0074920654296875, -0.01355743408203125, 0.023162841796875, 0.02197265625, -0.048828125, 0.000354766845703125, 0.042816162109375, 0.0035076141357421875, 0.06640625, 0.005474090576171875, 0.02899169921875, -0.036163330078125, 0.01442718505859375, 0.0016946792602539062, 0.024078369140625, 0.0009331703186035156, -0.0306549072265625, 0.048736572265625, 0.02081298828125, -0.044219970703125, -0.0587158203125, -0.0168304443359375, -0.0947265625, -0.01285552978515625, 0.08221435546875, -0.02020263671875, -0.027008056640625, -0.00975799560546875, -0.03082275390625, 0.0182647705078125, -0.052215576171875, 0.0269012451171875, 0.03253173828125, -0.01171112060546875, -0.027008056640625, -0.053802490234375, 0.033050537109375, 0.0035076141357421875, -0.07452392578125, 0.0011339187622070312, 0.0335693359375, 0.02838134765625, 0.0069580078125, 0.07257080078125, -0.02374267578125, 0.0199737548828125, 0.004917144775390625, 0.01007080078125, -0.0006074905395507812, 0.0081939697265625, -0.01251983642578125, -0.00568389892578125, -0.0191650390625, -0.01346588134765625 ] ]
teknium/OpenHermes-13B
2023-09-24T11:07:28.000Z
[ "transformers", "pytorch", "llama", "text-generation", "llama-2", "instruct", "finetune", "alpaca", "gpt4", "synthetic data", "distillation", "en", "dataset:teknium/openhermes", "license:mit", "endpoints_compatible", "text-generation-inference", "region:us" ]
text-generation
teknium
null
null
teknium/OpenHermes-13B
32
5,815
transformers
2023-09-06T04:36:29
--- base_model: NousResearch/Llama-2-13b-hf tags: - llama-2 - instruct - finetune - alpaca - gpt4 - synthetic data - distillation datasets: - teknium/openhermes model-index: - name: openhermes-13b results: [] license: mit language: - en --- # OpenHermes-13B ![image/png](https://cdn-uploads.huggingface.co/production/uploads/6317aade83d8d2fd903192d9/ovkrkIIUwJ9azhPtW6dAb.png) ## Model description OpenHermes 13B is the first fine tune of the Hermes dataset that has a fully open source dataset! OpenHermes was trained on 242,000 entries of primarily GPT-4 generated data, from open datasets across the AI landscape, including: - GPTeacher - General Instruct, Roleplay v1, Roleplay v2, and Code Instruct Datasets, by Teknium - WizardLM (v1, evol_instruct 70k), by WizardLM Team/nlpxucan - Airoboros GPT-4 (v1.0), by JonDurbin - Camel-AI's domain expert datasets, by the Camel-AI Team - CodeAlpaca, by Sahil2801 - GPT4-LLM and Unnatural Instructions, by Microsoft Filtering included removal of OpenAI refusals, disclaimers, and "As an AI" type examples and more The base dataset mix the model was trained on is identical to Nous-Hermes', minus the Nous-Instruct and PDACTL datasets which were private datasets. The WANDB Project is public and can be examined at this link: https://wandb.ai/teknium1/openhermes/runs/openhermes-v2-fullft-13b Huge thank you to [main_horse](https://twitter.com/main_horse) for compute access and a16z for sponsoring my work, and all the dataset creators and other people who's work has contributed to this project! 
## Example Outputs ![image/png](https://cdn-uploads.huggingface.co/production/uploads/6317aade83d8d2fd903192d9/wMSeFqUSBwCNefm7s6G1-.png) ![image/png](https://cdn-uploads.huggingface.co/production/uploads/6317aade83d8d2fd903192d9/00iVenvEOMWIO9X6EY2EZ.png) ![image/png](https://cdn-uploads.huggingface.co/production/uploads/6317aade83d8d2fd903192d9/o7hHbCbtwMLitDy-FWDAg.png) ![image/png](https://cdn-uploads.huggingface.co/production/uploads/6317aade83d8d2fd903192d9/-9ZT1FBSE2BJhDowoh6Gj.png) ## Benchmark Information ## Benchmark Results GPT-4All Benchmark Set ``` | Task |Version| Metric |Value | |Stderr| |-------------|------:|--------|-----:|---|-----:| |arc_challenge| 0|acc |0.5009|± |0.0146| | | |acc_norm|0.5247|± |0.0146| |arc_easy | 0|acc |0.8127|± |0.0080| | | |acc_norm|0.7854|± |0.0084| |boolq | 1|acc |0.8153|± |0.0068| |hellaswag | 0|acc |0.6126|± |0.0049| | | |acc_norm|0.7995|± |0.0040| |openbookqa | 0|acc |0.3660|± |0.0216| | | |acc_norm|0.4600|± |0.0223| |piqa | 0|acc |0.7922|± |0.0095| | | |acc_norm|0.8112|± |0.0091| |winogrande | 0|acc |0.7293|± |0.0125| Average: 0.7036 ``` AGI-Eval ``` | Task |Version| Metric |Value | |Stderr| |------------------------------|------:|--------|-----:|---|-----:| |agieval_aqua_rat | 0|acc |0.2008|± |0.0252| | | |acc_norm|0.2126|± |0.0257| |agieval_logiqa_en | 0|acc |0.3410|± |0.0186| | | |acc_norm|0.3564|± |0.0188| |agieval_lsat_ar | 0|acc |0.2261|± |0.0276| | | |acc_norm|0.2174|± |0.0273| |agieval_lsat_lr | 0|acc |0.3725|± |0.0214| | | |acc_norm|0.3373|± |0.0210| |agieval_lsat_rc | 0|acc |0.4684|± |0.0305| | | |acc_norm|0.4572|± |0.0304| |agieval_sat_en | 0|acc |0.6553|± |0.0332| | | |acc_norm|0.5971|± |0.0343| |agieval_sat_en_without_passage| 0|acc |0.4515|± |0.0348| | | |acc_norm|0.4029|± |0.0343| |agieval_sat_math | 0|acc |0.3273|± |0.0317| | | |acc_norm|0.2636|± |0.0298| Average: 0.3556 ``` BigBench Reasoning Test ``` | Task |Version| Metric |Value | |Stderr| 
|------------------------------------------------|------:|---------------------|-----:|---|-----:| |bigbench_causal_judgement | 0|multiple_choice_grade|0.5368|± |0.0363| |bigbench_date_understanding | 0|multiple_choice_grade|0.7127|± |0.0236| |bigbench_disambiguation_qa | 0|multiple_choice_grade|0.3023|± |0.0286| |bigbench_geometric_shapes | 0|multiple_choice_grade|0.1003|± |0.0159| | | |exact_str_match |0.0000|± |0.0000| |bigbench_logical_deduction_five_objects | 0|multiple_choice_grade|0.2720|± |0.0199| |bigbench_logical_deduction_seven_objects | 0|multiple_choice_grade|0.1986|± |0.0151| |bigbench_logical_deduction_three_objects | 0|multiple_choice_grade|0.4500|± |0.0288| |bigbench_movie_recommendation | 0|multiple_choice_grade|0.2880|± |0.0203| |bigbench_navigate | 0|multiple_choice_grade|0.5000|± |0.0158| |bigbench_reasoning_about_colored_objects | 0|multiple_choice_grade|0.5390|± |0.0111| |bigbench_ruin_names | 0|multiple_choice_grade|0.3906|± |0.0231| |bigbench_salient_translation_error_detection | 0|multiple_choice_grade|0.1844|± |0.0123| |bigbench_snarks | 0|multiple_choice_grade|0.5249|± |0.0372| |bigbench_sports_understanding | 0|multiple_choice_grade|0.5335|± |0.0159| |bigbench_temporal_sequences | 0|multiple_choice_grade|0.2980|± |0.0145| |bigbench_tracking_shuffled_objects_five_objects | 0|multiple_choice_grade|0.2048|± |0.0114| |bigbench_tracking_shuffled_objects_seven_objects| 0|multiple_choice_grade|0.1297|± |0.0080| |bigbench_tracking_shuffled_objects_three_objects| 0|multiple_choice_grade|0.4500|± |0.0288| Average: 36.75 ``` This is a slight improvement on GPT4ALL Suite and BigBench Suite, with a degredation in AGIEval compared to the original hermes. 
Average Score Comparison between Nous-Hermes Llama-2 and OpenHermes Llama-2: ``` | Bench | Nous-Hermes | OpenHermes | Change | |------------------------------|------------:|------------|--------| |GPT4All | 70.00| 70.36| +0.36| |------------------------------------------------------------------| |BigBench | 36.57| 36.75| +0.18| |------------------------------------------------------------------| |AGI Eval | 37.20| 35.56| -1.64| ``` ## Training procedure ![image/png](https://cdn-uploads.huggingface.co/production/uploads/6317aade83d8d2fd903192d9/pqQ6MrMVy80hHEKSfqIX2.png) ### Training hyperparameters The following hyperparameters were used during training: - learning_rate: 2e-05 - train_batch_size: 2 - seed: 42 - distributed_type: multi-GPU - num_devices: 8 - gradient_accumulation_steps: 8 - total_train_batch_size: 128 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08 - lr_scheduler_type: cosine - lr_scheduler_warmup_steps: 300 - num_epochs: 3
7,486
[ [ -0.059051513671875, -0.051025390625, 0.0218048095703125, 0.01372528076171875, -0.01861572265625, -0.004077911376953125, -0.00909423828125, -0.036834716796875, 0.0372314453125, 0.007083892822265625, -0.04400634765625, -0.04632568359375, -0.056732177734375, 0.004100799560546875, 0.012451171875, 0.0772705078125, -0.012725830078125, -0.00855255126953125, 0.020111083984375, -0.0312042236328125, -0.02069091796875, -0.0010538101196289062, -0.059844970703125, -0.006511688232421875, 0.022247314453125, 0.031585693359375, 0.05023193359375, 0.039459228515625, 0.05096435546875, 0.026611328125, -0.0188140869140625, 0.00553131103515625, -0.01244354248046875, -0.017669677734375, 0.0115203857421875, -0.020263671875, -0.05694580078125, 0.015899658203125, 0.042755126953125, 0.0382080078125, -0.005062103271484375, 0.037139892578125, 0.0172271728515625, 0.06805419921875, -0.0283660888671875, 0.0204925537109375, -0.003025054931640625, -0.0012140274047851562, -0.0181732177734375, -0.01201629638671875, 0.00011831521987915039, -0.03692626953125, -0.00644683837890625, -0.0565185546875, 0.00835418701171875, 0.0081329345703125, 0.1004638671875, 0.01338958740234375, -0.020721435546875, -0.00984954833984375, -0.03179931640625, 0.067138671875, -0.053314208984375, 0.0157623291015625, 0.033233642578125, 0.01511383056640625, -0.00936126708984375, -0.03485107421875, -0.053314208984375, 0.00011181831359863281, -0.0176849365234375, 0.035430908203125, -0.01514434814453125, -0.0168609619140625, 0.032257080078125, 0.053924560546875, -0.053863525390625, 0.005939483642578125, -0.039886474609375, -0.006702423095703125, 0.059661865234375, 0.020172119140625, 0.0168609619140625, -0.005176544189453125, -0.0330810546875, -0.0341796875, -0.028411865234375, 0.033447265625, 0.0306549072265625, 0.00835418701171875, -0.0382080078125, 0.046234130859375, -0.022857666015625, 0.03436279296875, 0.02423095703125, -0.0169219970703125, 0.058807373046875, -0.0269622802734375, -0.025482177734375, -0.00982666015625, 
0.06591796875, 0.036285400390625, 0.0041046142578125, 0.0166168212890625, 0.006988525390625, 0.00789642333984375, -0.0004448890686035156, -0.068603515625, -0.00833892822265625, 0.0299224853515625, -0.033447265625, -0.0121002197265625, 0.010894775390625, -0.0692138671875, 0.00261688232421875, -0.017547607421875, 0.0230560302734375, -0.052703857421875, -0.0166015625, 0.004581451416015625, -0.0069122314453125, 0.03936767578125, 0.025726318359375, -0.05023193359375, 0.02252197265625, 0.027587890625, 0.060882568359375, -0.005657196044921875, -0.013702392578125, -0.0214691162109375, 0.0080108642578125, -0.037200927734375, 0.042999267578125, -0.01103973388671875, -0.0142974853515625, -0.0271759033203125, 0.021026611328125, -0.0157012939453125, -0.0189056396484375, 0.060882568359375, -0.00795745849609375, 0.037139892578125, -0.039215087890625, -0.033203125, -0.0234527587890625, 0.0277557373046875, -0.05072021484375, 0.09637451171875, 0.0199127197265625, -0.06854248046875, 0.046905517578125, -0.0587158203125, 0.00040340423583984375, -0.006443023681640625, -0.01294708251953125, -0.06451416015625, -0.0132293701171875, 0.033172607421875, 0.0299835205078125, -0.031829833984375, -0.0021305084228515625, -0.0207366943359375, -0.03045654296875, -0.006404876708984375, -0.01007080078125, 0.07818603515625, 0.0187225341796875, -0.059539794921875, 0.00676727294921875, -0.0694580078125, 0.0190887451171875, 0.036468505859375, -0.0289764404296875, -0.003864288330078125, -0.02630615234375, -0.0158233642578125, 0.0228118896484375, 0.00862884521484375, -0.0531005859375, 0.02032470703125, -0.03125, 0.01312255859375, 0.06390380859375, 0.005611419677734375, 0.0172119140625, -0.0477294921875, 0.0263519287109375, 0.01131439208984375, 0.01045989990234375, 0.0078277587890625, -0.05218505859375, -0.053131103515625, -0.048797607421875, 0.0041351318359375, 0.041656494140625, -0.0233612060546875, 0.054168701171875, -0.0073089599609375, -0.055206298828125, -0.04205322265625, -0.0009465217590332031, 
0.03448486328125, 0.03839111328125, 0.040557861328125, -0.0222625732421875, -0.02691650390625, -0.06951904296875, -0.0094451904296875, -0.0110626220703125, -0.0018510818481445312, 0.031951904296875, 0.0665283203125, -0.0004944801330566406, 0.0501708984375, -0.056732177734375, -0.037933349609375, -0.0209197998046875, -0.01024627685546875, 0.04669189453125, 0.05401611328125, 0.06182861328125, -0.03961181640625, -0.050079345703125, 0.00006854534149169922, -0.0604248046875, 0.0092010498046875, 0.01221466064453125, -0.0189666748046875, 0.026458740234375, 0.0160675048828125, -0.058929443359375, 0.0577392578125, 0.033843994140625, -0.0487060546875, 0.06610107421875, -0.033477783203125, 0.0305938720703125, -0.080322265625, 0.0313720703125, 0.007785797119140625, 0.01229095458984375, -0.01849365234375, 0.003215789794921875, 0.002285003662109375, 0.0037403106689453125, -0.0212860107421875, 0.062408447265625, -0.044952392578125, 0.0022144317626953125, 0.0209197998046875, 0.0009369850158691406, -0.0056915283203125, 0.05010986328125, -0.0065155029296875, 0.068359375, 0.05279541015625, -0.03350830078125, 0.0182037353515625, 0.028778076171875, -0.03533935546875, 0.052703857421875, -0.05157470703125, -0.007366180419921875, -0.00034499168395996094, 0.021636962890625, -0.08245849609375, -0.0244293212890625, 0.03814697265625, -0.040985107421875, 0.0252838134765625, 0.00870513916015625, -0.02191162109375, -0.064697265625, -0.047943115234375, 0.01140594482421875, 0.036041259765625, -0.035430908203125, 0.0181121826171875, -0.0005087852478027344, -0.004886627197265625, -0.04833984375, -0.049285888671875, -0.0239410400390625, -0.0267791748046875, -0.052947998046875, 0.0288238525390625, -0.017578125, -0.00945281982421875, 0.003505706787109375, -0.025787353515625, -0.0005555152893066406, -0.0030841827392578125, 0.0212860107421875, 0.0309906005859375, -0.0261993408203125, -0.0185699462890625, -0.00992584228515625, -0.022125244140625, -0.0017032623291015625, -0.0000680685043334961, 
0.029876708984375, -0.02923583984375, -0.033538818359375, -0.0557861328125, 0.0117340087890625, 0.047454833984375, -0.0238800048828125, 0.07611083984375, 0.0516357421875, -0.010498046875, 0.01043701171875, -0.03448486328125, -0.0079193115234375, -0.037017822265625, 0.00634002685546875, -0.039276123046875, -0.055999755859375, 0.038848876953125, 0.00653839111328125, 0.0192108154296875, 0.0606689453125, 0.04058837890625, -0.00926971435546875, 0.0721435546875, 0.0246429443359375, -0.0142059326171875, 0.017669677734375, -0.0521240234375, 0.01068878173828125, -0.0625, -0.0212860107421875, -0.04473876953125, -0.04241943359375, -0.050048828125, -0.022369384765625, 0.022674560546875, 0.000024437904357910156, -0.05804443359375, 0.0190277099609375, -0.05096435546875, 0.016143798828125, 0.053924560546875, 0.02301025390625, 0.00823974609375, -0.001682281494140625, -0.0306854248046875, -0.004894256591796875, -0.025634765625, -0.038055419921875, 0.09619140625, 0.014862060546875, 0.03436279296875, 0.0181884765625, 0.052093505859375, 0.01425933837890625, 0.0081939697265625, -0.031646728515625, 0.042449951171875, 0.0142364501953125, -0.0557861328125, -0.0248565673828125, -0.02734375, -0.086181640625, 0.03375244140625, -0.019195556640625, -0.0718994140625, 0.00884246826171875, 0.00786590576171875, -0.0245513916015625, 0.041107177734375, -0.047760009765625, 0.06951904296875, -0.007167816162109375, -0.048980712890625, -0.005863189697265625, -0.048370361328125, 0.021087646484375, 0.0088958740234375, 0.0209808349609375, -0.015625, 0.0006375312805175781, 0.060760498046875, -0.053436279296875, 0.027008056640625, -0.01788330078125, 0.0086669921875, 0.031463623046875, -0.0068511962890625, 0.041656494140625, 0.0043792724609375, -0.0108489990234375, 0.00261688232421875, -0.0011053085327148438, -0.0606689453125, -0.007091522216796875, 0.05352783203125, -0.07086181640625, -0.0496826171875, -0.06158447265625, -0.031097412109375, -0.008392333984375, 0.0180816650390625, 0.019805908203125, 
0.019866943359375, -0.01129150390625, 0.0192108154296875, 0.04052734375, -0.029876708984375, 0.0389404296875, 0.0230560302734375, 0.00586700439453125, -0.035064697265625, 0.06036376953125, -0.001705169677734375, 0.0137786865234375, 0.0013570785522460938, 0.0185699462890625, -0.01454925537109375, -0.029449462890625, -0.041290283203125, 0.034759521484375, -0.01488494873046875, -0.01480865478515625, -0.035552978515625, -0.00626373291015625, -0.039886474609375, -0.0268707275390625, -0.017578125, -0.03167724609375, -0.004802703857421875, -0.0211944580078125, 0.039886474609375, 0.036773681640625, -0.01812744140625, 0.01751708984375, -0.035675048828125, 0.0239105224609375, 0.0107574462890625, 0.032135009765625, -0.01224517822265625, -0.035491943359375, -0.005855560302734375, -0.0021610260009765625, -0.04949951171875, -0.06805419921875, 0.042938232421875, -0.0006108283996582031, 0.0423583984375, 0.0333251953125, -0.00968170166015625, 0.052398681640625, -0.0038509368896484375, 0.072998046875, 0.020294189453125, -0.0487060546875, 0.045928955078125, -0.03375244140625, 0.0283660888671875, 0.04351806640625, 0.0399169921875, -0.0224761962890625, -0.034027099609375, -0.06610107421875, -0.063232421875, 0.082275390625, 0.0310516357421875, -0.0266265869140625, 0.01468658447265625, 0.006778717041015625, -0.0064544677734375, 0.0099945068359375, -0.049072265625, -0.0501708984375, 0.00008910894393920898, -0.0150146484375, -0.016326904296875, -0.0014371871948242188, -0.0173492431640625, -0.047332763671875, 0.05755615234375, 0.00836181640625, 0.037841796875, 0.016876220703125, 0.01160430908203125, -0.0012617111206054688, 0.0164031982421875, 0.033905029296875, 0.04229736328125, -0.033111572265625, -0.0130462646484375, 0.0201873779296875, -0.0577392578125, 0.01155853271484375, 0.01092529296875, -0.012420654296875, -0.01239776611328125, 0.0224151611328125, 0.044158935546875, -0.01001739501953125, -0.024566650390625, 0.03759765625, 0.0034351348876953125, -0.0418701171875, -0.025238037109375, 
0.00002574920654296875, -0.00518035888671875, 0.0260009765625, 0.0204010009765625, 0.006206512451171875, -0.0023403167724609375, -0.039398193359375, 0.011810302734375, 0.025665283203125, -0.0201416015625, -0.0128631591796875, 0.058746337890625, -0.0078277587890625, 0.0014371871948242188, 0.036163330078125, -0.01514434814453125, -0.036468505859375, 0.06890869140625, 0.0209197998046875, 0.045745849609375, -0.036651611328125, 0.01165008544921875, 0.0770263671875, 0.0294189453125, 0.0029773712158203125, 0.039215087890625, 0.01251220703125, -0.0196990966796875, -0.0098114013671875, -0.05621337890625, -0.0171356201171875, 0.0257415771484375, -0.04876708984375, 0.020538330078125, -0.035247802734375, -0.011260986328125, 0.0008425712585449219, 0.0340576171875, -0.06378173828125, 0.039306640625, -0.007049560546875, 0.07257080078125, -0.062286376953125, 0.0511474609375, 0.060455322265625, -0.0484619140625, -0.0792236328125, -0.01458740234375, 0.00992584228515625, -0.059051513671875, 0.040557861328125, 0.01232147216796875, 0.0207366943359375, -0.011260986328125, -0.024505615234375, -0.08740234375, 0.09808349609375, 0.0036907196044921875, -0.02471923828125, 0.017730712890625, 0.0140533447265625, 0.039703369140625, 0.0016355514526367188, 0.049072265625, 0.0413818359375, 0.0606689453125, 0.00943756103515625, -0.06781005859375, 0.0269775390625, -0.039306640625, -0.0250091552734375, 0.03143310546875, -0.07415771484375, 0.07720947265625, -0.0206298828125, 0.00395965576171875, -0.007801055908203125, 0.040679931640625, 0.03662109375, 0.03240966796875, 0.02801513671875, 0.07818603515625, 0.05499267578125, -0.0255126953125, 0.0894775390625, -0.02197265625, 0.035675048828125, 0.062286376953125, 0.0128936767578125, 0.04571533203125, 0.0279998779296875, -0.045135498046875, 0.035247802734375, 0.052398681640625, -0.00762176513671875, 0.0220947265625, 0.01108551025390625, -0.00665283203125, 0.0009417533874511719, 0.031829833984375, -0.05450439453125, 0.018402099609375, 0.016143798828125, 
-0.0186614990234375, -0.0119476318359375, -0.01044464111328125, 0.023040771484375, -0.00310516357421875, -0.0202484130859375, 0.041046142578125, -0.00795745849609375, -0.05035400390625, 0.0545654296875, -0.00374603271484375, 0.041259765625, -0.0487060546875, 0.0066680908203125, -0.0201873779296875, 0.03363037109375, -0.03106689453125, -0.07562255859375, 0.023040771484375, -0.0020351409912109375, 0.007442474365234375, -0.004100799560546875, 0.02960205078125, 0.0005326271057128906, -0.03594970703125, 0.0262603759765625, 0.036865234375, 0.0185546875, 0.0024089813232421875, -0.070556640625, -0.0028667449951171875, 0.003887176513671875, -0.060638427734375, 0.027435302734375, 0.03863525390625, -0.00018262863159179688, 0.03680419921875, 0.050048828125, 0.0009446144104003906, 0.00841522216796875, -0.031280517578125, 0.07421875, -0.057647705078125, -0.039520263671875, -0.05322265625, 0.0347900390625, -0.02386474609375, -0.05035400390625, 0.07232666015625, 0.06158447265625, 0.051483154296875, 0.0017824172973632812, 0.0443115234375, -0.036163330078125, 0.041748046875, -0.0196990966796875, 0.041778564453125, -0.06744384765625, -0.0150146484375, -0.03692626953125, -0.06439208984375, -0.0128936767578125, 0.056182861328125, -0.0421142578125, 0.0183258056640625, 0.061370849609375, 0.05157470703125, -0.0023040771484375, 0.01338958740234375, -0.005229949951171875, 0.0270233154296875, 0.01560211181640625, 0.06158447265625, 0.0305328369140625, -0.039520263671875, 0.04052734375, -0.032135009765625, -0.0293121337890625, -0.021331787109375, -0.040069580078125, -0.05560302734375, -0.0411376953125, -0.03106689453125, -0.034576416015625, -0.006389617919921875, 0.0657958984375, 0.03729248046875, -0.05413818359375, -0.0218963623046875, -0.00403594970703125, -0.005840301513671875, -0.04364013671875, -0.016021728515625, 0.05950927734375, -0.007785797119140625, -0.06097412109375, 0.0029125213623046875, 0.00212860107421875, 0.0056915283203125, 0.0161285400390625, -0.0172119140625, 
-0.0251312255859375, 0.01236724853515625, 0.032623291015625, 0.029022216796875, -0.038543701171875, -0.02191162109375, -0.00675201416015625, -0.018463134765625, 0.031829833984375, 0.0065765380859375, -0.034423828125, 0.0078125, 0.026153564453125, 0.02508544921875, 0.06365966796875, 0.0021228790283203125, 0.0020465850830078125, -0.0180816650390625, 0.0157928466796875, 0.00003254413604736328, 0.03240966796875, -0.00571441650390625, -0.0179443359375, 0.061614990234375, 0.0233612060546875, -0.04693603515625, -0.058563232421875, -0.015472412109375, -0.10296630859375, -0.0155792236328125, 0.075439453125, -0.0133819580078125, -0.034515380859375, 0.0120391845703125, -0.035858154296875, 0.01224517822265625, -0.042755126953125, 0.0445556640625, 0.04248046875, -0.0175323486328125, 0.00507354736328125, -0.05914306640625, 0.0302886962890625, 0.039093017578125, -0.06329345703125, -0.019073486328125, 0.033538818359375, 0.0253448486328125, 0.0120391845703125, 0.056182861328125, -0.0218048095703125, 0.01499176025390625, 0.0145416259765625, 0.01355743408203125, -0.005615234375, 0.0103912353515625, 0.0006041526794433594, 0.021148681640625, -0.012054443359375, -0.04168701171875 ] ]
microsoft/graphcodebert-base
2022-09-26T17:06:54.000Z
[ "transformers", "pytorch", "tf", "jax", "roberta", "fill-mask", "arxiv:2009.08366", "autotrain_compatible", "endpoints_compatible", "has_space", "region:us" ]
fill-mask
microsoft
null
null
microsoft/graphcodebert-base
24
5,814
transformers
2022-03-02T23:29:05
## GraphCodeBERT model GraphCodeBERT is a graph-based pre-trained model based on the Transformer architecture for programming language, which also considers data-flow information along with code sequences. GraphCodeBERT consists of 12 layers, 768 dimensional hidden states, and 12 attention heads. The maximum sequence length for the model is 512. The model is trained on the CodeSearchNet dataset, which includes 2.3M functions with document pairs for six programming languages. More details can be found in the [paper](https://arxiv.org/abs/2009.08366) by Guo et. al. **Disclaimer:** The team releasing BERT did not write a model card for this model so this model card has been written by the Hugging Face community members.
730
[ [ -0.036651611328125, -0.038726806640625, 0.022308349609375, 0.0160675048828125, -0.0036106109619140625, 0.027862548828125, 0.0157623291015625, -0.03497314453125, 0.00971221923828125, 0.056976318359375, -0.04217529296875, -0.052276611328125, -0.0592041015625, -0.008880615234375, -0.051910400390625, 0.09173583984375, 0.00799560546875, -0.0007963180541992188, -0.001888275146484375, 0.005809783935546875, -0.00818634033203125, -0.047698974609375, -0.01904296875, -0.0312042236328125, 0.041046142578125, 0.00818634033203125, 0.07232666015625, 0.043792724609375, 0.031768798828125, 0.016143798828125, -0.01641845703125, -0.01513671875, -0.060028076171875, -0.0299072265625, 0.0268707275390625, -0.0443115234375, -0.041412353515625, 0.0007486343383789062, 0.031341552734375, 0.05810546875, 0.019134521484375, 0.0205841064453125, 0.0024547576904296875, 0.06298828125, -0.0296478271484375, 0.031585693359375, -0.0195159912109375, 0.01313018798828125, 0.01178741455078125, 0.020233154296875, -0.0499267578125, -0.042755126953125, 0.0206756591796875, -0.01276397705078125, 0.01549530029296875, -0.006221771240234375, 0.06298828125, 0.00394439697265625, -0.035247802734375, -0.01375579833984375, -0.0545654296875, 0.0291900634765625, -0.036224365234375, 0.02789306640625, 0.0028667449951171875, 0.026153564453125, 0.0111083984375, -0.09405517578125, -0.021636962890625, 0.007476806640625, -0.01505279541015625, -0.00885009765625, -0.032867431640625, 0.02044677734375, 0.048583984375, 0.007049560546875, -0.0556640625, -0.01250457763671875, -0.057342529296875, -0.01531982421875, 0.051727294921875, 0.01306915283203125, 0.018035888671875, -0.0189971923828125, -0.036834716796875, -0.007038116455078125, -0.035400390625, 0.0174407958984375, 0.040069580078125, 0.007747650146484375, -0.0192413330078125, 0.0234222412109375, 0.013946533203125, 0.078125, -0.004840850830078125, 0.01486968994140625, 0.03631591796875, 0.005428314208984375, -0.0335693359375, -0.0004868507385253906, 0.060211181640625, 
-0.004150390625, 0.03350830078125, -0.025421142578125, -0.0163726806640625, 0.0268402099609375, 0.036834716796875, -0.062408447265625, -0.053924560546875, 0.0207366943359375, -0.04632568359375, -0.0452880859375, 0.0008559226989746094, -0.03558349609375, -0.01287078857421875, -0.00276947021484375, 0.0190887451171875, -0.0484619140625, -0.0134735107421875, -0.004451751708984375, -0.0169830322265625, 0.018218994140625, 0.00676727294921875, -0.038421630859375, 0.028472900390625, 0.038665771484375, 0.06011962890625, -0.0020236968994140625, -0.0236663818359375, -0.0036182403564453125, -0.0213623046875, -0.038909912109375, 0.043365478515625, -0.068115234375, -0.0277099609375, 0.0037517547607421875, 0.01253509521484375, -0.0165252685546875, -0.0131988525390625, 0.0338134765625, -0.06640625, -0.0015726089477539062, -0.0009059906005859375, -0.0499267578125, -0.0335693359375, -0.0005955696105957031, -0.067626953125, 0.0723876953125, 0.033782958984375, -0.0545654296875, 0.007038116455078125, -0.04833984375, -0.0199432373046875, 0.0160064697265625, -0.0157928466796875, -0.0309295654296875, -0.0142974853515625, 0.01186370849609375, 0.0272369384765625, 0.005069732666015625, 0.0225372314453125, -0.020233154296875, -0.041229248046875, 0.009368896484375, 0.0022983551025390625, 0.08795166015625, 0.027008056640625, -0.01477813720703125, 0.0302734375, -0.063720703125, 0.0224151611328125, 0.012481689453125, -0.01560211181640625, 0.0156402587890625, -0.016510009765625, 0.00916290283203125, 0.031829833984375, 0.01409912109375, -0.032073974609375, 0.020904541015625, -0.0211181640625, 0.068603515625, 0.037872314453125, -0.0160675048828125, 0.0238800048828125, -0.026947021484375, 0.059814453125, -0.0034580230712890625, 0.0212249755859375, -0.03521728515625, -0.0482177734375, -0.050079345703125, -0.02789306640625, 0.03289794921875, 0.04010009765625, -0.036163330078125, 0.044891357421875, -0.017608642578125, -0.061920166015625, -0.0222320556640625, -0.005878448486328125, 0.03314208984375, 
0.0166473388671875, 0.005458831787109375, -0.0196075439453125, -0.03558349609375, -0.06884765625, 0.0120849609375, 0.00649261474609375, 0.0160064697265625, 0.0283355712890625, 0.042633056640625, -0.036956787109375, 0.06427001953125, -0.054595947265625, -0.0002467632293701172, -0.034820556640625, 0.01258087158203125, 0.036468505859375, 0.0182342529296875, 0.068115234375, -0.046356201171875, -0.0341796875, -0.03521728515625, -0.050567626953125, 0.01971435546875, 0.01485443115234375, -0.029296875, 0.0007047653198242188, 0.033599853515625, -0.04986572265625, 0.0440673828125, 0.0693359375, -0.0057830810546875, 0.0305328369140625, -0.0028133392333984375, -0.023468017578125, -0.0650634765625, 0.0157928466796875, -0.00649261474609375, -0.0268402099609375, -0.05694580078125, 0.01374053955078125, 0.024017333984375, -0.0631103515625, -0.0282745361328125, 0.01309967041015625, -0.04156494140625, -0.0162506103515625, -0.015625, 0.004375457763671875, 0.0044708251953125, 0.0653076171875, -0.006778717041015625, 0.0399169921875, 0.07672119140625, -0.0433349609375, 0.039276123046875, 0.033447265625, -0.014129638671875, 0.0013275146484375, -0.06671142578125, 0.047882080078125, -0.020843505859375, 0.028045654296875, -0.09521484375, -0.00019216537475585938, 0.0020503997802734375, -0.044525146484375, 0.0213623046875, -0.0176239013671875, -0.036529541015625, -0.065185546875, -0.0156402587890625, 0.026580810546875, 0.04827880859375, -0.036773681640625, 0.046356201171875, 0.035369873046875, 0.01271820068359375, -0.0439453125, -0.04339599609375, -0.00321197509765625, 0.0147552490234375, -0.0704345703125, 0.0128326416015625, -0.011199951171875, 0.018646240234375, 0.01126861572265625, 0.01165008544921875, -0.017547607421875, -0.0103912353515625, 0.037933349609375, 0.04034423828125, 0.002094268798828125, 0.00893402099609375, -0.01511383056640625, -0.006496429443359375, 0.01348876953125, -0.0284271240234375, 0.07025146484375, -0.03778076171875, -0.0030002593994140625, -0.020111083984375, 
0.0133819580078125, 0.046112060546875, -0.02923583984375, 0.048492431640625, 0.0299224853515625, -0.0295867919921875, -0.0185394287109375, 0.006496429443359375, 0.001110076904296875, -0.036468505859375, 0.0308990478515625, -0.03192138671875, -0.0643310546875, 0.057891845703125, 0.004241943359375, -0.006488800048828125, 0.0303802490234375, 0.032867431640625, 0.01334381103515625, 0.08941650390625, 0.05889892578125, -0.0277862548828125, 0.027191162109375, -0.04913330078125, 0.034332275390625, -0.04254150390625, -0.020172119140625, -0.030029296875, -0.0299530029296875, -0.0433349609375, -0.017120361328125, 0.00893402099609375, 0.012725830078125, -0.042205810546875, 0.06591796875, -0.07208251953125, 0.0106658935546875, 0.051361083984375, 0.0245513916015625, -0.0155029296875, -0.01251983642578125, -0.0082244873046875, 0.01849365234375, -0.06488037109375, -0.02899169921875, 0.0799560546875, 0.03424072265625, 0.07232666015625, -0.00620269775390625, 0.05078125, 0.0263214111328125, 0.01032257080078125, -0.0506591796875, 0.043487548828125, 0.00439453125, -0.0606689453125, -0.0238037109375, -0.0225372314453125, -0.06781005859375, -0.00649261474609375, -0.0075836181640625, -0.06475830078125, 0.0026397705078125, 0.0202789306640625, -0.005672454833984375, 0.018218994140625, -0.058624267578125, 0.07183837890625, -0.0194244384765625, 0.01055908203125, 0.007183074951171875, -0.05859375, 0.0265045166015625, -0.0007338523864746094, -0.0005970001220703125, 0.005523681640625, 0.043365478515625, 0.061126708984375, -0.029510498046875, 0.06573486328125, -0.016448974609375, 0.006549835205078125, 0.036834716796875, 0.0093231201171875, 0.05718994140625, -0.0236663818359375, 0.0192108154296875, 0.031646728515625, 0.003345489501953125, -0.034088134765625, -0.031402587890625, 0.035919189453125, -0.07684326171875, -0.019989013671875, -0.011993408203125, -0.032501220703125, -0.01165008544921875, 0.033447265625, 0.036041259765625, 0.0408935546875, 0.00008541345596313477, 0.0088653564453125, 
0.037261962890625, -0.0266876220703125, 0.0253753662109375, -0.0042572021484375, -0.03668212890625, -0.0169830322265625, 0.056671142578125, 0.0035915374755859375, 0.01141357421875, 0.0168914794921875, -0.0113983154296875, -0.00453948974609375, -0.0084228515625, -0.026092529296875, 0.0260009765625, -0.06878662109375, -0.006946563720703125, -0.06805419921875, -0.065185546875, -0.04302978515625, -0.01425933837890625, -0.0311431884765625, -0.0225067138671875, -0.01479339599609375, -0.0149078369140625, 0.0220184326171875, 0.059478759765625, 0.0247802734375, 0.028594970703125, -0.065185546875, 0.0056610107421875, 0.0196685791015625, 0.0474853515625, 0.00498199462890625, -0.027801513671875, -0.0167999267578125, -0.01531982421875, -0.0218353271484375, -0.06951904296875, 0.03326416015625, 0.0021724700927734375, 0.049224853515625, 0.029083251953125, -0.0011491775512695312, 0.02166748046875, -0.03643798828125, 0.04791259765625, 0.0250396728515625, -0.08734130859375, 0.04266357421875, -0.0097808837890625, 0.016937255859375, -0.002655029296875, 0.05413818359375, -0.0310516357421875, -0.034820556640625, -0.05816650390625, -0.0513916015625, 0.057281494140625, 0.01497650146484375, 0.0242919921875, 0.0070037841796875, 0.017333984375, -0.00855255126953125, 0.021728515625, -0.061767578125, -0.045074462890625, -0.03955078125, -0.0161895751953125, 0.0076446533203125, -0.0341796875, -0.018096923828125, -0.0224609375, 0.021209716796875, -0.0210418701171875, 0.06658935546875, -0.006862640380859375, -0.0174407958984375, 0.006153106689453125, -0.0163726806640625, 0.043792724609375, 0.06683349609375, -0.0184478759765625, 0.0034351348876953125, -0.0148468017578125, -0.03265380859375, -0.0094451904296875, 0.0170440673828125, -0.002925872802734375, -0.0008730888366699219, 0.04693603515625, 0.08575439453125, 0.002368927001953125, -0.04266357421875, 0.048583984375, 0.007030487060546875, -0.037689208984375, -0.0350341796875, 0.020904541015625, 0.0142974853515625, 0.031005859375, 
0.01369476318359375, 0.0104827880859375, 0.005985260009765625, -0.0213165283203125, 0.031982421875, 0.018646240234375, -0.057952880859375, -0.0258636474609375, 0.0684814453125, 0.007328033447265625, -0.030853271484375, 0.06011962890625, 0.00727081298828125, -0.058685302734375, 0.06243896484375, 0.031524658203125, 0.046783447265625, 0.0027828216552734375, 0.0234832763671875, 0.033966064453125, 0.021484375, 0.020599365234375, 0.0182647705078125, 0.00521087646484375, -0.057159423828125, -0.0146026611328125, -0.01849365234375, -0.0152435302734375, 0.00820159912109375, -0.0579833984375, 0.02374267578125, -0.052001953125, -0.0284881591796875, 0.024169921875, -0.01812744140625, -0.062225341796875, 0.01383209228515625, 0.0209503173828125, 0.09100341796875, -0.0616455078125, 0.0772705078125, 0.04541015625, -0.0263519287109375, -0.07257080078125, -0.0194549560546875, -0.01473236083984375, -0.091064453125, 0.07763671875, 0.0227508544921875, -0.0101165771484375, 0.0189361572265625, -0.07476806640625, -0.06982421875, 0.07080078125, 0.0311126708984375, -0.0390625, -0.0200347900390625, -0.0095367431640625, 0.042877197265625, -0.0400390625, 0.017578125, 0.0190887451171875, 0.0009765625, 0.015777587890625, -0.042938232421875, -0.01415252685546875, -0.0352783203125, 0.02703857421875, 0.0171356201171875, -0.052581787109375, 0.08477783203125, -0.0267333984375, -0.018829345703125, -0.01473236083984375, 0.0288848876953125, -0.0002758502960205078, -0.01462554931640625, 0.028778076171875, 0.036346435546875, 0.058502197265625, -0.0005826950073242188, 0.079345703125, -0.08404541015625, 0.064697265625, 0.0616455078125, -0.0177001953125, 0.0222930908203125, -0.012847900390625, -0.0175018310546875, 0.061279296875, 0.055084228515625, -0.02520751953125, 0.041107177734375, 0.01361846923828125, 0.01515960693359375, -0.01396942138671875, 0.0258941650390625, -0.0261688232421875, 0.0287628173828125, 0.018524169921875, -0.0701904296875, -0.0018892288208007812, -0.00464630126953125, 
0.00978851318359375, -0.017333984375, -0.01397705078125, 0.026824951171875, -0.003238677978515625, -0.034088134765625, 0.054229736328125, -0.0021820068359375, 0.01248931884765625, -0.037933349609375, -0.00601959228515625, -0.006084442138671875, 0.01302337646484375, -0.0226593017578125, -0.07135009765625, 0.01001739501953125, -0.007633209228515625, -0.00885009765625, 0.0008645057678222656, 0.05841064453125, -0.00972747802734375, -0.0452880859375, 0.0484619140625, 0.01129150390625, 0.01053619384765625, 0.00891876220703125, -0.06201171875, 0.0184173583984375, -0.0018529891967773438, -0.0184783935546875, -0.004032135009765625, 0.0191192626953125, 0.01263427734375, 0.06488037109375, 0.0168609619140625, -0.0160675048828125, 0.013916015625, 0.007595062255859375, 0.0892333984375, -0.04351806640625, -0.048126220703125, -0.04779052734375, 0.0310516357421875, -0.00402069091796875, -0.01044464111328125, 0.042022705078125, 0.05047607421875, 0.056732177734375, -0.0158843994140625, 0.07415771484375, -0.006137847900390625, 0.029296875, -0.01250457763671875, 0.0367431640625, -0.01366424560546875, -0.00801849365234375, -0.039276123046875, -0.06787109375, -0.0173797607421875, 0.051666259765625, -0.025848388671875, 0.0158843994140625, 0.0447998046875, 0.05401611328125, 0.0169677734375, 0.00965118408203125, 0.033111572265625, 0.0005931854248046875, 0.025604248046875, 0.0628662109375, 0.036773681640625, -0.0310516357421875, 0.05291748046875, -0.0114898681640625, -0.02423095703125, -0.01654052734375, -0.0345458984375, -0.09173583984375, -0.043731689453125, -0.01355743408203125, -0.05194091796875, -0.00968170166015625, 0.042144775390625, 0.07305908203125, -0.04345703125, -0.0027923583984375, -0.01515960693359375, -0.006298065185546875, -0.0095672607421875, -0.019317626953125, 0.0307464599609375, -0.0137939453125, -0.038726806640625, -0.0131988525390625, 0.023406982421875, -0.01125335693359375, -0.023468017578125, -0.00800323486328125, 0.007686614990234375, 0.0013475418090820312, 
0.044586181640625, 0.01132965087890625, -0.0245208740234375, -0.040771484375, 0.0131988525390625, -0.0276031494140625, 0.0281219482421875, 0.061798095703125, -0.056549072265625, 0.0352783203125, 0.029052734375, 0.032501220703125, 0.06976318359375, -0.00890350341796875, 0.03326416015625, -0.046722412109375, 0.01788330078125, 0.012115478515625, 0.0295867919921875, 0.00508880615234375, -0.031585693359375, 0.031768798828125, 0.02099609375, -0.042694091796875, -0.0382080078125, 0.042205810546875, -0.0726318359375, -0.0235137939453125, 0.059600830078125, -0.0249176025390625, -0.0269622802734375, 0.009857177734375, -0.004161834716796875, 0.0194549560546875, -0.013824462890625, 0.039459228515625, 0.044891357421875, -0.039031982421875, -0.03955078125, -0.0276031494140625, 0.0197601318359375, -0.026763916015625, -0.0440673828125, -0.01309967041015625, 0.0249786376953125, 0.0235443115234375, 0.0343017578125, 0.049560546875, -0.0167236328125, 0.0157012939453125, 0.0311126708984375, 0.0249176025390625, -0.01152801513671875, -0.0369873046875, -0.004001617431640625, 0.016448974609375, 0.003528594970703125, -0.0152435302734375 ] ]
TehVenom/PPO_Pygway-V8p4_Dev-6b
2023-05-19T18:01:37.000Z
[ "transformers", "pytorch", "gptj", "text-generation", "en", "license:apache-2.0", "has_space", "region:us" ]
text-generation
TehVenom
null
null
TehVenom/PPO_Pygway-V8p4_Dev-6b
7
5,814
transformers
2023-03-17T13:08:51
--- language: en license: apache-2.0 commercial: 'no' inference: false --- # GPT-J 6B - PPO_Pygway Mix ## Model description This is a merged model, using a weighted parameter blend strategy at a (20:20:60) ratio between the models: - [20%] - KoboldAI/GPT-J-6B-Janeway: https://huggingface.co/KoboldAI/GPT-J-6B-Janeway - [20%] - reciprocate/ppo_hh_gpt-j: https://huggingface.co/reciprocate/ppo_hh_gpt-j - [60%] - Pygmalion/Pygmalion-6b DEV (V8 / Part 4): https://huggingface.co/Pygmalion/Pygmalion-6b By their respective authors. **Warning: PPO_Pygway-V8p4_Dev-6b may generate NSFW or inappropriate content due to the base models (Mainly [Pygmalion/Pygmalion-6b V8P4](https://huggingface.co/Pygmalion/Pygmalion-6b)) being trained on general user logs, and internet archives.** ### Intended Use: Research purposes only, intended for responsible use. Express a conversation in natural language, and PPO_Pygmalion will pick up on the conversational format. Try starting a two line prompt such as: ``` Bot: "Hello, how are you?" You: "I am doing just fine, thank you." ``` Or any other topic, and the model will carry on in this back and forth style. ## Information: For more details, check out the related source models, especially [Pygmalion/Pygmalion-6b V8P4](https://huggingface.co/Pygmalion/Pygmalion-6b) for more information on how to utilize the chat bot formatting expected. In a similar manner to fine-tuning, merging weights does not add information but transforms it, therefore it is important to consider trade-offs. PPO_Pygway combines `ppo_hh_gpt-j`, `Janeway-6b` and `Pygmalion-6b V8P4`; all three models were blended in a two step process using a simple weighted parameter method ``` (X*A + Y*B) ``` With X & Y being the model weighs, and A/B being how strongly they are represented within the final value. 
The intent of this is to elevate the end-model by borrowing the strongly represented aspects out of each base model, but may also weaken other faces of each model, which can be desirable if the base models have problematic traits that need to be worked on. Blend was done in FP32 and output saved in FP16 for reduced storage needs. ## Limitations and biases Based on known problems with NLP technology, potential relevant factors include bias (gender, profession, race and religion). <ins>Warning: This model has a moderate NSFW bias.</ins> ### License GPT-J-6b is licensed by EleutherAI under the apache-2.0 license. All Rights Reserved. ### BibTeX entry and citation info ``` @misc{gpt-j, author = {Wang, Ben and Komatsuzaki, Aran}, title = {{GPT-J-6B: A 6 Billion Parameter Autoregressive Language Model}}, howpublished = {\url{https://github.com/kingoflolz/mesh-transformer-jax}}, year = 2021, month = May } ``` ### Credits To: Models involved: - https://huggingface.co/EleutherAI/gpt-j-6B - https://huggingface.co/Pygmalion/Pygmalion-6b - https://huggingface.co/reciprocate/ppo_hh_gpt-j - https://huggingface.co/KoboldAI/GPT-J-6B-Janeway Average weights merging Script credit to Concedo: - https://huggingface.co/concedo ### Related datasets and articles: PPO_HH-GPT-J-6b's Dataset is a variant of the Helpful Harmless assistant themed dataset and Proximal Policy Optimization, specific datasets used are unknown; listed repo datasets include: - https://huggingface.co/datasets/reciprocate/summarize_eval_ilql - https://huggingface.co/datasets/reciprocate/hh_eval_ilql PPO explained: - https://paperswithcode.com/method/ppo Potential HH-type datasets utilized: - https://huggingface.co/HuggingFaceH4 - https://huggingface.co/datasets/Anthropic/hh-rlhf No formal evaluation is available for this model at this time. It is recommend to use this model with the KoboldAI software. All feedback and comments can be directed to TeH_Venom on the KoboldAI discord.
3,816
[ [ -0.024810791015625, -0.05224609375, 0.01294708251953125, 0.0189208984375, -0.019927978515625, -0.024810791015625, -0.018890380859375, -0.03912353515625, 0.01291656494140625, 0.03662109375, -0.044464111328125, -0.01528167724609375, -0.0479736328125, 0.0020008087158203125, -0.001178741455078125, 0.09307861328125, 0.0033550262451171875, -0.0126800537109375, -0.00634002685546875, 0.0002982616424560547, -0.0255279541015625, -0.040313720703125, -0.054534912109375, -0.026275634765625, 0.0298919677734375, -0.00388336181640625, 0.083984375, 0.04156494140625, 0.024383544921875, 0.033477783203125, -0.0243988037109375, 0.0036163330078125, -0.040008544921875, -0.0020809173583984375, -0.0014295578002929688, -0.01456451416015625, -0.05535888671875, 0.019683837890625, 0.044525146484375, 0.04803466796875, -0.0172576904296875, 0.001773834228515625, 0.0087890625, 0.0281982421875, -0.034515380859375, 0.02447509765625, -0.0284881591796875, -0.00859832763671875, 0.00867462158203125, 0.02691650390625, -0.0156402587890625, -0.021392822265625, 0.0266265869140625, -0.04400634765625, 0.004779815673828125, 0.002231597900390625, 0.09588623046875, 0.01160430908203125, -0.0233306884765625, -0.00678253173828125, -0.031158447265625, 0.06689453125, -0.0899658203125, 0.01849365234375, 0.020233154296875, 0.012847900390625, -0.026214599609375, -0.042266845703125, -0.049072265625, -0.023681640625, -0.0135498046875, 0.030181884765625, -0.0169677734375, 0.0007352828979492188, 0.01219940185546875, 0.0433349609375, -0.044189453125, -0.0020503997802734375, -0.03399658203125, -0.0193023681640625, 0.06939697265625, 0.0041046142578125, 0.0242156982421875, -0.035186767578125, -0.0340576171875, -0.0293121337890625, -0.0146026611328125, 0.0020008087158203125, 0.045745849609375, 0.0230560302734375, -0.05035400390625, 0.047698974609375, -0.009735107421875, 0.060882568359375, 0.0136871337890625, -0.0213165283203125, 0.035186767578125, -0.05120849609375, -0.0288848876953125, -0.025909423828125, 0.08709716796875, 
0.04107666015625, 0.019927978515625, -0.002056121826171875, -0.0135040283203125, -0.01404571533203125, 0.01146697998046875, -0.06427001953125, -0.0297393798828125, 0.0213775634765625, -0.04071044921875, -0.0267791748046875, 0.0028171539306640625, -0.045501708984375, -0.010955810546875, -0.0203857421875, 0.042999267578125, -0.035400390625, -0.05279541015625, 0.01016998291015625, -0.0186309814453125, 0.033294677734375, 0.033355712890625, -0.0472412109375, 0.0125579833984375, 0.0224761962890625, 0.06182861328125, -0.0033359527587890625, -0.015228271484375, 0.0012273788452148438, -0.005657196044921875, -0.0103759765625, 0.03143310546875, -0.0240631103515625, -0.042724609375, -0.0249786376953125, 0.01212310791015625, -0.0254058837890625, -0.03533935546875, 0.044647216796875, -0.0027828216552734375, 0.06756591796875, 0.001186370849609375, -0.041473388671875, -0.0290985107421875, 0.01224517822265625, -0.042694091796875, 0.08099365234375, 0.044189453125, -0.05511474609375, 0.009033203125, -0.042144775390625, -0.0104522705078125, -0.0016202926635742188, 0.004978179931640625, -0.047943115234375, -0.007110595703125, 0.01082611083984375, 0.03204345703125, -0.031005859375, 0.024383544921875, -0.04541015625, -0.009307861328125, 0.0148773193359375, -0.030548095703125, 0.08099365234375, 0.018524169921875, -0.0171661376953125, 0.00923919677734375, -0.05072021484375, 0.0007047653198242188, 0.03387451171875, -0.00994110107421875, -0.0171661376953125, -0.0305938720703125, 0.0119171142578125, 0.035247802734375, 0.03509521484375, -0.0158233642578125, 0.026641845703125, -0.0306854248046875, 0.02557373046875, 0.046295166015625, 0.004276275634765625, 0.0411376953125, -0.046234130859375, 0.038299560546875, 0.005535125732421875, 0.035400390625, 0.005939483642578125, -0.0657958984375, -0.0543212890625, -0.01410675048828125, 0.0274810791015625, 0.03778076171875, -0.05462646484375, 0.06707763671875, -0.006542205810546875, -0.046051025390625, -0.032501220703125, -0.00701904296875, 
0.035797119140625, 0.045013427734375, 0.0247650146484375, -0.0311126708984375, -0.024810791015625, -0.0731201171875, -0.00775146484375, -0.0248565673828125, -0.0215911865234375, 0.028778076171875, 0.035369873046875, -0.0164031982421875, 0.058868408203125, -0.0355224609375, -0.00457763671875, -0.018310546875, 0.016876220703125, 0.032623291015625, 0.06390380859375, 0.04766845703125, -0.062744140625, -0.039825439453125, -0.0120391845703125, -0.052276611328125, -0.01352691650390625, -0.004302978515625, -0.025665283203125, 0.01898193359375, -0.00241851806640625, -0.05767822265625, 0.025421142578125, 0.057342529296875, -0.034393310546875, 0.041961669921875, -0.0174407958984375, 0.0147552490234375, -0.10302734375, 0.005859375, 0.0229034423828125, -0.008514404296875, -0.040863037109375, -0.0038127899169921875, -0.0135040283203125, 0.0003497600555419922, -0.038787841796875, 0.0545654296875, -0.042572021484375, 0.0097808837890625, -0.00623321533203125, -0.005359649658203125, -0.00016760826110839844, 0.050018310546875, -0.0023593902587890625, 0.04278564453125, 0.044830322265625, -0.032745361328125, 0.01715087890625, 0.01580810546875, 0.01446533203125, 0.034759521484375, -0.06231689453125, 0.00823974609375, 0.0033702850341796875, 0.0262298583984375, -0.0732421875, -0.02764892578125, 0.050323486328125, -0.047821044921875, 0.031951904296875, -0.041290283203125, -0.0364990234375, -0.022186279296875, -0.03411865234375, 0.02447509765625, 0.05755615234375, -0.0123291015625, 0.06109619140625, 0.0265655517578125, -0.0157012939453125, -0.01393890380859375, -0.029571533203125, -0.00994110107421875, -0.040863037109375, -0.070068359375, 0.036468505859375, -0.021759033203125, -0.0035800933837890625, 0.0025730133056640625, -0.0021533966064453125, -0.00579833984375, -0.01531219482421875, 0.011749267578125, 0.042449951171875, -0.001705169677734375, -0.0299224853515625, -0.00556182861328125, 0.0008769035339355469, 0.00359344482421875, -0.0115203857421875, 0.0362548828125, -0.0063934326171875, 
0.008026123046875, -0.042877197265625, 0.017059326171875, 0.046295166015625, 0.007785797119140625, 0.06939697265625, 0.0648193359375, -0.02178955078125, 0.018768310546875, -0.0418701171875, -0.01049041748046875, -0.0352783203125, 0.01157379150390625, -0.019561767578125, -0.051300048828125, 0.0596923828125, 0.018707275390625, 0.0011396408081054688, 0.0555419921875, 0.042999267578125, 0.0061798095703125, 0.09246826171875, 0.033355712890625, -0.01096343994140625, 0.049713134765625, -0.028289794921875, 0.0019550323486328125, -0.06640625, -0.021942138671875, -0.03302001953125, -0.01052093505859375, -0.08154296875, -0.04437255859375, 0.02630615234375, 0.01299285888671875, -0.01959228515625, 0.04833984375, -0.03863525390625, 0.001125335693359375, 0.043670654296875, 0.01123809814453125, 0.00841522216796875, -0.0074005126953125, 0.00241851806640625, 0.0010557174682617188, -0.05615234375, -0.0144195556640625, 0.07354736328125, 0.03741455078125, 0.055938720703125, 0.012786865234375, 0.05462646484375, -0.004917144775390625, 0.0229034423828125, -0.04559326171875, 0.041412353515625, -0.0031585693359375, -0.0379638671875, -0.01558685302734375, -0.06756591796875, -0.0706787109375, 0.0364990234375, -0.01494598388671875, -0.06170654296875, 0.01427459716796875, 0.0082855224609375, -0.0185699462890625, -0.003314971923828125, -0.078369140625, 0.07159423828125, -0.013031005859375, -0.0295867919921875, 0.00959014892578125, -0.062469482421875, 0.0321044921875, 0.0192718505859375, 0.0082855224609375, -0.01239776611328125, -0.0008254051208496094, 0.06292724609375, -0.049407958984375, 0.0509033203125, -0.021514892578125, -0.01528167724609375, 0.026275634765625, -0.01219940185546875, 0.03826904296875, -0.002025604248046875, 0.0106964111328125, 0.032073974609375, -0.00354766845703125, -0.0282745361328125, -0.045440673828125, 0.0511474609375, -0.0706787109375, -0.03076171875, -0.03759765625, -0.048431396484375, -0.00930023193359375, 0.01535797119140625, 0.020111083984375, 0.020782470703125, 
0.024505615234375, 0.023284912109375, 0.03668212890625, -0.02197265625, 0.0246124267578125, 0.03521728515625, -0.01422882080078125, -0.0439453125, 0.05841064453125, 0.002544403076171875, 0.03497314453125, 0.00421905517578125, 0.038421630859375, -0.038604736328125, -0.0280303955078125, -0.0273590087890625, 0.03997802734375, -0.0243377685546875, -0.01259613037109375, -0.04254150390625, -0.035400390625, -0.0501708984375, 0.0035839080810546875, -0.034393310546875, -0.0301513671875, -0.019500732421875, 0.0215911865234375, 0.02899169921875, 0.026947021484375, 0.0027027130126953125, 0.03369140625, -0.052978515625, 0.01507568359375, 0.007080078125, 0.02545166015625, -0.012969970703125, -0.06622314453125, -0.01300811767578125, 0.0268707275390625, -0.0198516845703125, -0.06646728515625, 0.045440673828125, -0.0025539398193359375, 0.04766845703125, 0.005279541015625, -0.0014352798461914062, 0.05682373046875, -0.005687713623046875, 0.061126708984375, 0.0218353271484375, -0.0501708984375, 0.03533935546875, -0.048583984375, 0.03753662109375, 0.0258941650390625, 0.04278564453125, -0.0265350341796875, -0.0390625, -0.07550048828125, -0.0860595703125, 0.0517578125, 0.053070068359375, 0.0010347366333007812, 0.0230560302734375, 0.0231475830078125, -0.0098114013671875, 0.0151519775390625, -0.07122802734375, -0.0209808349609375, -0.0218353271484375, -0.0245361328125, 0.017486572265625, 0.0003752708435058594, -0.006439208984375, -0.03289794921875, 0.0623779296875, -0.01114654541015625, 0.03369140625, 0.006496429443359375, 0.00041484832763671875, -0.0006155967712402344, -0.00511932373046875, 0.0316162109375, 0.0294952392578125, -0.035980224609375, -0.030242919921875, -0.00656890869140625, -0.035858154296875, -0.0235137939453125, 0.04150390625, -0.01146697998046875, -0.00807952880859375, 0.0242462158203125, 0.06512451171875, 0.0251312255859375, -0.0258941650390625, 0.0299224853515625, -0.02001953125, -0.02557373046875, -0.014190673828125, 0.0270538330078125, 0.006397247314453125, 
0.0168304443359375, 0.014923095703125, -0.00418853759765625, 0.015716552734375, -0.032806396484375, 0.006694793701171875, 0.0257568359375, -0.003543853759765625, -0.0202178955078125, 0.04730224609375, 0.01393890380859375, 0.00151824951171875, 0.055755615234375, -0.0186767578125, -0.0265960693359375, 0.04718017578125, 0.03558349609375, 0.06866455078125, -0.0189056396484375, -0.0005159378051757812, 0.040435791015625, 0.0092315673828125, -0.00666046142578125, 0.006542205810546875, -0.0020294189453125, -0.0596923828125, -0.03204345703125, -0.051300048828125, -0.024688720703125, 0.03521728515625, -0.050994873046875, 0.0209503173828125, -0.0213623046875, -0.026214599609375, 0.0001819133758544922, 0.01294708251953125, -0.0440673828125, 0.023651123046875, 0.00623321533203125, 0.039520263671875, -0.07879638671875, 0.05218505859375, 0.06060791015625, -0.059661865234375, -0.0799560546875, 0.00409698486328125, 0.005764007568359375, -0.049102783203125, 0.004787445068359375, 0.00921630859375, 0.0172882080078125, -0.002452850341796875, -0.042388916015625, -0.07196044921875, 0.08795166015625, 0.0250244140625, -0.02508544921875, -0.0192718505859375, -0.01319122314453125, 0.035919189453125, -0.0204010009765625, 0.047821044921875, 0.03741455078125, 0.032745361328125, 0.03131103515625, -0.09735107421875, 0.007965087890625, -0.031646728515625, 0.0262298583984375, 0.0026683807373046875, -0.06964111328125, 0.07855224609375, 0.0025119781494140625, 0.0000922083854675293, 0.00656890869140625, 0.04901123046875, 0.027496337890625, -0.005168914794921875, 0.048187255859375, 0.04913330078125, 0.028564453125, -0.007171630859375, 0.08746337890625, -0.0243988037109375, 0.054656982421875, 0.08233642578125, -0.002826690673828125, 0.038482666015625, 0.01433563232421875, -0.03424072265625, 0.0328369140625, 0.056793212890625, -0.0049285888671875, 0.024139404296875, -0.0008144378662109375, -0.0102386474609375, -0.0028133392333984375, -0.003101348876953125, -0.0423583984375, 0.032958984375, 
0.0166778564453125, -0.0265655517578125, -0.0135498046875, -0.0196990966796875, 0.03240966796875, -0.0251312255859375, -0.0044097900390625, 0.05902099609375, 0.004276275634765625, -0.055938720703125, 0.059173583984375, -0.0016307830810546875, 0.06732177734375, -0.0628662109375, -0.006221771240234375, -0.025360107421875, 0.0007104873657226562, -0.00737762451171875, -0.05950927734375, 0.0167999267578125, -0.0206756591796875, -0.007808685302734375, -0.0202484130859375, 0.057769775390625, -0.053070068359375, -0.042572021484375, 0.025115966796875, 0.0260009765625, 0.0224609375, -0.00390625, -0.0928955078125, 0.0193023681640625, 0.0002455711364746094, -0.03887939453125, 0.037017822265625, 0.0191497802734375, 0.0015439987182617188, 0.050384521484375, 0.052764892578125, 0.00392913818359375, -0.01214599609375, 0.0160980224609375, 0.06964111328125, -0.031036376953125, -0.040496826171875, -0.0537109375, 0.06011962890625, -0.018218994140625, -0.027679443359375, 0.0667724609375, 0.037353515625, 0.06671142578125, -0.00005364418029785156, 0.051849365234375, -0.031829833984375, 0.0211944580078125, -0.0340576171875, 0.05328369140625, -0.055633544921875, -0.0001207590103149414, -0.035888671875, -0.07525634765625, 0.0006847381591796875, 0.061187744140625, -0.01210784912109375, 0.047515869140625, 0.0361328125, 0.06439208984375, -0.00157928466796875, -0.00554656982421875, 0.0162353515625, 0.026641845703125, 0.018218994140625, 0.040283203125, 0.03607177734375, -0.04876708984375, 0.02984619140625, -0.034332275390625, -0.033660888671875, -0.0187225341796875, -0.0633544921875, -0.0574951171875, -0.045745849609375, -0.040924072265625, -0.04638671875, -0.002826690673828125, 0.06561279296875, 0.0675048828125, -0.043426513671875, -0.02557373046875, 0.0035457611083984375, 0.00043201446533203125, -0.01055908203125, -0.0260467529296875, 0.006504058837890625, 0.0012521743774414062, -0.072998046875, 0.00921630859375, 0.003936767578125, 0.0282440185546875, -0.0210723876953125, -0.01043701171875, 
-0.0191497802734375, -0.006061553955078125, 0.038543701171875, 0.011505126953125, -0.051239013671875, -0.0112762451171875, -0.0208740234375, 0.0001493692398071289, -0.0014696121215820312, 0.03375244140625, -0.041748046875, 0.02886962890625, 0.042694091796875, 0.0204315185546875, 0.041107177734375, 0.01412200927734375, 0.05072021484375, -0.048309326171875, 0.0170745849609375, 0.0127410888671875, 0.02685546875, 0.033935546875, -0.035919189453125, 0.039520263671875, 0.03668212890625, -0.0457763671875, -0.043670654296875, -0.0019626617431640625, -0.093017578125, -0.012969970703125, 0.09576416015625, -0.01251220703125, -0.024810791015625, 0.014739990234375, -0.0222625732421875, 0.0235137939453125, -0.04412841796875, 0.04034423828125, 0.051544189453125, -0.0256195068359375, -0.0233306884765625, -0.041961669921875, 0.029510498046875, 0.0247802734375, -0.0552978515625, -0.0153350830078125, 0.044647216796875, 0.034088134765625, 0.005657196044921875, 0.046722412109375, -0.0158843994140625, 0.0002543926239013672, -0.0024089813232421875, 0.00923919677734375, -0.004405975341796875, -0.0004706382751464844, -0.0210418701171875, -0.009613037109375, -0.0177459716796875, -0.0088043212890625 ] ]
LoupGarou/WizardCoder-Guanaco-15B-V1.1
2023-07-13T03:21:55.000Z
[ "transformers", "pytorch", "gpt_bigcode", "text-generation", "en", "dataset:guanaco", "license:apache-2.0", "endpoints_compatible", "has_space", "text-generation-inference", "region:us" ]
text-generation
LoupGarou
null
null
LoupGarou/WizardCoder-Guanaco-15B-V1.1
11
5,813
transformers
2023-07-12T06:10:19
--- language: - en datasets: - guanaco model_hub_library: - transformers license: - apache-2.0 --- ## WizardCoder-Guanaco-15B-V1.1 Model Card The WizardCoder-Guanaco-15B-V1.1 is a language model that combines the strengths of the [WizardCoder](https://huggingface.co/WizardLM/WizardCoder-15B-V1.0) base model and the [openassistant-guanaco](https://huggingface.co/datasets/timdettmers/openassistant-guanaco) dataset for finetuning. The openassistant-guanaco dataset was further trimmed to within 2 standard deviations of token size for input and output pairs and all non-english data has been removed to reduce training size requirements. Version 1.1 showcases notable enhancements, employing a modified version of the previous openassistant-guanaco dataset. This dataset underwent a comprehensive revision, replacing every single answer with those generated by GPT-4. The volume of the datasets has also been augmented by approximately 50%, with a particular focus on high school and abstract algebra. This expansion leveraged the combined capabilities of GPT-4 and GPT-3.5-Turbo. The initial evaluation of algebraic functions over 12 epochs indicated promising results from this enriched dataset. However, this is just the beginning; further refinements are in the pipeline, aiming to optimize the dataset quality and subsequently decrease the number of epochs required to achieve comparable results. Considering the need to curtail memory consumption during training, this dataset was tailored to consist solely of English language questions and answers. Consequently, the model's performance in language translation may not be up to par. Nevertheless, the focus remains on enhancing the model's proficiency and efficiency within its defined scope. # Intended Use This model is designed to be used for a wide array of text generation tasks that require understanding and generating English text. 
The model is expected to perform well in tasks such as answering questions, writing essays, summarizing text, translation, and more. However, given the specific data processing and finetuning done, it might be particularly effective for tasks related to English language question-answering systems. # Limitations Despite the powerful capabilities of this model, users should be aware of its limitations. The model's knowledge is up to date only until the time it was trained, and it doesn't know about events in the world after that. It can sometimes produce incorrect or nonsensical responses, as it doesn't understand the text in the same way humans do. It should be used as a tool to assist in generating text and not as a sole source of truth. # How to use Here is an example of how to use this model: ```python from transformers import AutoModelForCausalLM, AutoTokenizer import time import torch class Chatbot: def __init__(self, model_name): self.tokenizer = AutoTokenizer.from_pretrained(model_name, padding_side='left') self.model = AutoModelForCausalLM.from_pretrained(model_name, load_in_4bit=True, torch_dtype=torch.bfloat16) if self.tokenizer.pad_token_id is None: self.tokenizer.pad_token_id = self.tokenizer.eos_token_id def get_response(self, prompt): inputs = self.tokenizer.encode_plus(prompt, return_tensors="pt", padding='max_length', max_length=100) if next(self.model.parameters()).is_cuda: inputs = {name: tensor.to('cuda') for name, tensor in inputs.items()} start_time = time.time() tokens = self.model.generate(input_ids=inputs['input_ids'], attention_mask=inputs['attention_mask'], pad_token_id=self.tokenizer.pad_token_id, max_new_tokens=400) end_time = time.time() output_tokens = tokens[0][inputs['input_ids'].shape[-1]:] output = self.tokenizer.decode(output_tokens, skip_special_tokens=True) time_taken = end_time - start_time return output, time_taken def main(): chatbot = Chatbot("LoupGarou/WizardCoder-Guanaco-15B-V1.1") while True: user_input = input("Enter your 
prompt: ") if user_input.lower() == 'quit': break output, time_taken = chatbot.get_response(user_input) print("\033[33m" + output + "\033[0m") print("Time taken to process: ", time_taken, "seconds") print("Exited the program.") if __name__ == "__main__": main() ``` # Training Procedure The WizardCoder model, serving as the base, was fine-tuned on a modified version of the openassistant-guanaco dataset. This dataset underwent a significant revision, replacing every single answer with responses generated by the AI model GPT-4. It was then expanded by approximately 50%, emphasizing high school and abstract algebra-related questions, using a mix of GPT-4 and GPT-3.5-Turbo for answer generation. The selected dataset was standardized to fall within two standard deviations of token size for the question sets, ensuring consistency in data handling. The order of the questions was also randomized to mitigate any potential biases during the training phase. In the interest of optimizing memory usage during the training process, the dataset was streamlined to only include English language content. As a result, all non-English data was systematically expunged from this fine-tuning dataset. It's worth noting that this modification limits the model's performance in language translation tasks, but it significantly boosts its efficiency and effectiveness when dealing with English language questions and answers. ## Acknowledgements This model, WizardCoder-Guanaco-15B-V1.1, is simply building on the efforts of two great teams to evaluate the performance of a combined model with the strengths of the [WizardCoder base model](https://huggingface.co/WizardLM/WizardCoder-15B-V1.0) and the [openassistant-guanaco dataset](https://huggingface.co/datasets/timdettmers/openassistant-guanaco). A sincere appreciation goes out to the developers and the community involved in the creation and refinement of these models. 
Their commitment to providing open source tools and datasets have been instrumental in making this project a reality. Moreover, a special note of thanks to the [Hugging Face](https://huggingface.co/) team, whose transformative library has not only streamlined the process of model creation and adaptation, but also democratized the access to state-of-the-art machine learning technologies. Their impact on the development of this project cannot be overstated.
6,604
[ [ -0.02093505859375, -0.06524658203125, -0.004215240478515625, 0.0206298828125, 0.00176239013671875, -0.005634307861328125, -0.0170440673828125, -0.020782470703125, -0.014739990234375, 0.0177459716796875, -0.036895751953125, -0.032196044921875, -0.048248291015625, 0.01226043701171875, -0.0251617431640625, 0.09600830078125, -0.00504302978515625, 0.00336456298828125, -0.0147247314453125, 0.00547027587890625, -0.03839111328125, -0.031097412109375, -0.0675048828125, -0.0275115966796875, 0.032012939453125, 0.0179290771484375, 0.04766845703125, 0.034912109375, 0.025390625, 0.0240020751953125, 0.000059485435485839844, 0.031646728515625, -0.04931640625, -0.012298583984375, 0.00591278076171875, -0.02740478515625, -0.0372314453125, -0.007595062255859375, 0.0231475830078125, 0.0294189453125, -0.0176239013671875, 0.021209716796875, 0.0111083984375, 0.033416748046875, -0.04571533203125, 0.0312347412109375, -0.04266357421875, -0.00005120038986206055, 0.0035572052001953125, -0.0205535888671875, -0.022491455078125, -0.02496337890625, 0.002079010009765625, -0.05059814453125, 0.0271759033203125, -0.001758575439453125, 0.0887451171875, 0.0145721435546875, -0.0149993896484375, -0.024505615234375, -0.0626220703125, 0.05126953125, -0.05999755859375, 0.01552581787109375, 0.037078857421875, 0.025909423828125, -0.0155181884765625, -0.055999755859375, -0.056976318359375, -0.017669677734375, -0.00804901123046875, 0.01247406005859375, -0.0255126953125, 0.0062713623046875, 0.01537322998046875, 0.032928466796875, -0.0625, -0.004718780517578125, -0.0714111328125, -0.023162841796875, 0.04931640625, 0.02777099609375, 0.032379150390625, -0.0140228271484375, -0.0023021697998046875, -0.00847625732421875, -0.0276947021484375, 0.01239776611328125, 0.0390625, 0.0163421630859375, -0.00933837890625, 0.0496826171875, -0.025238037109375, 0.0478515625, 0.0109710693359375, -0.01520538330078125, 0.023590087890625, -0.0126190185546875, -0.0276031494140625, -0.0168304443359375, 0.0765380859375, 
0.0250091552734375, 0.0288848876953125, -0.01210784912109375, -0.0042724609375, -0.00835418701171875, 0.006977081298828125, -0.07086181640625, -0.0306396484375, 0.03265380859375, -0.02288818359375, -0.0183563232421875, -0.0018606185913085938, -0.035003662109375, -0.00429534912109375, -0.007350921630859375, 0.0501708984375, -0.0482177734375, -0.0198974609375, 0.0279083251953125, -0.0127410888671875, 0.031890869140625, 0.0222930908203125, -0.08758544921875, 0.0240020751953125, 0.027008056640625, 0.06781005859375, -0.01209259033203125, -0.0390625, -0.01503753662109375, -0.00241851806640625, -0.0182342529296875, 0.04705810546875, -0.0183258056640625, -0.0267181396484375, -0.018096923828125, 0.0035152435302734375, -0.0261383056640625, -0.03350830078125, 0.0168914794921875, -0.046295166015625, 0.048004150390625, 0.00897979736328125, -0.04815673828125, -0.00928497314453125, 0.0230712890625, -0.0404052734375, 0.08294677734375, 0.01922607421875, -0.05450439453125, 0.0097503662109375, -0.05255126953125, -0.01568603515625, -0.0009236335754394531, 0.0003097057342529297, -0.028045654296875, -0.00817108154296875, 0.0307464599609375, 0.02532958984375, -0.030792236328125, 0.00893402099609375, -0.0003943443298339844, -0.03515625, 0.0156707763671875, -0.052093505859375, 0.088134765625, 0.0267181396484375, -0.01641845703125, -0.003204345703125, -0.051361083984375, 0.0114288330078125, 0.023681640625, -0.02264404296875, -0.0017461776733398438, -0.02532958984375, 0.0008373260498046875, 0.0231781005859375, 0.027557373046875, -0.033355712890625, 0.0175018310546875, -0.042449951171875, 0.0233306884765625, 0.05072021484375, 0.00208282470703125, 0.0257568359375, -0.018157958984375, 0.0491943359375, -0.0021839141845703125, 0.0275115966796875, -0.00537872314453125, -0.03961181640625, -0.068115234375, -0.0157928466796875, 0.02752685546875, 0.06036376953125, -0.044219970703125, 0.046112060546875, -0.02142333984375, -0.054962158203125, -0.04815673828125, 0.002735137939453125, 0.0296630859375, 
0.04095458984375, 0.038848876953125, -0.015716552734375, -0.0252227783203125, -0.059539794921875, -0.005046844482421875, -0.0230560302734375, -0.0197296142578125, 0.0095672607421875, 0.047332763671875, -0.00553131103515625, 0.07012939453125, -0.047393798828125, -0.01244354248046875, -0.0217742919921875, 0.00527191162109375, 0.0155792236328125, 0.04962158203125, 0.024566650390625, -0.0430908203125, -0.04705810546875, 0.01340484619140625, -0.06671142578125, 0.0017309188842773438, -0.00762939453125, -0.014984130859375, 0.0136260986328125, 0.02825927734375, -0.0675048828125, 0.049591064453125, 0.03179931640625, -0.0297393798828125, 0.04791259765625, -0.0284881591796875, 0.02679443359375, -0.08221435546875, 0.00260162353515625, -0.0277557373046875, 0.01032257080078125, -0.0499267578125, -0.01065826416015625, 0.00225830078125, -0.001712799072265625, -0.0440673828125, 0.05865478515625, -0.032470703125, 0.029327392578125, -0.0229339599609375, -0.00089263916015625, 0.00568389892578125, 0.0650634765625, -0.01448822021484375, 0.069091796875, 0.04681396484375, -0.051055908203125, 0.043487548828125, 0.0257720947265625, -0.012481689453125, 0.0243377685546875, -0.0714111328125, 0.0191802978515625, -0.01561737060546875, -0.006053924560546875, -0.0780029296875, -0.013671875, 0.033416748046875, -0.0706787109375, 0.0290374755859375, -0.0130615234375, -0.03936767578125, -0.043548583984375, -0.0102386474609375, 0.0166168212890625, 0.048858642578125, -0.031005859375, 0.041290283203125, 0.019378662109375, 0.003444671630859375, -0.04815673828125, -0.0506591796875, -0.00859832763671875, -0.01209259033203125, -0.055694580078125, 0.0126495361328125, -0.0099334716796875, 0.00004661083221435547, -0.007244110107421875, -0.008514404296875, -0.00800323486328125, -0.005512237548828125, 0.005008697509765625, 0.028961181640625, 0.0038967132568359375, 0.01165008544921875, 0.006107330322265625, -0.0042266845703125, 0.005970001220703125, -0.0255279541015625, 0.05401611328125, -0.004917144775390625, 
-0.013153076171875, -0.02752685546875, 0.012939453125, 0.0287933349609375, -0.030517578125, 0.08856201171875, 0.050323486328125, -0.03643798828125, -0.007671356201171875, -0.0343017578125, -0.01073455810546875, -0.040679931640625, 0.045257568359375, -0.03204345703125, -0.055328369140625, 0.05126953125, 0.03802490234375, 0.0153045654296875, 0.039459228515625, 0.058074951171875, 0.0087738037109375, 0.08795166015625, 0.0364990234375, 0.0039825439453125, 0.0379638671875, -0.04107666015625, 0.0180206298828125, -0.047332763671875, -0.038787841796875, -0.035430908203125, 0.0032939910888671875, -0.040130615234375, -0.0236968994140625, 0.0079345703125, 0.01439666748046875, -0.027008056640625, 0.031585693359375, -0.049407958984375, 0.031402587890625, 0.033416748046875, 0.003978729248046875, 0.0101318359375, 0.00714874267578125, 0.0030651092529296875, 0.0095367431640625, -0.061126708984375, -0.043365478515625, 0.09698486328125, 0.0197601318359375, 0.06365966796875, -0.0091094970703125, 0.03167724609375, 0.008453369140625, 0.01006317138671875, -0.0411376953125, 0.04022216796875, 0.01552581787109375, -0.057647705078125, -0.03143310546875, -0.035400390625, -0.0867919921875, 0.019866943359375, 0.005191802978515625, -0.058135986328125, -0.0005044937133789062, 0.0070343017578125, -0.02655029296875, 0.023773193359375, -0.07537841796875, 0.08843994140625, -0.00330352783203125, -0.02630615234375, -0.0035915374755859375, -0.03155517578125, 0.0224456787109375, 0.00926971435546875, -0.0117950439453125, -0.0085296630859375, 0.006778717041015625, 0.06402587890625, -0.048370361328125, 0.046142578125, -0.00994873046875, -0.005786895751953125, 0.042449951171875, -0.0098419189453125, 0.0205535888671875, 0.0034122467041015625, 0.0086212158203125, 0.01702880859375, 0.01021575927734375, -0.04571533203125, -0.053436279296875, 0.050506591796875, -0.08123779296875, -0.0396728515625, -0.030303955078125, -0.0416259765625, -0.01099395751953125, 0.0043182373046875, 0.03802490234375, 0.033660888671875, 
-0.00942230224609375, -0.003875732421875, 0.05255126953125, -0.02056884765625, 0.02734375, 0.02581787109375, -0.020904541015625, -0.0276641845703125, 0.06512451171875, 0.020904541015625, 0.017059326171875, 0.0185699462890625, -0.0007810592651367188, -0.011505126953125, -0.03448486328125, -0.03631591796875, 0.0161895751953125, -0.05389404296875, -0.02374267578125, -0.06524658203125, -0.040069580078125, -0.051544189453125, 0.00946044921875, -0.0298614501953125, -0.03765869140625, -0.0286407470703125, -0.01021575927734375, 0.0517578125, 0.033233642578125, 0.008026123046875, 0.0239105224609375, -0.04632568359375, 0.02056884765625, 0.03509521484375, 0.0185089111328125, 0.004199981689453125, -0.06494140625, -0.0433349609375, 0.007686614990234375, -0.03271484375, -0.061431884765625, 0.03692626953125, 0.007434844970703125, 0.03802490234375, 0.01513671875, -0.00571441650390625, 0.058319091796875, -0.03533935546875, 0.0670166015625, -0.0015506744384765625, -0.048583984375, 0.0369873046875, -0.0207061767578125, 0.042449951171875, 0.026275634765625, 0.02978515625, -0.04248046875, -0.03924560546875, -0.03790283203125, -0.05889892578125, 0.05853271484375, 0.04205322265625, 0.025146484375, -0.01195526123046875, 0.025604248046875, 0.0110626220703125, 0.00991058349609375, -0.043792724609375, -0.036956787109375, -0.0264739990234375, -0.026123046875, 0.007770538330078125, 0.00707244873046875, 0.0121002197265625, -0.043548583984375, 0.06317138671875, -0.0033321380615234375, 0.05126953125, 0.01535797119140625, -0.024261474609375, 0.0095367431640625, 0.00775909423828125, 0.032684326171875, 0.052734375, -0.009246826171875, -0.0256805419921875, 0.03082275390625, -0.060150146484375, 0.00215911865234375, 0.03125, -0.0188446044921875, -0.00351715087890625, 0.02191162109375, 0.07586669921875, -0.02056884765625, -0.0413818359375, 0.030303955078125, -0.0189361572265625, -0.0136566162109375, -0.027679443359375, 0.025909423828125, -0.00374603271484375, 0.0111846923828125, 0.0404052734375, 
0.003204345703125, 0.0081329345703125, -0.0193023681640625, -0.002864837646484375, 0.024078369140625, -0.01195526123046875, -0.030517578125, 0.06549072265625, 0.0016689300537109375, -0.0279388427734375, 0.049835205078125, -0.01399993896484375, -0.03314208984375, 0.053741455078125, 0.03375244140625, 0.06842041015625, -0.016937255859375, 0.0102386474609375, 0.0489501953125, 0.0257720947265625, -0.0001537799835205078, 0.00917816162109375, -0.0009107589721679688, -0.04986572265625, -0.027313232421875, -0.035980224609375, -0.00792694091796875, 0.01511383056640625, -0.042999267578125, 0.03765869140625, -0.0268096923828125, -0.00714874267578125, -0.0272216796875, 0.0157928466796875, -0.07562255859375, 0.0176544189453125, 0.0007786750793457031, 0.07269287109375, -0.049560546875, 0.06488037109375, 0.0309295654296875, -0.055023193359375, -0.06591796875, -0.0126800537109375, -0.0201416015625, -0.05609130859375, 0.0244903564453125, 0.0124053955078125, -0.004390716552734375, 0.0113067626953125, -0.051849365234375, -0.062103271484375, 0.0916748046875, 0.02362060546875, -0.0167694091796875, -0.035919189453125, 0.0111083984375, 0.055999755859375, -0.0161285400390625, 0.052093505859375, 0.047393798828125, 0.015625, -0.00478363037109375, -0.08294677734375, -0.00876617431640625, -0.03692626953125, -0.0006494522094726562, -0.0022983551025390625, -0.059356689453125, 0.07183837890625, -0.0104217529296875, -0.01152801513671875, 0.0130615234375, 0.074462890625, 0.010772705078125, 0.002197265625, 0.0109100341796875, 0.0279388427734375, 0.064208984375, -0.004180908203125, 0.0784912109375, -0.0247039794921875, 0.04022216796875, 0.0682373046875, 0.0115814208984375, 0.04278564453125, 0.03411865234375, -0.024505615234375, 0.032623291015625, 0.0440673828125, -0.006130218505859375, 0.04345703125, 0.01236724853515625, -0.0142822265625, 0.00010591745376586914, 0.0154876708984375, -0.03411865234375, 0.0264434814453125, 0.0157928466796875, -0.006984710693359375, -0.0108795166015625, 
0.0279998779296875, 0.01244354248046875, -0.01323699951171875, -0.019744873046875, 0.046142578125, -0.01535797119140625, -0.0506591796875, 0.07305908203125, 0.002223968505859375, 0.06671142578125, -0.0660400390625, 0.0053863525390625, -0.036041259765625, 0.01439666748046875, -0.011474609375, -0.0428466796875, 0.0141448974609375, 0.006496429443359375, -0.00701904296875, 0.00016891956329345703, 0.04486083984375, -0.044097900390625, -0.039764404296875, 0.0128173828125, 0.033172607421875, 0.0247344970703125, 0.0031795501708984375, -0.057861328125, -0.0007009506225585938, 0.00124359130859375, -0.036712646484375, 0.0126800537109375, 0.040802001953125, -0.0008611679077148438, 0.0550537109375, 0.03594970703125, -0.00238800048828125, 0.024322509765625, -0.00656890869140625, 0.06524658203125, -0.037506103515625, -0.020477294921875, -0.0633544921875, 0.04522705078125, -0.0081787109375, -0.044281005859375, 0.08203125, 0.052581787109375, 0.08099365234375, -0.01580810546875, 0.05828857421875, -0.022552490234375, 0.0229644775390625, -0.029144287109375, 0.06585693359375, -0.035736083984375, 0.0192413330078125, -0.0151824951171875, -0.061065673828125, -0.01387786865234375, 0.04840087890625, -0.028045654296875, 0.01824951171875, 0.048004150390625, 0.08660888671875, -0.003204345703125, -0.01250457763671875, 0.02203369140625, -0.0008792877197265625, 0.0279083251953125, 0.05816650390625, 0.049407958984375, -0.068115234375, 0.08319091796875, -0.0229949951171875, -0.01422119140625, 0.00252532958984375, -0.031402587890625, -0.09478759765625, -0.04840087890625, -0.030609130859375, -0.040496826171875, 0.005832672119140625, 0.07574462890625, 0.043182373046875, -0.0609130859375, -0.01910400390625, -0.0034618377685546875, -0.0015974044799804688, -0.0202484130859375, -0.0209197998046875, 0.0313720703125, -0.0276947021484375, -0.07830810546875, 0.004215240478515625, 0.00009566545486450195, 0.0136566162109375, -0.024810791015625, -0.0244903564453125, -0.01168060302734375, 0.021697998046875, 
0.03643798828125, 0.036224365234375, -0.03375244140625, -0.0184173583984375, 0.0290679931640625, -0.0271148681640625, 0.01348114013671875, 0.029083251953125, -0.0616455078125, 0.035003662109375, 0.03802490234375, 0.037506103515625, 0.05609130859375, -0.023529052734375, 0.040618896484375, -0.037689208984375, 0.0183258056640625, 0.0025997161865234375, 0.0124053955078125, 0.0299072265625, -0.028564453125, 0.03594970703125, 0.008819580078125, -0.049407958984375, -0.055206298828125, -0.0175323486328125, -0.042999267578125, -0.015869140625, 0.0849609375, -0.0022525787353515625, -0.0379638671875, -0.01299285888671875, -0.03948974609375, 0.033233642578125, -0.02691650390625, 0.0625, 0.0660400390625, -0.007068634033203125, -0.01678466796875, -0.043701171875, 0.03448486328125, 0.0204010009765625, -0.05853271484375, -0.0016202926635742188, 0.028594970703125, 0.0299072265625, 0.00396728515625, 0.06317138671875, -0.0124359130859375, 0.036834716796875, 0.0118865966796875, 0.020263671875, -0.0274505615234375, -0.0149688720703125, -0.033447265625, 0.0015735626220703125, -0.0073699951171875, -0.02978515625 ] ]
pankajmathur/orca_mini_7b
2023-07-13T06:32:16.000Z
[ "transformers", "pytorch", "llama", "text-generation", "en", "dataset:psmathur/alpaca_orca", "dataset:psmathur/dolly-v2_orca", "dataset:psmathur/WizardLM_Orca", "arxiv:2306.02707", "license:cc-by-nc-sa-4.0", "endpoints_compatible", "has_space", "text-generation-inference", "region:us" ]
text-generation
pankajmathur
null
null
pankajmathur/orca_mini_7b
17
5,812
transformers
2023-06-23T04:31:14
--- license: cc-by-nc-sa-4.0 language: - en library_name: transformers pipeline_tag: text-generation datasets: - psmathur/alpaca_orca - psmathur/dolly-v2_orca - psmathur/WizardLM_Orca --- # orca_mini_7b An [OpenLLaMa-7B model](https://github.com/openlm-research/open_llama) model trained on explain tuned datasets, created using Instructions and Input from WizardLM, Alpaca & Dolly-V2 datasets and applying Orca Research Paper dataset construction approaches. # Dataset We build explain tuned [WizardLM dataset ~70K](https://github.com/nlpxucan/WizardLM), [Alpaca dataset ~52K](https://crfm.stanford.edu/2023/03/13/alpaca.html) & [Dolly-V2 dataset ~15K](https://github.com/databrickslabs/dolly) created using approaches from [Orca Research Paper](https://arxiv.org/abs/2306.02707). We leverage all of the 15 system instructions provided in Orca Research Paper. to generate custom datasets, in contrast to vanilla instruction tuning approaches used by original datasets. This helps student model aka this model to learn ***thought*** process from teacher model, which is ChatGPT (gpt-3.5-turbo-0301 version). Please see below example usage how the **System** prompt is added before each **instruction**. # Training The training configurations are provided in the table below. 
The training takes on 8x A100(80G) GPUs and lasts for around 7 Hours for cost of $84 using [Lambda Labs](https://lambdalabs.com) We used DeepSpeed with fully sharded data parallelism, also know as [ZeRO stage 3](https://engineering.fb.com/2021/07/15/open-source/fsdp/) by writing our own fine tunning scripts plus leveraging some of the model training code provided by amazing [OpenAlpaca repo](https://github.com/yxuansu/OpenAlpaca) Here are some of params used during training: ||| |:-------------:|:-------------:| |*batch_size*|32| |*train_micro_batch_size_per_gpu*|2| |*gradient_accumulation_steps*|2| |*Learning rate*|2e-5| |*Max length*|1024| |*Epochs*|3| |*Optimizer*|AdamW| # Example Usage Below shows an example on how to use this model ```python import torch from transformers import LlamaForCausalLM, LlamaTokenizer # Hugging Face model_path model_path = 'psmathur/orca_mini_7b' tokenizer = LlamaTokenizer.from_pretrained(model_path) model = LlamaForCausalLM.from_pretrained( model_path, torch_dtype=torch.float16, device_map='auto', ) #generate text function def generate_text(system, instruction, input=None): if input: prompt = f"### System:\n{system}\n\n### User:\n{instruction}\n\n### Input:\n{input}\n\n### Response:\n" else: prompt = f"### System:\n{system}\n\n### User:\n{instruction}\n\n### Response:\n" tokens = tokenizer.encode(prompt) tokens = torch.LongTensor(tokens).unsqueeze(0) tokens = tokens.to('cuda') instance = {'input_ids': tokens,'top_p': 1.0, 'temperature':0.7, 'generate_len': 1024, 'top_k': 50} length = len(tokens[0]) with torch.no_grad(): rest = model.generate( input_ids=tokens, max_length=length+instance['generate_len'], use_cache=True, do_sample=True, top_p=instance['top_p'], temperature=instance['temperature'], top_k=instance['top_k'] ) output = rest[0][length:] string = tokenizer.decode(output, skip_special_tokens=True) return f'[!] 
Response: {string}' # Sample Test Instruction Used by Youtuber Sam Witteveen https://www.youtube.com/@samwitteveenai system = 'You are an AI assistant that follows instruction extremely well. Help as much as you can.' instruction = 'Write a letter to Sam Altman, CEO of OpenAI, requesting him to convert GPT4 a private model by OpenAI to an open source project' print(generate_text(system, instruction)) ``` ``` [!] Response: Dear Sam Altman, I am writing to request that you convert the GPT4 private model developed by OpenAI to an open source project. As a user of OpenAI, I have been waiting for the day when I can use the advanced natural language processing capabilities of GPT4 in a more open and accessible way. While OpenAI has made significant progress in developing AI applications, it has primarily focused on building private models that are not accessible to the general public. However, with the recent release of GPT-3, there is a growing demand for more open and accessible AI tools. Converting GPT4 to an open source project would allow for greater transparency, collaboration, and innovation. It would also help to build trust in the technology and ensure that it is used ethically and responsibly. I urge you to consider converting GPT4 to an open source project. This would be a significant contribution to the AI community and would help to create a more open and accessible future. Thank you for your consideration. Sincerely, [Your Name] ``` **P.S. I am #opentowork and #collaboration, if you can help, please reach out to me at www.linkedin.com/in/pankajam** ** Next Goals: 1) Try more data like actually using FLAN-v2, just like Orka Research Paper (I am open for suggestions) 2) Provide more options for Text generation UI. 
(may be https://github.com/oobabooga/text-generation-webui) 3) Provide 4bit GGML/GPTQ quantized model (may be [TheBloke](https://huggingface.co/TheBloke) can help here) Limitations & Biases: This model can produce factually incorrect output, and should not be relied on to produce factually accurate information. This model was trained on various public datasets. While great efforts have been taken to clean the pretraining data, it is possible that this model could generate lewd, biased or otherwise offensive outputs. Disclaimer: The license on this model does not constitute legal advice. We are not responsible for the actions of third parties who use this model. Please cosult an attorney before using this model for commercial purposes. Citiation: If you found wizardlm_alpaca_dolly_orca_open_llama_7b useful in your research or applications, please kindly cite using the following BibTeX: ``` @misc{orca_mini_7b, author = {Pankaj Mathur}, title = {orca_mini_7b: An explain tuned OpenLLaMA-7b model on custom wizardlm, alpaca, & dolly datasets}, year = {2023}, publisher = {GitHub, HuggingFace}, journal = {GitHub repository, HuggingFace repository}, howpublished = {\url{https://github.com/pankajarm/wizardlm_alpaca_dolly_orca_open_llama_7b}, \url{https://https://huggingface.co/psmathur/wizardlm_alpaca_dolly_orca_open_llama_7b}}, } ``` ``` @misc{mukherjee2023orca, title={Orca: Progressive Learning from Complex Explanation Traces of GPT-4}, author={Subhabrata Mukherjee and Arindam Mitra and Ganesh Jawahar and Sahaj Agarwal and Hamid Palangi and Ahmed Awadallah}, year={2023}, eprint={2306.02707}, archivePrefix={arXiv}, primaryClass={cs.CL} } ``` ``` @software{openlm2023openllama, author = {Xinyang Geng and Hao Liu}, title = {OpenLLaMA: An Open Reproduction of LLaMA}, month = May, year = 2023, url = {https://github.com/openlm-research/open_llama} } ``` ``` @misc{openalpaca, author = {Yixuan Su and Tian Lan and Deng Cai}, title = {OpenAlpaca: A Fully Open-Source 
Instruction-Following Model Based On OpenLLaMA}, year = {2023}, publisher = {GitHub}, journal = {GitHub repository}, howpublished = {\url{https://github.com/yxuansu/OpenAlpaca}}, } ``` ``` @misc{alpaca, author = {Rohan Taori and Ishaan Gulrajani and Tianyi Zhang and Yann Dubois and Xuechen Li and Carlos Guestrin and Percy Liang and Tatsunori B. Hashimoto }, title = {Stanford Alpaca: An Instruction-following LLaMA model}, year = {2023}, publisher = {GitHub}, journal = {GitHub repository}, howpublished = {\url{https://github.com/tatsu-lab/stanford_alpaca}}, } ```
7,770
[ [ -0.022216796875, -0.06494140625, 0.025421142578125, 0.0045013427734375, -0.01517486572265625, -0.0266571044921875, -0.0192413330078125, -0.042816162109375, -0.004512786865234375, 0.0307159423828125, -0.0372314453125, -0.05029296875, -0.0269927978515625, -0.0007152557373046875, -0.014404296875, 0.09967041015625, -0.02801513671875, -0.006336212158203125, 0.01215362548828125, -0.011688232421875, -0.024017333984375, -0.0270233154296875, -0.056793212890625, -0.016204833984375, 0.031951904296875, 0.01441192626953125, 0.0457763671875, 0.04833984375, 0.0218658447265625, 0.0225067138671875, -0.0019989013671875, 0.016754150390625, -0.03826904296875, -0.0227203369140625, 0.005863189697265625, -0.034912109375, -0.0511474609375, 0.0148162841796875, 0.03314208984375, 0.0177764892578125, -0.01136016845703125, 0.0273284912109375, 0.00864410400390625, 0.0234527587890625, -0.0401611328125, 0.039093017578125, -0.0345458984375, -0.0017299652099609375, -0.02642822265625, -0.0042572021484375, -0.019256591796875, -0.044769287109375, 0.00463104248046875, -0.06671142578125, 0.02838134765625, -0.0122833251953125, 0.084716796875, 0.01424407958984375, -0.01255035400390625, -0.03057861328125, -0.051483154296875, 0.054901123046875, -0.07257080078125, 0.0082550048828125, 0.0232086181640625, 0.0261688232421875, -0.01366424560546875, -0.06378173828125, -0.060455322265625, -0.01788330078125, -0.0068359375, 0.016632080078125, -0.00022912025451660156, -0.01202392578125, 0.0184326171875, 0.037139892578125, -0.047393798828125, -0.00328826904296875, -0.0458984375, -0.01184844970703125, 0.037322998046875, 0.007556915283203125, 0.01506805419921875, -0.00391387939453125, -0.0258941650390625, -0.0263519287109375, -0.053802490234375, 0.0190277099609375, 0.03912353515625, 0.02679443359375, -0.0298919677734375, 0.05120849609375, -0.0086212158203125, 0.04736328125, -0.004833221435546875, -0.02294921875, 0.0377197265625, -0.026092529296875, -0.032745361328125, -0.00246429443359375, 0.07000732421875, 
0.002349853515625, 0.0038318634033203125, 0.004390716552734375, -0.0058441162109375, 0.00528717041015625, 0.00386810302734375, -0.062469482421875, -0.01690673828125, 0.00945281982421875, -0.028533935546875, -0.023956298828125, -0.003513336181640625, -0.059967041015625, -0.00986480712890625, -0.010894775390625, 0.038482666015625, -0.040313720703125, -0.0219573974609375, 0.0214080810546875, 0.0107269287109375, 0.039764404296875, 0.022979736328125, -0.07464599609375, 0.0112152099609375, 0.03619384765625, 0.07574462890625, 0.01654052734375, -0.0274200439453125, -0.0183563232421875, 0.01486968994140625, -0.017608642578125, 0.035003662109375, -0.0233612060546875, -0.023712158203125, -0.01678466796875, 0.005062103271484375, -0.01279449462890625, -0.0200042724609375, 0.0440673828125, -0.03680419921875, 0.03741455078125, -0.0159759521484375, -0.0151519775390625, -0.033905029296875, 0.00832366943359375, -0.045379638671875, 0.074951171875, 0.0083465576171875, -0.06182861328125, 0.0150604248046875, -0.07720947265625, -0.00569915771484375, -0.0166015625, -0.00634765625, -0.048553466796875, -0.0198516845703125, 0.038848876953125, 0.0174560546875, -0.02728271484375, 0.0151519775390625, -0.0193634033203125, -0.01531219482421875, 0.0021038055419921875, -0.02716064453125, 0.08270263671875, 0.0217742919921875, -0.042022705078125, 0.02264404296875, -0.060791015625, -0.00643157958984375, 0.0296478271484375, -0.03289794921875, -0.00766754150390625, -0.0175933837890625, -0.01194000244140625, -0.0015859603881835938, 0.0290374755859375, -0.04541015625, 0.0287322998046875, -0.03924560546875, 0.04974365234375, 0.060455322265625, -0.0105133056640625, 0.02423095703125, -0.019287109375, 0.030303955078125, -0.005947113037109375, 0.0287322998046875, -0.00888824462890625, -0.06610107421875, -0.06939697265625, -0.021331787109375, 0.01499176025390625, 0.0297088623046875, -0.052642822265625, 0.025909423828125, -0.01316070556640625, -0.0489501953125, -0.052032470703125, -0.005405426025390625, 
0.0298309326171875, 0.06243896484375, 0.04620361328125, -0.022857666015625, -0.031585693359375, -0.04736328125, 0.00440216064453125, -0.01123046875, -0.0085906982421875, 0.01461029052734375, 0.051483154296875, -0.00783538818359375, 0.07257080078125, -0.044921875, -0.0301513671875, -0.005435943603515625, 0.01313018798828125, 0.025299072265625, 0.052978515625, 0.048248291015625, -0.0394287109375, -0.0304107666015625, 0.0039825439453125, -0.06744384765625, 0.007843017578125, 0.006259918212890625, -0.02337646484375, 0.024444580078125, 0.009429931640625, -0.06097412109375, 0.054901123046875, 0.03509521484375, -0.027099609375, 0.038055419921875, -0.01479339599609375, 0.004428863525390625, -0.07110595703125, 0.021697998046875, -0.0068817138671875, -0.0010194778442382812, -0.03265380859375, 0.0167388916015625, -0.0024318695068359375, -0.013702392578125, -0.034027099609375, 0.041534423828125, -0.034088134765625, -0.003314971923828125, -0.007320404052734375, -0.0020465850830078125, -0.0028285980224609375, 0.059661865234375, -0.00112152099609375, 0.06195068359375, 0.039581298828125, -0.03466796875, 0.021575927734375, 0.031463623046875, -0.0249786376953125, 0.00785064697265625, -0.0657958984375, 0.0302276611328125, 0.005950927734375, 0.035125732421875, -0.05584716796875, -0.01953125, 0.06231689453125, -0.03790283203125, 0.01806640625, -0.0024623870849609375, -0.0345458984375, -0.027374267578125, -0.02667236328125, 0.02618408203125, 0.04248046875, -0.052947998046875, 0.043701171875, 0.0096282958984375, 0.006473541259765625, -0.03857421875, -0.049560546875, -0.0227813720703125, -0.017059326171875, -0.0550537109375, 0.0287933349609375, -0.013397216796875, 0.00893402099609375, 0.003726959228515625, 0.000858306884765625, 0.00327301025390625, -0.01139068603515625, 0.0179901123046875, 0.032806396484375, -0.0301513671875, -0.0080413818359375, 0.0005087852478027344, -0.00708770751953125, -0.002750396728515625, -0.0268707275390625, 0.0482177734375, -0.025177001953125, -0.022216796875, 
-0.039581298828125, 0.005138397216796875, 0.0228729248046875, -0.03375244140625, 0.07257080078125, 0.06292724609375, -0.0260162353515625, 0.00414276123046875, -0.02557373046875, -0.01033782958984375, -0.039703369140625, 0.0161590576171875, -0.026397705078125, -0.04644775390625, 0.035430908203125, 0.0201568603515625, 0.032501220703125, 0.046875, 0.048431396484375, 0.0222930908203125, 0.0648193359375, 0.05047607421875, 0.0032520294189453125, 0.03228759765625, -0.049468994140625, 0.0074310302734375, -0.06512451171875, -0.036590576171875, -0.046234130859375, -0.01334381103515625, -0.03778076171875, -0.03631591796875, 0.0318603515625, 0.00856781005859375, -0.048980712890625, 0.0272369384765625, -0.0584716796875, 0.018463134765625, 0.04620361328125, 0.0253753662109375, 0.016632080078125, -0.00010776519775390625, 0.005596160888671875, 0.0167999267578125, -0.042694091796875, -0.052978515625, 0.09930419921875, 0.0294647216796875, 0.045684814453125, 0.00786590576171875, 0.0496826171875, -0.0105133056640625, 0.032379150390625, -0.03326416015625, 0.04254150390625, 0.0082855224609375, -0.04351806640625, -0.036285400390625, -0.0316162109375, -0.0845947265625, 0.013153076171875, 0.0022945404052734375, -0.060272216796875, 0.01239776611328125, 0.0030765533447265625, -0.0298309326171875, 0.03228759765625, -0.054168701171875, 0.07080078125, -0.006450653076171875, -0.01605224609375, 0.0025196075439453125, -0.04052734375, 0.04705810546875, 0.0011148452758789062, 0.008331298828125, -0.004734039306640625, -0.01389312744140625, 0.07196044921875, -0.059722900390625, 0.0684814453125, -0.017608642578125, -0.02362060546875, 0.038116455078125, -0.0256805419921875, 0.0391845703125, 0.0119476318359375, -0.0179443359375, 0.032012939453125, -0.014862060546875, -0.036651611328125, -0.0256805419921875, 0.0640869140625, -0.09588623046875, -0.03924560546875, -0.0377197265625, -0.0379638671875, 0.0012950897216796875, 0.01515960693359375, 0.031646728515625, 0.025299072265625, 0.00928497314453125, 
-0.007129669189453125, 0.03985595703125, -0.0290374755859375, 0.0377197265625, 0.035736083984375, -0.0178985595703125, -0.0416259765625, 0.06817626953125, 0.01300048828125, 0.005840301513671875, 0.015380859375, 0.021026611328125, -0.02178955078125, -0.044586181640625, -0.044769287109375, 0.03680419921875, -0.05523681640625, -0.0194091796875, -0.045501708984375, -0.022125244140625, -0.044189453125, -0.0027008056640625, -0.0247344970703125, -0.0202178955078125, -0.055023193359375, -0.00656890869140625, 0.0506591796875, 0.052734375, 0.0019521713256835938, 0.0260162353515625, -0.041839599609375, 0.0266265869140625, 0.029052734375, 0.0160369873046875, 0.0095672607421875, -0.050140380859375, -0.0158843994140625, 0.0170135498046875, -0.052886962890625, -0.0662841796875, 0.03814697265625, 0.0120086669921875, 0.035858154296875, 0.01360321044921875, 0.0037860870361328125, 0.060699462890625, -0.0210723876953125, 0.07611083984375, 0.0131988525390625, -0.0714111328125, 0.045562744140625, -0.03143310546875, 0.0182647705078125, 0.0146942138671875, 0.03564453125, -0.003955841064453125, -0.02081298828125, -0.04522705078125, -0.06732177734375, 0.07781982421875, 0.03533935546875, 0.0003902912139892578, 0.018035888671875, 0.03289794921875, 0.026519775390625, 0.01068115234375, -0.0753173828125, -0.019317626953125, -0.044708251953125, -0.0105743408203125, -0.0098724365234375, 0.006694793701171875, -0.0161895751953125, -0.0196380615234375, 0.0684814453125, -0.00780487060546875, 0.040130615234375, 0.013671875, 0.00665283203125, -0.006683349609375, 0.00417327880859375, 0.05474853515625, 0.043548583984375, -0.02008056640625, -0.0194244384765625, 0.011474609375, -0.052947998046875, -0.000598907470703125, 0.0190277099609375, -0.025390625, -0.00649261474609375, 0.02288818359375, 0.0728759765625, -0.0098114013671875, -0.0190277099609375, 0.02685546875, -0.0006875991821289062, -0.00922393798828125, -0.021240234375, 0.01236724853515625, 0.006053924560546875, 0.026824951171875, 0.0220489501953125, 
0.0093994140625, -0.0027618408203125, -0.04180908203125, -0.0233154296875, 0.0144500732421875, 0.004657745361328125, -0.03680419921875, 0.0738525390625, 0.008453369140625, -0.018096923828125, 0.043670654296875, -0.008544921875, -0.0213623046875, 0.054840087890625, 0.038726806640625, 0.06256103515625, -0.0142974853515625, 0.0134735107421875, 0.04296875, 0.024383544921875, -0.00389862060546875, 0.0258331298828125, 0.0034732818603515625, -0.02899169921875, -0.0200042724609375, -0.0511474609375, -0.0194244384765625, 0.03094482421875, -0.041015625, 0.040008544921875, -0.0419921875, -0.004390716552734375, -0.00438690185546875, 0.0030193328857421875, -0.061279296875, 0.01151275634765625, 0.00420379638671875, 0.061126708984375, -0.048431396484375, 0.079833984375, 0.040008544921875, -0.0643310546875, -0.0823974609375, -0.0045013427734375, -0.0121002197265625, -0.06884765625, 0.03497314453125, 0.01537322998046875, 0.0005979537963867188, -0.0002319812774658203, -0.05523681640625, -0.0712890625, 0.10479736328125, 0.041351318359375, -0.03009033203125, -0.0196990966796875, 0.00860595703125, 0.04058837890625, -0.020599365234375, 0.04437255859375, 0.039398193359375, 0.03521728515625, 0.00711822509765625, -0.08099365234375, 0.0197296142578125, -0.01300811767578125, 0.004077911376953125, 0.0037288665771484375, -0.0701904296875, 0.09844970703125, -0.0195770263671875, -0.01116943359375, 0.0230560302734375, 0.0667724609375, 0.0306549072265625, 0.0294189453125, 0.0196990966796875, 0.043243408203125, 0.05438232421875, -0.00514984130859375, 0.0692138671875, -0.0096282958984375, 0.04315185546875, 0.07275390625, -0.0013427734375, 0.058258056640625, 0.016754150390625, -0.0241241455078125, 0.05078125, 0.055450439453125, -0.002017974853515625, 0.05303955078125, 0.00638580322265625, -0.00698089599609375, 0.01666259765625, 0.0177154541015625, -0.06451416015625, 0.0258636474609375, 0.023223876953125, -0.0254669189453125, -0.0179901123046875, 0.0119781494140625, 0.0159149169921875, 
-0.0198516845703125, -0.0178375244140625, 0.04248046875, -0.00238037109375, -0.042144775390625, 0.08062744140625, 0.009857177734375, 0.059112548828125, -0.06610107421875, -0.0065155029296875, -0.0157928466796875, 0.012786865234375, -0.0261688232421875, -0.037872314453125, 0.0159912109375, 0.0115966796875, 0.00024390220642089844, -0.0020465850830078125, 0.031646728515625, -0.0254669189453125, -0.031402587890625, 0.0092620849609375, 0.00909423828125, 0.0316162109375, 0.01103973388671875, -0.05938720703125, 0.018310546875, 0.011932373046875, -0.04522705078125, 0.028045654296875, 0.033233642578125, 0.0020236968994140625, 0.031951904296875, 0.05810546875, -0.0030517578125, 0.0185089111328125, -0.022003173828125, 0.08355712890625, -0.036865234375, -0.033050537109375, -0.0545654296875, 0.03692626953125, 0.0084075927734375, -0.0416259765625, 0.060943603515625, 0.041534423828125, 0.07769775390625, -0.0161590576171875, 0.06231689453125, -0.01849365234375, 0.021820068359375, -0.047698974609375, 0.05645751953125, -0.034942626953125, 0.026763916015625, -0.0201568603515625, -0.0693359375, 0.0020885467529296875, 0.0584716796875, -0.03826904296875, 0.0127716064453125, 0.04425048828125, 0.06854248046875, -0.01100921630859375, 0.01611328125, 0.0016889572143554688, 0.01666259765625, 0.044189453125, 0.05322265625, 0.03546142578125, -0.044921875, 0.06378173828125, -0.03460693359375, -0.0311431884765625, -0.005084991455078125, -0.060699462890625, -0.0657958984375, -0.036407470703125, -0.0270233154296875, -0.0302276611328125, 0.0016422271728515625, 0.05474853515625, 0.04669189453125, -0.05755615234375, -0.0209808349609375, -0.017120361328125, -0.00438690185546875, -0.022125244140625, -0.0177764892578125, 0.05133056640625, -0.01236724853515625, -0.06951904296875, 0.0210113525390625, -0.00910186767578125, 0.017425537109375, -0.030059814453125, -0.0171356201171875, -0.013031005859375, 0.00669097900390625, 0.0266876220703125, 0.03790283203125, -0.05877685546875, -0.014984130859375, 
-0.01161956787109375, -0.0107421875, 0.0135955810546875, 0.039306640625, -0.0628662109375, 0.0278472900390625, 0.024566650390625, 0.018524169921875, 0.05780029296875, -0.0204315185546875, 0.0234375, -0.0299835205078125, 0.019989013671875, 0.009979248046875, 0.020751953125, 0.0205078125, -0.030487060546875, 0.0523681640625, 0.0173797607421875, -0.04998779296875, -0.061492919921875, -0.00177001953125, -0.07598876953125, -0.00714874267578125, 0.0806884765625, -0.04010009765625, -0.036712646484375, 0.00385284423828125, -0.03753662109375, 0.040679931640625, -0.042724609375, 0.06201171875, 0.0225372314453125, -0.01120758056640625, -0.00013720989227294922, -0.04425048828125, 0.031890869140625, 0.00199127197265625, -0.072021484375, -0.01277923583984375, 0.019622802734375, 0.0245513916015625, 0.021392822265625, 0.055694580078125, -0.00832366943359375, 0.01453399658203125, 0.00457000732421875, 0.0177001953125, -0.03204345703125, -0.004955291748046875, -0.014678955078125, 0.01088714599609375, -0.01617431640625, -0.044189453125 ] ]
knkarthick/MEETING-SUMMARY-BART-LARGE-XSUM-SAMSUM-DIALOGSUM-AMI
2023-10-03T10:59:56.000Z
[ "transformers", "pytorch", "tf", "safetensors", "bart", "text2text-generation", "seq2seq", "summarization", "en", "dataset:cnndaily/newyorkdaily/xsum/samsum/dialogsum/AMI", "license:apache-2.0", "model-index", "autotrain_compatible", "endpoints_compatible", "has_space", "region:us" ]
summarization
knkarthick
null
null
knkarthick/MEETING-SUMMARY-BART-LARGE-XSUM-SAMSUM-DIALOGSUM-AMI
10
5,811
transformers
2022-03-02T23:29:05
--- language: en tags: - bart - seq2seq - summarization license: apache-2.0 datasets: - cnndaily/newyorkdaily/xsum/samsum/dialogsum/AMI metrics: - rouge widget: - text: |- Hi, I'm David and I'm supposed to be an industrial designer. Um, I just got the project announcement about what the project is. Designing a remote control. That's about it, didn't get anything else. Did you get the same thing? Cool. There's too much gear. Okay. Can't draw. Um. Yeah. Um, well anyway, I don't know, it's just the first animal I can think off the top of my head. Um. Yes. Big reason is 'cause I'm allergic to most animals. Allergic to animal fur, so um fish was a natural choice. Um, yeah, and I kind of like whales. They come in and go eat everything in sight. And they're quite harmless and mild and interesting. Tail's a bit big, I think. It's an after dinner dog then. Hmm. It does make sense from maybe the design point of view 'cause you have more complicated characters like European languages, then you need more buttons. So, possibly. Hmm. Yeah. And you keep losing them. Finding them is really a pain, you know. I mean it's usually quite small, or when you want it right, it slipped behind the couch or it's kicked under the table. You know. Yep. Mm-hmm. I think one factor would be production cost. Because there's a cap there, so um depends on how much you can cram into that price. Um. I think that that's the main factor. Cool. Okay. Right. Um well this is the kick-off meeting for our our project. Um and um this is just what we're gonna be doing over the next twenty five minutes. Um so first of all, just to kind of make sure that we all know each other, I'm Laura and I'm the project manager. Do you want to introduce yourself again? Okay. Great. Okay. Um so we're designing a new remote control and um Oh I have to record who's here actually. So that's David, Andrew and Craig, isn't it? And you all arrived on time. Um yeah so des uh design a new remote control. 
Um, as you can see it's supposed to be original, trendy and user friendly. Um so that's kind of our our brief, as it were. Um and so there are three different stages to the design. Um I'm not really sure what what you guys have already received um in your emails. What did you get? Mm-hmm. Is that what everybody got? Okay. Um. So we're gonna have like individual work and then a meeting about it. And repeat that process three times. Um and at this point we get try out the whiteboard over there. Um. So uh you get to draw your favourite animal and sum up your favourite characteristics of it. So who would like to go first? Very good. Mm-hmm. Yeah. Yeah. Right. Lovely. Right. You can take as long over this as you like, because we haven't got an awful lot to discuss. Ok oh we do we do. Don't feel like you're in a rush, anyway. Ach why not We might have to get you up again then. I don't know what mine is. I'm gonna have to think on the spot now. Is that a whale? Ah. Okay. God, I still don't know what I'm gonna write about. Um. I was gonna choose a dog as well. But I'll just draw a different kind of dog. M my favourite animal is my own dog at home. Um That doesn't really look like him, actually. He looks more like a pig, actually. Ah well. Do you? Oh that's very good of you. Uh. Um he's a mixture of uh various things. Um and what do I like about him, um That's just to suggest that his tail wags. Um he's very friendly and cheery and always pleased to see you, and very kind of affectionate and um uh and he's quite quite wee as well so you know he can doesn't take up too much space. Um and uh And he does a funny thing where he chases his tail as well, which is quite amusing, so It is. I think it is. He only does it after he's had his dinner and um he'll just all of a sudden just get up and start chasing his tail 'round the living room. Yeah, so uh Yeah, maybe. Maybe. Right, um where did you find this? Just down here? Yeah. Okay. Um what are we doing next? Uh um. 
Okay, uh we now need to discuss the project finance. Um so according to the brief um we're gonna be selling this remote control for twenty five Euro, um and we're aiming to make fifty million Euro. Um so we're gonna be selling this on an international scale. And uh we don't want it to cost any more than uh twelve fifty Euros, so fifty percent of the selling price. Sure. All together. Um I dunno. I imagine That's a good question. I imagine it probably is our sale actually because it's probably up to the the um the retailer to uh sell it for whatever price they want. Um. But I I don't know, I mean do you think the fact that it's going to be sold internationally will have a bearing on how we design it at all? Think it will? Um. Hmm. Oh yeah, regions and stuff, yeah. Yeah. Okay. Yeah. Well for a remote control, do you think that will be I suppose it's depends on how complicated our remote control is. Yeah, yeah. Okay. What, just like in terms of like the wealth of the country? Like how much money people have to spend on things like? Aye, I see what you mean, yeah. Marketing. Good marketing thoughts. Oh gosh, I should be writing all this down. Um. Mm. Yeah. Yeah, yeah. Like how much does, you know, a remote control cost. Well twenty five Euro, I mean that's um that's about like eighteen pounds or something, isn't it? Or no, is it as much as that? Sixteen seventeen eighteen pounds. Um, I dunno, I've never bought a remote control, so I don't know how how good a remote control that would get you. Um. But yeah, I suppose it has to look kind of cool and gimmicky. Um right, okay. Let me just scoot on ahead here. Okay. Um well d Does anybody have anything to add to uh to the finance issue at all? Thin No, actually. That would be useful, though, wouldn't it, if you knew like what your money would get you now. Mm-hmm. Yeah, yeah. Oh. Five minutes to end of meeting. Oh, okay. We're a bit behind. Yeah. 
Right, so do you think that should be like a main design aim of our remote control d you know, do your your satellite and your regular telly and your V_C_R_ and everything? Mm-hmm. Yeah. Or even like, you know, notes about um what you wanna watch. Like you might put in there oh I want to watch such and such and look a Oh that's a good idea. So extra functionalities. Mm-hmm. Hmm. Um okay, uh I'd wel we're gonna have to wrap up pretty quickly in the next couple of minutes. Um I'll just check we've nothing else. Okay. Um so anything else anybody wants to add about what they don't like about remote controls they've used, what they would really like to be part of this new one at all? You keep losing them. Okay. Yeah. W You get those ones where you can, if you like, whistle or make a really high pitched noise they beep. There I mean is that something we'd want to include, do you think? Dunno. Okay maybe. My goodness. Still feels quite primitive. Maybe like a touch screen or something? Okay. Uh-huh, okay. Well I guess that's up to our industrial designer. It looks better. Yeah. Okay. Okay. Right, well um so just to wrap up, the next meeting's gonna be in thirty minutes. So that's about um about ten to twelve by my watch. Um so inbetween now and then, um as the industrial designer, you're gonna be working on you know the actual working design of it so y you know what you're doing there. Um for user interface, technical functions, I guess that's you know like what we've been talking about, what it'll actually do. Um and uh marketing executive, you'll be just thinking about what it actually what, you know, what requirements it has to has to fulfil and you'll all get instructions emailed to you, I guess. Um. Yeah, so it's th the functional design stage is next, I guess. And uh and that's the end of the meeting. So I got that little message a lot sooner than I thought I would, so Mm-hmm. Uh-huh, yeah. Th Okay, well just very quickly 'cause this we're supposed to finish now. 
Um I guess that's up to us, I mean you probably want some kind of unique selling point of it, so um, you know Yeah. Mm-hmm. Yeah. Okay. Right, okay, we'll that's that's the end of the meeting, then. Um. So, uh thank you all for coming. Um I'm Craig and I'm User Interface. Yeah. Well, my favourite animal would be a monkey. Then they're small cute and furry, and uh when planet of the apes becomes real, I'm gonna be up there with them. Yeah. I know um My parents went out and bought um remote controls because um they got fed up of having four or five different remote controls for each things the house. So um for them it was just how many devices control. Uh. Mm-hmm. Great. And I'm Andrew and I'm uh our marketing expert. Mm-hmm. Mm-hmm. Yeah, that's that's it. Yeah. I will go. That's fine. Alright. So This one here, right? Okay. Very nice. Alright. My favourite animal is like A beagle. Um charac favourite characteristics of it? Is that right? Uh, right, well basically um high priority for any animal for me is that they be willing to take a lot of physical affection from their family. And, yeah that they have lots of personality and uh be fit and in robust good health. So this is blue. Blue beagle. My family's beagle. I coulda told you a whole lot more about beagles. Boy, let me tell you. Impressionist. Alright. Mm. Superb sketch, by the way. Yep. I see a dog in there. Yep. Now I see a rooster. What kind is it? Is he aware that th it's his own cha tail he's chasing? Hmm. Probably when he was little he got lots of attention for doing it and has forever been conditioned. 'Kay. Um, can we just go over that again? Uh, so bas at twel Alright, yeah. Okay. So cost like production cost is twelve fifty, but selling price is is that wholesale or retail? Like on the shelf. Our sale our sale anyway. Yeah, okay okay. Okay. Mm-hmm. Alright. Yes. Mm-hmm. Mm-hmm. Well right away I'm wondering if there's um th th uh, like with D_V_D_ players, if there are zones. 
Um f frequencies or something um as well as uh characters, um different uh keypad styles and s symbols. Um. I don't know. Yeah. Yeah. Yeah. And then a and then al the other thing international is on top of the price. I'm thinking the price might might appeal to a certain market in one region, whereas in another it'll be different, so Just a chara just a characteristic of the Just Or just like, basic product podi positioning, the twenty five Euro remote control might be a big hit in London, might not be such a big hit in Greece, who knows, something like that, yeah. Yep. Right away I'm making some kind of assumptions about what what information we're given here, thinking, 'kay trendy probably means something other than just basic, something other than just standard. Um so I'm wondering right away, is selling twenty five Euros, is that sort of the thi is this gonna to be like the premium product kinda thing or Uh-huh. Mm-hmm. Yep. Yeah, I'd say so, yeah. No. Yeah, yeah. Mm-hmm. Do we have any other background information on like how that compares to other other Yeah. Mm-hmm. Yeah, interesting thing about discussing um production of a remote control for me is that l as you point out, I just don't think of remote controls as somethin something people consciously assess in their purchasing habits. It's just like getting shoelaces with shoes or something. It just comes along. Do you know what I mean? Like so sort of like how do you I I mean one one way of looking at it would be, well the people producing television sets, maybe they have to buy remote controls. Or another way is maybe people who have T_V_ sets are really fed up with their remote control and they really want a better one or something. But Right. Right. Okay so Right, so in function one of the priorities might be to combine as many uses I think so. Yeah, yeah. Yeah. Well like um, maybe what we could use is a sort of like a example of a successful other piece technology is palm palm pilots. 
They're gone from being just like little sort of scribble boards to cameras, M_P_ three players, telephones, everything, agenda. So, like, I wonder if we might add something new to the to the remote control market, such as the lighting in your house, or um Yeah, yeah. An Yeah. Like, p personally for me, at home I've I've combined the um the audio video of my television set and my D_V_D_ player and my C_D_ player. So they w all work actually function together but I have different remote controls for each of them. So it's sort of ironic that that then they're in there um you know, the sound and everything it's just one system. But each one's got its own little part. Mm. Mm. Mm. Mm-hmm. Mm-hmm. Yeah. Yeah. That's just really good id Yep. Uh, sure. I remember when the first remote control my my family had was on a cable. Actually had a cable between it and the T_V_ and big like buttons that sort of like, like on a blender or something. And um, you know, when I think about what they are now, it's better, but actually it's still kind of, I dunno, like a massive junky thing on the table. Maybe we could think about how, could be more, you know, streamlined. S Something like that, yeah. Or whatever would be technologically reasonable. 'Cause it could b it could it could be that f it could be that functionally that doesn't make it any better, but that just the appeal of of not having You know, these days there's a r pe things in people's homes are becoming more and more like chic, you know. Um, nicer materials and might be be worth exploring anyway. Okay. Um. Before we wrap up, just to make sure we're all on the same page here, um, do we We were given sort of an example of a coffee machine or something, right? Well, um are we at ma right now on the assumption that our television remote control may have features which go beyond the television? Or are we keeping sort of like a a design commitment to television features? I I don't know. Yep. Yeah, sure. Okay. Okay, yeah. Okay. 
Okay. Okay. Alright. model-index: - name: bart-large-meeting-summary-xsum-samsum-dialogsum-AMI results: - task: name: Abstractive Text Summarization type: abstractive-text-summarization dataset: name: "cnndaily/newyorkdaily/xsum/samsum/dialogsum/AMI Meeting Corpus" type: cnndaily/newyorkdaily/xsum/samsum/dialogsum/AMI Meeting Corpus metrics: - name: Validation ROGUE-1 type: rouge-1 value: NA - name: Validation ROGUE-2 type: rouge-2 value: NA - name: Validation ROGUE-L type: rouge-L value: NA - name: Validation ROGUE-Lsum type: rouge-Lsum value: NA - name: Test ROGUE-1 type: rouge-1 value: NA - name: Test ROGUE-2 type: rouge-2 value: NA - name: Test ROGUE-L type: rouge-L value: NA - name: Test ROGUE-Lsum type: rouge-Lsum value: NA --- Model obtained by Fine Tuning 'facebook/bart-large-xsum' ## Usage # Example 1 ```python from transformers import pipeline summarizer = pipeline("summarization", model="knkarthick/MEETING-SUMMARY-BART-LARGE-XSUM-SAMSUM-DIALOGSUM-AMI") text = '''The tower is 324 metres (1,063 ft) tall, about the same height as an 81-storey building, and the tallest structure in Paris. Its base is square, measuring 125 metres (410 ft) on each side. During its construction, the Eiffel Tower surpassed the Washington Monument to become the tallest man-made structure in the world, a title it held for 41 years until the Chrysler Building in New York City was finished in 1930. It was the first structure to reach a height of 300 metres. Due to the addition of a broadcasting aerial at the top of the tower in 1957, it is now taller than the Chrysler Building by 5.2 metres (17 ft). Excluding transmitters, the Eiffel Tower is the second tallest free-standing structure in France after the Millau Viaduct. 
''' summarizer(text) ``` # Example 2 ```python from transformers import pipeline summarizer = pipeline("summarization", model="knkarthick/MEETING-SUMMARY-BART-LARGE-XSUM-SAMSUM-DIALOGSUM-AMI") text = '''Bangalore is the capital and the largest city of the Indian state of Karnataka. It has a population of more than 8 million and a metropolitan population of around 11 million, making it the third most populous city and fifth most populous urban agglomeration in India. Located in southern India on the Deccan Plateau, at a height of over 900 m (3,000 ft) above sea level, Bangalore is known for its pleasant climate throughout the year. Its elevation is the highest among the major cities of India.The city's history dates back to around 890 CE, in a stone inscription found at the Nageshwara Temple in Begur, Bangalore. The Begur inscription is written in Halegannada (ancient Kannada), mentions 'Bengaluru Kalaga' (battle of Bengaluru). It was a significant turning point in the history of Bangalore as it bears the earliest reference to the name 'Bengaluru'. In 1537 CE, Kempé Gowdā – a feudal ruler under the Vijayanagara Empire – established a mud fort considered to be the foundation of modern Bangalore and its oldest areas, or petes, which exist to the present day. After the fall of Vijayanagar empire in 16th century, the Mughals sold Bangalore to Chikkadevaraja Wodeyar (1673–1704), the then ruler of the Kingdom of Mysore for three lakh rupees. When Haider Ali seized control of the Kingdom of Mysore, the administration of Bangalore passed into his hands. The city was captured by the British East India Company after victory in the Fourth Anglo-Mysore War (1799), who returned administrative control of the city to the Maharaja of Mysore. The old city developed in the dominions of the Maharaja of Mysore and was made capital of the Princely State of Mysore, which existed as a nominally sovereign entity of the British Raj. 
In 1809, the British shifted their cantonment to Bangalore, outside the old city, and a town grew up around it, which was governed as part of British India. Following India's independence in 1947, Bangalore became the capital of Mysore State, and remained capital when the new Indian state of Karnataka was formed in 1956. The two urban settlements of Bangalore – city and cantonment – which had developed as independent entities merged into a single urban centre in 1949. The existing Kannada name, Bengalūru, was declared the official name of the city in 2006. Bangalore is widely regarded as the "Silicon Valley of India" (or "IT capital of India") because of its role as the nation's leading information technology (IT) exporter. Indian technological organisations are headquartered in the city. A demographically diverse city, Bangalore is the second fastest-growing major metropolis in India. Recent estimates of the metro economy of its urban area have ranked Bangalore either the fourth- or fifth-most productive metro area of India. As of 2017, Bangalore was home to 7,700 millionaires and 8 billionaires with a total wealth of $320 billion. It is home to many educational and research institutions. Numerous state-owned aerospace and defence organisations are located in the city. The city also houses the Kannada film industry. It was ranked the most liveable Indian city with a population of over a million under the Ease of Living Index 2020. ''' summarizer(text) ``` # Example 3 ```python from transformers import pipeline summarizer = pipeline("summarization", model="knkarthick/MEETING-SUMMARY-BART-LARGE-XSUM-SAMSUM-DIALOGSUM-AMI") text = '''Hi, I'm David and I'm supposed to be an industrial designer. Um, I just got the project announcement about what the project is. Designing a remote control. That's about it, didn't get anything else. Did you get the same thing? Cool. There's too much gear. Okay. Can't draw. Um. Yeah. 
Um, well anyway, I don't know, it's just the first animal I can think off the top of my head. Um. Yes. Big reason is 'cause I'm allergic to most animals. Allergic to animal fur, so um fish was a natural choice. Um, yeah, and I kind of like whales. They come in and go eat everything in sight. And they're quite harmless and mild and interesting. Tail's a bit big, I think. It's an after dinner dog then. Hmm. It does make sense from maybe the design point of view 'cause you have more complicated characters like European languages, then you need more buttons. So, possibly. Hmm. Yeah. And you keep losing them. Finding them is really a pain, you know. I mean it's usually quite small, or when you want it right, it slipped behind the couch or it's kicked under the table. You know. Yep. Mm-hmm. I think one factor would be production cost. Because there's a cap there, so um depends on how much you can cram into that price. Um. I think that that's the main factor. Cool. Okay. Right. Um well this is the kick-off meeting for our our project. Um and um this is just what we're gonna be doing over the next twenty five minutes. Um so first of all, just to kind of make sure that we all know each other, I'm Laura and I'm the project manager. Do you want to introduce yourself again? Okay. Great. Okay. Um so we're designing a new remote control and um Oh I have to record who's here actually. So that's David, Andrew and Craig, isn't it? And you all arrived on time. Um yeah so des uh design a new remote control. Um, as you can see it's supposed to be original, trendy and user friendly. Um so that's kind of our our brief, as it were. Um and so there are three different stages to the design. Um I'm not really sure what what you guys have already received um in your emails. What did you get? Mm-hmm. Is that what everybody got? Okay. Um. So we're gonna have like individual work and then a meeting about it. And repeat that process three times. 
Um and at this point we get try out the whiteboard over there. Um. So uh you get to draw your favourite animal and sum up your favourite characteristics of it. So who would like to go first? Very good. Mm-hmm. Yeah. Yeah. Right. Lovely. Right. You can take as long over this as you like, because we haven't got an awful lot to discuss. Ok oh we do we do. Don't feel like you're in a rush, anyway. Ach why not We might have to get you up again then. I don't know what mine is. I'm gonna have to think on the spot now. Is that a whale? Ah. Okay. God, I still don't know what I'm gonna write about. Um. I was gonna choose a dog as well. But I'll just draw a different kind of dog. M my favourite animal is my own dog at home. Um That doesn't really look like him, actually. He looks more like a pig, actually. Ah well. Do you? Oh that's very good of you. Uh. Um he's a mixture of uh various things. Um and what do I like about him, um That's just to suggest that his tail wags. Um he's very friendly and cheery and always pleased to see you, and very kind of affectionate and um uh and he's quite quite wee as well so you know he can doesn't take up too much space. Um and uh And he does a funny thing where he chases his tail as well, which is quite amusing, so It is. I think it is. He only does it after he's had his dinner and um he'll just all of a sudden just get up and start chasing his tail 'round the living room. Yeah, so uh Yeah, maybe. Maybe. Right, um where did you find this? Just down here? Yeah. Okay. Um what are we doing next? Uh um. Okay, uh we now need to discuss the project finance. Um so according to the brief um we're gonna be selling this remote control for twenty five Euro, um and we're aiming to make fifty million Euro. Um so we're gonna be selling this on an international scale. And uh we don't want it to cost any more than uh twelve fifty Euros, so fifty percent of the selling price. Sure. All together. Um I dunno. I imagine That's a good question. 
I imagine it probably is our sale actually because it's probably up to the the um the retailer to uh sell it for whatever price they want. Um. But I I don't know, I mean do you think the fact that it's going to be sold internationally will have a bearing on how we design it at all? Think it will? Um. Hmm. Oh yeah, regions and stuff, yeah. Yeah. Okay. Yeah. Well for a remote control, do you think that will be I suppose it's depends on how complicated our remote control is. Yeah, yeah. Okay. What, just like in terms of like the wealth of the country? Like how much money people have to spend on things like? Aye, I see what you mean, yeah. Marketing. Good marketing thoughts. Oh gosh, I should be writing all this down. Um. Mm. Yeah. Yeah, yeah. Like how much does, you know, a remote control cost. Well twenty five Euro, I mean that's um that's about like eighteen pounds or something, isn't it? Or no, is it as much as that? Sixteen seventeen eighteen pounds. Um, I dunno, I've never bought a remote control, so I don't know how how good a remote control that would get you. Um. But yeah, I suppose it has to look kind of cool and gimmicky. Um right, okay. Let me just scoot on ahead here. Okay. Um well d Does anybody have anything to add to uh to the finance issue at all? Thin No, actually. That would be useful, though, wouldn't it, if you knew like what your money would get you now. Mm-hmm. Yeah, yeah. Oh. Five minutes to end of meeting. Oh, okay. We're a bit behind. Yeah. Right, so do you think that should be like a main design aim of our remote control d you know, do your your satellite and your regular telly and your V_C_R_ and everything? Mm-hmm. Yeah. Or even like, you know, notes about um what you wanna watch. Like you might put in there oh I want to watch such and such and look a Oh that's a good idea. So extra functionalities. Mm-hmm. Hmm. Um okay, uh I'd wel we're gonna have to wrap up pretty quickly in the next couple of minutes. 
Um I'll just check we've nothing else. Okay. Um so anything else anybody wants to add about what they don't like about remote controls they've used, what they would really like to be part of this new one at all? You keep losing them. Okay. Yeah. W You get those ones where you can, if you like, whistle or make a really high pitched noise they beep. There I mean is that something we'd want to include, do you think? Dunno. Okay maybe. My goodness. Still feels quite primitive. Maybe like a touch screen or something? Okay. Uh-huh, okay. Well I guess that's up to our industrial designer. It looks better. Yeah. Okay. Okay. Right, well um so just to wrap up, the next meeting's gonna be in thirty minutes. So that's about um about ten to twelve by my watch. Um so inbetween now and then, um as the industrial designer, you're gonna be working on you know the actual working design of it so y you know what you're doing there. Um for user interface, technical functions, I guess that's you know like what we've been talking about, what it'll actually do. Um and uh marketing executive, you'll be just thinking about what it actually what, you know, what requirements it has to has to fulfil and you'll all get instructions emailed to you, I guess. Um. Yeah, so it's th the functional design stage is next, I guess. And uh and that's the end of the meeting. So I got that little message a lot sooner than I thought I would, so Mm-hmm. Uh-huh, yeah. Th Okay, well just very quickly 'cause this we're supposed to finish now. Um I guess that's up to us, I mean you probably want some kind of unique selling point of it, so um, you know Yeah. Mm-hmm. Yeah. Okay. Right, okay, we'll that's that's the end of the meeting, then. Um. So, uh thank you all for coming. Um I'm Craig and I'm User Interface. Yeah. Well, my favourite animal would be a monkey. Then they're small cute and furry, and uh when planet of the apes becomes real, I'm gonna be up there with them. Yeah. 
I know um My parents went out and bought um remote controls because um they got fed up of having four or five different remote controls for each things the house. So um for them it was just how many devices control. Uh. Mm-hmm. Great. And I'm Andrew and I'm uh our marketing expert. Mm-hmm. Mm-hmm. Yeah, that's that's it. Yeah. I will go. That's fine. Alright. So This one here, right? Okay. Very nice. Alright. My favourite animal is like A beagle. Um charac favourite characteristics of it? Is that right? Uh, right, well basically um high priority for any animal for me is that they be willing to take a lot of physical affection from their family. And, yeah that they have lots of personality and uh be fit and in robust good health. So this is blue. Blue beagle. My family's beagle. I coulda told you a whole lot more about beagles. Boy, let me tell you. Impressionist. Alright. Mm. Superb sketch, by the way. Yep. I see a dog in there. Yep. Now I see a rooster. What kind is it? Is he aware that th it's his own cha tail he's chasing? Hmm. Probably when he was little he got lots of attention for doing it and has forever been conditioned. 'Kay. Um, can we just go over that again? Uh, so bas at twel Alright, yeah. Okay. So cost like production cost is twelve fifty, but selling price is is that wholesale or retail? Like on the shelf. Our sale our sale anyway. Yeah, okay okay. Okay. Mm-hmm. Alright. Yes. Mm-hmm. Mm-hmm. Well right away I'm wondering if there's um th th uh, like with D_V_D_ players, if there are zones. Um f frequencies or something um as well as uh characters, um different uh keypad styles and s symbols. Um. I don't know. Yeah. Yeah. Yeah. And then a and then al the other thing international is on top of the price. 
I'm thinking the price might might appeal to a certain market in one region, whereas in another it'll be different, so Just a chara just a characteristic of the Just Or just like, basic product podi positioning, the twenty five Euro remote control might be a big hit in London, might not be such a big hit in Greece, who knows, something like that, yeah. Yep. Right away I'm making some kind of assumptions about what what information we're given here, thinking, 'kay trendy probably means something other than just basic, something other than just standard. Um so I'm wondering right away, is selling twenty five Euros, is that sort of the thi is this gonna to be like the premium product kinda thing or Uh-huh. Mm-hmm. Yep. Yeah, I'd say so, yeah. No. Yeah, yeah. Mm-hmm. Do we have any other background information on like how that compares to other other Yeah. Mm-hmm. Yeah, interesting thing about discussing um production of a remote control for me is that l as you point out, I just don't think of remote controls as somethin something people consciously assess in their purchasing habits. It's just like getting shoelaces with shoes or something. It just comes along. Do you know what I mean? Like so sort of like how do you I I mean one one way of looking at it would be, well the people producing television sets, maybe they have to buy remote controls. Or another way is maybe people who have T_V_ sets are really fed up with their remote control and they really want a better one or something. But Right. Right. Okay so Right, so in function one of the priorities might be to combine as many uses I think so. Yeah, yeah. Yeah. Well like um, maybe what we could use is a sort of like a example of a successful other piece technology is palm palm pilots. They're gone from being just like little sort of scribble boards to cameras, M_P_ three players, telephones, everything, agenda. 
So, like, I wonder if we might add something new to the to the remote control market, such as the lighting in your house, or um Yeah, yeah. An Yeah. Like, p personally for me, at home I've I've combined the um the audio video of my television set and my D_V_D_ player and my C_D_ player. So they w all work actually function together but I have different remote controls for each of them. So it's sort of ironic that that then they're in there um you know, the sound and everything it's just one system. But each one's got its own little part. Mm. Mm. Mm. Mm-hmm. Mm-hmm. Yeah. Yeah. That's just really good id Yep. Uh, sure. I remember when the first remote control my my family had was on a cable. Actually had a cable between it and the T_V_ and big like buttons that sort of like, like on a blender or something. And um, you know, when I think about what they are now, it's better, but actually it's still kind of, I dunno, like a massive junky thing on the table. Maybe we could think about how, could be more, you know, streamlined. S Something like that, yeah. Or whatever would be technologically reasonable. 'Cause it could b it could it could be that f it could be that functionally that doesn't make it any better, but that just the appeal of of not having You know, these days there's a r pe things in people's homes are becoming more and more like chic, you know. Um, nicer materials and might be be worth exploring anyway. Okay. Um. Before we wrap up, just to make sure we're all on the same page here, um, do we We were given sort of an example of a coffee machine or something, right? Well, um are we at ma right now on the assumption that our television remote control may have features which go beyond the television? Or are we keeping sort of like a a design commitment to television features? I I don't know. Yep. Yeah, sure. Okay. Okay, yeah. Okay. Okay. Okay. Alright. 
''' summarizer(text) ``` # Example 4 ```python from transformers import pipeline summarizer = pipeline("summarization", model="knkarthick/MEETING-SUMMARY-BART-LARGE-XSUM-SAMSUM-DIALOGSUM-AMI") text = ''' Das : Hi and welcome to the a16z podcast. I’m Das, and in this episode, I talk SaaS go-to-market with David Ulevitch and our newest enterprise general partner Kristina Shen. The first half of the podcast looks at how remote work impacts the SaaS go-to-market and what the smartest founders are doing to survive the current crisis. The second half covers pricing approaches and strategy, including how to think about free versus paid trials and navigating the transition to larger accounts. But we start with why it’s easier to move upmarket than down… and the advantage that gives a SaaS startup against incumbents. David : If you have a cohort of customers that are paying you $10,000 a year for your product, you’re going to find a customer that self-selects and is willing to pay $100,000 a year. Once you get one of those, your organization will figure out how you sell to, how you satisfy and support, customers at that price point and that size. But it’s really hard for a company that sells up market to move down market, because they’ve already baked in all that expensive, heavy lifting sales motion. And so as you go down market with a lower price point, usually, you can’t actually support it. Das : Does that mean that it’s easier for a company to do this go-to-market if they’re a new startup as opposed to if they’re a pre-existing SaaS? Kristina : It’s culturally very, very hard to give a product away for free that you’re already charging for. It feels like you’re eating away at your own potential revenue when you do it. So most people who try it end up pulling back very quickly. David : This is actually one of the key reasons why the bottoms up SaaS motion is just so competitive, and compelling, and so destructive against the traditional sales-driven test motion. 
If you have that great product and people are choosing to use it, it’s very hard for somebody with a sales-driven motion, and all the cost that’s loaded into that, to be able to compete against it. There are so many markets where initially, we would look at companies and say, “Oh, well, this couldn’t possibly be bottoms up. It has to be sold to the CIO. It has to be sold to the CSO or the CFO.” But in almost every case we’ve been wrong, and there has been a bottoms up motion. The canonical example is Slack. It’s crazy that Slack is a bottoms up company, because you’re talking about corporate messaging, and how could you ever have a messaging solution that only a few people might be using, that only a team might be using? But now it’s just, “Oh, yeah, some people started using it, and then more people started using it, and then everyone had Slack.” Kristina : I think another classic example is Dropbox versus Box. Both started as bottoms up businesses, try before you buy. But Box quickly found, “Hey, I’d rather sell to IT.” And Dropbox said, “Hey, we’ve got a great freemium motion going.” And they catalyzed their business around referrals and giving away free storage and shared storage in a way that really helped drive their bottoms up business. Das : It’s a big leap to go from selling to smaller customers to larger customers. How have you seen SaaS companies know or get the timing right on that? Especially since it does seem like that’s really related to scaling your sales force? Kristina : Don’t try to go from a 100-person company to a 20,000-person company. Start targeting early adopters, maybe they’re late stage pre-IPO companies, then newly IPO’d companies. Starting in tech tends to be a little bit easier because they tend to be early adopters. Going vertical by vertical can be a great strategy as well. Targeting one customer who might be branded in that space, can help brand yourself in that category. 
And then all their competitors will also want your product if you do a good job. A lot of times people will dedicate a sales rep to each vertical, so that they become really, really knowledgeable in that space, and also build their own brand and reputation and know who are the right customers to target. Das : So right now, you’ve got a lot more people working remote. Does this move to remote work mean that on-premise software is dying? And is it accelerating the move to software as a service? Kristina : This remote work and working from home is only going to catalyze more of the conversion from on-premise over to cloud and SaaS. In general, software spend declines 20% during an economic downturn. This happened in ’08, this happened in ’01. But when we look at the last downturn in ’08, SaaS spend actually, for public companies, increased, on average, 10%, which means there’s a 30% spread, which really shows us that there was a huge catalyst from people moving on-premise to SaaS. David : And as people work remote, the ability to use SaaS tools is much easier than having to VPN back into your corporate network. We’ve been seeing that, inside sales teams have been doing larger and larger deals, essentially moving up market on the inside, without having to engage with field sales teams. In fact, a lot of the new SaaS companies today rather than building out a field team, they have a hybrid team, where people are working and closing deals on the inside and if they had to go out and meet with a customer, they would do that. But by and large, most of it was happening over the phone, over email, and over videoconferencing. And all the deals now, by definition, are gonna be done remote because people can’t go visit their customers in person. Das : So with bottoms up, did user behavior and buyer behavior change, so the go-to-market evolved? Or did the go-to-market evolve and then you saw user and buyer behavior change? I’m curious with this move to remote work. 
Is that going to trigger more changes or has the go-to-market enabled that change in user behavior, even though we see that change coming because of a lot of forces outside of the market? Kristina : I definitely think they are interrelated. But I do think it was a user change that catalyzed everything. We decided that we preferred better software, and we tried a couple products. We were able to purchase off our credit card. And then IT and procurement eventually said, “Wow, everyone’s buying these already, I might as well get a company license and a company deal so I’m not paying as much.” While obviously software vendors had to offer the products that could be self-served, users started to realize they had the power, they wanted to use better software, they paid with their credit cards. And now software vendors are forced to change their go-to-market to actually suit that use case. Das : If that’s the case that when user behavior has changed, it’s tended to be the catalyzing force of bigger changes in the go-to-market, what are some of the changes you foresee for SaaS because the world has changed to this new reality of remote work and more distributed teams? David : We’re in a very uncertain economic environment right now. And a couple of things will become very clear over the next 3 to 9 to 15 months — you’re going to find out which SaaS products are absolutely essential to helping a business operate and run, and which ones were just nice to have and may not get renewed. I think on the customer, buying side, you’re very likely to see people push back on big annual commitments and prefer to go month-to-month where they can. Or you’ll see more incentives from SaaS startups to offer discounts for annual contracts. You’re going to see people that might sign an annual contract, but they may not want to pay upfront. They may prefer to meter the cash out ratably over the term of the contract. 
And as companies had empowered and allowed budget authority to be pushed down in organizations, you’re gonna see that budget authority get pulled back, more scrutiny on spending, and likely a lot of SaaS products not get renewed that turned out to not be essential. Kristina : I think the smartest founders are making sure they have the runway to continue to exist. And they’re doing that in a couple of ways. They’re preserving cash, and they are making sure that their existing customers are super, super happy, because retaining your customers is so important in this environment. And they’re making sure that they have efficient or profitable customer acquisition. Don’t spend valuable dollars acquiring customers. But acquire customers efficiently that will add to a great existing customer base. Das : To go into pricing and packaging for SaaS for a moment, what are some of the different pricing approaches that you see SaaS companies taking? Kristina : The old school way of doing SaaS go-to-market is bundle everything together, make the pricing super complex, so you don’t actually understand what you’re paying for. You’re forced to purchase it because you need one component of the product. New modern SaaS pricing is keep it simple, keep it tied to value, and make sure you’re solving one thing really, really well. David : You want to make it easy for your customers to give you money. And if your customers don’t understand your pricing, that’s a huge red flag. Sometimes founders will try to over engineer their pricing model. Kristina : We talk a lot about everything has to be 10X better than the alternatives. But it’s much easier to be 10X better when you solve one thing very, very well, and then have simple pricing around it. I think the most common that most people know about is PEPM or per employee per month, where you’re charging basically for every single seat. Another really common model is the freemium model. 
So, think about a Dropbox, or an Asana, or a Skype, where it’s trigger based. You try the product for free, but when you hit a certain amount of storage, or a certain amount of users, then it converts over to paid. And then you also have a time trial, where you get the full experience of the product for some limited time period. And then you’re asked if you want to continue using the product to pay. And then there’s pay as go, and particularly, pay as you go as a usage model. So, Slack will say, “Hey, if your users aren’t actually using the product this month, we won’t actually charge you for it.” David : The example that Kristina made about Slack and users, everybody understands what a user is, and if they’re using the product, they pay for it, and if they’re not using it, they don’t pay for it. That’s a very friendly way to make it easy for your customers to give you money. If Slack came up with a pricing model that was like based on number of messages, or number of API integration calls, the customer would have no idea what that means. Kristina : There’s also the consumption model. So Twilio only charges you for every SMS text or phone call that you make on the platform any given month. And so they make money or lose money as your usage goes. The pricing is very aligned to your productivity. David : Generally, those are for products where the usage only goes in one direction. If you think of a company like Databricks, where they’re charging for storage, or Amazon’s S3 service, it is very aligned with the customer, but it also strategically aligns with the business because they know the switching cost is very high, the churn is very low. And generally, in those businesses, you’re only going to store more data, so they can charge based on usage or volume of data. Kristina : Recently, there’s been a huge trend of payment as a revenue. 
It’s particularly common in vertical markets where SaaS companies are adding payments as a revenue in addition to their employee or subscription revenue. If you look at Shopify, for example, more than 50% of their revenue is actually payment revenue. They’re making money every single time you purchase something off one of their shopping cart websites. Das : When you’re working with a founder or a SaaS startup, how have you seen them find the right pricing model for their product, for their market? Kristina : Step one is just talk to a lot of customers. Try to figure out what is the market pricing for possible alternatives or competitors, understand their pain points and their willingness to pay. And just throw a price out there, because you have to have a starting point in order to actually test and iterate. Particularly in the SMB, or the bottoms up business, you can test and iterate pretty quickly because you have so many data points. David : I always tell founders, step one is to just go out there and talk to customers. Step two is just double your prices. I don’t think there’s ever been a great company with a great product that’s fallen apart because their pricing was wrong. But a lot of SaaS startup founders really under price, and you don’t want to find out two or three years later that you were 200% underpriced. A very common thing that SaaS companies do, they’ll have the basic package that either is free or low cost, that you can just sign up online for. They’ll have a middle package where they share some pricing, and then they’ll have the enterprise package where you have to contact sales to find out more. And that way they don’t actually have to show the pricing for that third package. And that gives the salespeople the flexibility to adjust pricing on a per deal basis. Das : When you’re working with companies, why are they underpricing their products? David : I think it’s psychological. 
People need to price on value, and they don’t know how much value they’re delivering relative to “Oh, it only cost me $100 a month to provide this service, so I just need to charge $200.” But if it turns out you’re saving your customer $50,000 a year, then you’re wildly underpriced. You have to remember that SaaS is essentially a proxy for outsourced IT. You’re spending money on a SaaS service to not pay to develop something internally, or to have to pay IT to support something that’s more complex on-prem. Software is much cheaper than people, and so generally, the price point can be much higher. Kristina : And the other thing is your value increases over time. You’re delivering more features, more products, you understand the customer better. It’s the beauty of the SaaS model and cloud model that you can iterate and push code immediately, and the customer immediately sees value. A lot of times people have the same price point from the first customer sold to three years later and the 200th customer. Quite frankly, you’ve delivered so much value along the way that your price point should have gone up. The other thing I’ll say is a lot of people discount per seat pricing a lot as they move up market. We tend to tell people that the best validation of your product having great product market fit is your ability to hold your price point. So while there is some natural discounting on a per seat basis because people do deserve some volume discounting, I would say try to resist that as much as possible. Das : Especially for a technical founder, it’s so tempting to get in there and fiddle with these knobs. How do you know when it is time to experiment with your pricing and packaging? David : If you’re looking at your business and you see that you are doing more deals, and they’re closing faster, you should raise your pricing. And you pay attention to how long it takes to close deals and whether the number of deals is staying consistent as you do that. 
And, at some point, you’re going to find out when you’re losing deals on price. I think a moment where companies have to plan ahead to avoid having to course correct is after they roll out massive pricing and packaging changes, which are pretty natural as companies move up market. But how they navigate that transition to larger accounts, and how they either bring along or move away from those smaller, earlier customers who got them to where they are, tends to be really important because they can get a lot of noise on Twitter, they can get a lot of blowback from their customers. So Zendesk is a company where they rolled out a major packaging change. And when they rolled it out, they hadn’t planned on grandfathering in their early customers. They got a lot of pushback, and very quickly, they put out a blog post and said, “We hear what you’re saying, we appreciate you building the business that we’ve become today. We do need to have a package for the future. But all the people that have been customers so far will be grandfathered in for at least a period of time into the old model.” Kristina : If you iterate pricing constantly, you don’t really have this problem because your customers will be used to pricing changes. You normally pair them with new features, and it all kind of works out. But if you have to go through a big grandfather change, I tend to lean towards treating your early customers really, really well. They adopted when you weren’t a big company yet. They probably co-built the product with you in many ways. And so, it’s great to get more dollars out of your customer base, but treat your early customers well. Das : Are there any other failure modes that you see startups really falling into around pricing and packaging or any common mistakes that they make? David : I think a lot of founders don’t always map out the cost or model of their pricing and their product relative to their cost of actually doing sales and marketing and customer acquisition. 
Kristina : Inside sales is so popular in Silicon Valley. When you’re selling more to an SMB or mid-market type customer, the expectation is that you’re educating and helping the prospective customer over the phone. And so, you’re not expected to be as high touch. But 5K is almost the minimum price point you need to sell to the SMB with an inside sales team in order to pay for the outbound costs and all the conversions, because there is typically a team that sits around the quota carrying rep. And so, price matching — how much your price point is compared to what your go-to-market motion is — matters a lot. Other big failure modes that I see, people guess the ramp time of a sales rep wrong. And ramp time really ties to the segment of customer you’re selling into. It tends be that if you’re selling into the enterprise, the ramp time for sales reps, because sales cycles are so long, tend to be much longer as well. They could be six months plus, could be a year. While if you’re selling more into SMB or mid-market, the ramp time to get a rep up and running can be much shorter, three to six months. Because the sales cycles are shorter, they just iterate much faster, and they ramp up much more quickly. David : The other thing that people have to understand is that sales velocity is a really important component to figuring out how many reps you should be hiring, whether they should be inside reps or field reps. If it takes you 90 days to close a deal, that can’t be a $5,000 a year deal, that has to be a $50,000 or even $150,000 a year deal. Das : Kristina, I know you’ve done a lot of work with metrics. So how do those play in? Kristina : Probably the one way to sum it all together is how many months does it take to pay back customer acquisition cost. Very commonly within the SaaS world, we talk about a 12-month CAC payback. We typically want to see for every dollar you spend on sales and marketing, you get a dollar back within a year. 
That means you can tweak the inputs any way you want. Let’s say that doing paid acquisition is really effective for you. Then, you can spend proportionally more on paid acquisition and less on sales reps. Vice versa, if you have a great inbound engine, you actually can hire a lot more sales reps and spend more on sales headcount. With all formulas, it’s a guide rail, so if you have customers that retain really, really well, let’s say you’re selling to the enterprise, and you’ve got a 90% or 95% annual retention rate, then your CAC payback could be between 12 and 24 months. But let’s say you’re selling to the SMB and churn is 2% or 3% monthly, which ends up being like 80% to 90% annual retention. Then, because your customer is less sticky, I would recommend looking at a CAC payback of 6 to 12 months. Das : How should you think about doing a free trial versus a paid trial? David : On the one hand, the bottoms up motion where people can try essentially a full version of a product before they buy it is extremely powerful. On the other hand, I’ve started to try to think about how I advise companies, when they are thinking about a free trial for something that might cost $100,000 or $200,000 a year? Do we do a paid pilot that has some sort of contractual obligation that if we meet then turns into a commercial engagement? Kristina : I do think the beauty of the bottoms up business is that you can get people to try the entire experience of the product for free, and they fall in love with it, and a certain percentage will convert. And that works really, really well for products that can self-serve. When you start moving up market to more complex products, the challenge with trials is it takes work to actually implement the product, whether it be integrations, IT has to give access, etc. You lose that self-serve ability, which is so amazing in the trial. And so, I tend to be more in the camp of paid trials, if it costs you money to actually deploy the trial. 
And when you’re selling to bigger customers, they associate value when they have to pay. Once a customer has to pay you, then they feel a need to make the project successful and thus they will onboard, schedule things, give you data and access. David : If you can get to a point where you get the customer to do that paid pilot, such that the only difference between a pilot and an actual customer is just the signing of a contract, that’s very powerful. Now, that does force you to have a really good pre-sales motion to make sure that you can deliver on the promise you’ve made your customers. When companies don’t have a great product, and they paper over it with professional services and sales engineering and post-sales support, that paid pilot thing doesn’t work because the experience isn’t good enough. So, it really is incumbent on the SaaS company that does a paid pilot to make sure that they are able to deliver on that experience. Kristina : And one emerging trend recently is people signing an annual contract with a one or three month out, as a replacement to the paid pilot. Because it’s the best of both worlds, the SaaS company that’s selling the product gets a higher level of commitment. And the customer gets the optionality of opting out in the same way as a trial without any clawback. It really comes down to where procurement falls. Sometimes procurement is at the beginning of that decision, which makes it more like an annual contract. Sometimes procurement is at the one or three month opt-out period, which means the customer already has a great experience, loves the product, and it is an easier way to convert procurements to actually sign on… David : And that is a really good segue into renewals. I always tell founders, you might have this subscription business, but it’s not a recurring revenue business until the second year when the revenue actually recurs. I think you really have the first three months to get a customer up and running and happy. 
And if they’re not, you then have about three months to fix it. And if all that works out, then the remaining six months of the contract can be focused on upsell and expansion. Das : Awesome. Thank you, Kristina. Thank you, David. Kristina : Thanks so much for having us. This was fun. David : Yeah, a lot of fun, great topics, and our favorite thing to talk about. ''' summarizer(text) ```
56,929
[ [ -0.0655517578125, -0.04248046875, 0.0287933349609375, 0.01458740234375, -0.029449462890625, -0.0014972686767578125, -0.0013322830200195312, -0.044891357421875, 0.05010986328125, 0.018157958984375, -0.0234832763671875, -0.007965087890625, -0.025177001953125, -0.003482818603515625, -0.0099334716796875, 0.06182861328125, -0.0033435821533203125, -0.0287322998046875, 0.0229339599609375, 0.0017099380493164062, -0.057464599609375, -0.036285400390625, -0.048583984375, -0.01512908935546875, 0.033447265625, 0.0166473388671875, 0.045623779296875, 0.055145263671875, 0.033905029296875, 0.031005859375, -0.0176849365234375, 0.019989013671875, -0.0330810546875, -0.025787353515625, -0.012054443359375, -0.0279388427734375, -0.028961181640625, 0.005329132080078125, 0.035247802734375, 0.059051513671875, -0.0090179443359375, 0.0172882080078125, 0.002361297607421875, 0.0640869140625, -0.0264129638671875, 0.03363037109375, -0.015869140625, 0.00775146484375, 0.0055694580078125, -0.0250091552734375, -0.006687164306640625, -0.049774169921875, -0.0120849609375, -0.040557861328125, 0.01044464111328125, 0.0234527587890625, 0.11322021484375, 0.0070648193359375, -0.0276947021484375, -0.0215911865234375, -0.056732177734375, 0.07452392578125, -0.0221099853515625, 0.01666259765625, 0.029937744140625, 0.0183868408203125, -0.0225830078125, -0.04150390625, -0.0166473388671875, -0.030487060546875, -0.032623291015625, 0.0283966064453125, -0.01885986328125, -0.008270263671875, 0.031005859375, 0.0273590087890625, -0.04541015625, -0.00978851318359375, -0.05792236328125, -0.01486968994140625, 0.05157470703125, 0.004596710205078125, 0.025909423828125, -0.044708251953125, -0.045867919921875, 0.004444122314453125, -0.033111572265625, 0.025909423828125, 0.0139312744140625, 0.0220489501953125, -0.0116424560546875, 0.036041259765625, -0.0124664306640625, 0.027984619140625, 0.02606201171875, -0.0034046173095703125, 0.01052093505859375, -0.04229736328125, -0.0175018310546875, 0.000022470951080322266, 
0.0482177734375, 0.053314208984375, 0.0226898193359375, -0.0041351318359375, 0.01203155517578125, 0.01137542724609375, 0.01248931884765625, -0.05401611328125, -0.001583099365234375, 0.038116455078125, -0.0635986328125, -0.0253753662109375, 0.00017595291137695312, -0.040130615234375, -0.00830841064453125, -0.028717041015625, 0.02142333984375, -0.0207366943359375, -0.0312042236328125, 0.0191650390625, -0.041290283203125, 0.034088134765625, 0.029571533203125, -0.0794677734375, 0.01297760009765625, 0.044097900390625, 0.05419921875, 0.00957489013671875, -0.0114288330078125, -0.0113983154296875, 0.008941650390625, -0.0689697265625, 0.060455322265625, 0.00188446044921875, -0.04718017578125, -0.022216796875, 0.00867462158203125, 0.0234375, -0.050445556640625, 0.03515625, -0.02642822265625, 0.0006837844848632812, -0.041534423828125, -0.0203857421875, -0.0028095245361328125, 0.007198333740234375, -0.0310211181640625, 0.048370361328125, 0.0016584396362304688, -0.051666259765625, 0.035308837890625, -0.034271240234375, -0.050079345703125, 0.0150299072265625, -0.01378631591796875, -0.002368927001953125, 0.02935791015625, 0.0006537437438964844, 0.0247955322265625, -0.032440185546875, 0.00742340087890625, -0.01032257080078125, -0.021148681640625, 0.0175323486328125, 0.020233154296875, 0.073486328125, 0.03656005859375, -0.0105133056640625, -0.0244140625, -0.061676025390625, 0.01053619384765625, 0.002353668212890625, -0.035369873046875, -0.0179443359375, 0.0108489990234375, -0.01499176025390625, 0.01641845703125, 0.031707763671875, -0.03863525390625, 0.021148681640625, -0.0282745361328125, 0.045745849609375, 0.06512451171875, 0.01268768310546875, 0.04351806640625, -0.06207275390625, 0.040283203125, -0.022705078125, 0.016510009765625, -0.0004661083221435547, -0.0251617431640625, -0.05340576171875, -0.0265960693359375, 0.0140380859375, 0.051483154296875, -0.005008697509765625, 0.025177001953125, -0.00726318359375, -0.043792724609375, -0.021697998046875, -0.0201873779296875, 
0.0252838134765625, 0.021728515625, -0.00702667236328125, -0.036224365234375, -0.059661865234375, -0.071533203125, 0.001071929931640625, -0.04071044921875, 0.0004949569702148438, 0.040679931640625, 0.0374755859375, 0.00724029541015625, 0.07470703125, -0.06475830078125, -0.027099609375, -0.020263671875, 0.01482391357421875, 0.047882080078125, 0.040130615234375, 0.052154541015625, -0.0675048828125, -0.047210693359375, 0.01739501953125, -0.0662841796875, -0.0031604766845703125, -0.025787353515625, -0.03680419921875, -0.00429534912109375, 0.0246429443359375, -0.090087890625, 0.027191162109375, -0.00801849365234375, -0.01049041748046875, 0.0657958984375, -0.022918701171875, 0.0189971923828125, -0.08544921875, 0.02093505859375, -0.0270538330078125, 0.0092010498046875, -0.04351806640625, 0.005992889404296875, -0.0269622802734375, -0.0160675048828125, -0.03179931640625, 0.04168701171875, -0.039886474609375, -0.00457000732421875, -0.002948760986328125, 0.02337646484375, 0.0120086669921875, 0.043914794921875, -0.01424407958984375, 0.0279388427734375, 0.03350830078125, -0.06256103515625, 0.040374755859375, 0.0307769775390625, -0.04010009765625, 0.047027587890625, -0.052276611328125, -0.0119171142578125, -0.033233642578125, 0.01337432861328125, -0.0792236328125, -0.0130767822265625, 0.0213775634765625, -0.060333251953125, 0.0158538818359375, 0.01629638671875, -0.0197906494140625, -0.054656982421875, -0.02423095703125, -0.0022411346435546875, 0.0487060546875, -0.008453369140625, 0.05316162109375, 0.031646728515625, -0.03302001953125, -0.038299560546875, -0.07379150390625, 0.0037250518798828125, -0.016204833984375, -0.052581787109375, 0.045745849609375, -0.0305023193359375, -0.04443359375, 0.0030956268310546875, -0.018646240234375, -0.0230865478515625, -0.00579833984375, 0.031280517578125, 0.01044464111328125, 0.0021381378173828125, -0.0052642822265625, -0.006832122802734375, -0.023956298828125, 0.0074005126953125, -0.00696563720703125, 0.0298614501953125, -0.020355224609375, 
-0.003662109375, -0.06268310546875, 0.0288238525390625, 0.065673828125, -0.00437164306640625, 0.0194244384765625, 0.0228424072265625, -0.0190277099609375, -0.0016651153564453125, -0.0384521484375, -0.007038116455078125, -0.03680419921875, 0.027801513671875, -0.043060302734375, -0.030426025390625, 0.0482177734375, -0.00250244140625, 0.02130126953125, 0.03277587890625, 0.041473388671875, -0.03558349609375, 0.06414794921875, 0.064208984375, -0.0113983154296875, 0.029693603515625, -0.02471923828125, 0.04248046875, -0.0382080078125, -0.0258941650390625, -0.0364990234375, -0.03326416015625, -0.054443359375, -0.00146484375, 0.005756378173828125, 0.014129638671875, -0.033935546875, 0.04779052734375, -0.043182373046875, 0.0150146484375, 0.044036865234375, -0.0022983551025390625, 0.0200042724609375, -0.007007598876953125, 0.0012073516845703125, -0.00835418701171875, -0.044891357421875, -0.031707763671875, 0.0450439453125, 0.0205535888671875, 0.0693359375, 0.0135498046875, 0.055877685546875, 0.025787353515625, -0.017486572265625, -0.04547119140625, 0.05419921875, 0.0018482208251953125, -0.0775146484375, -0.01140594482421875, -0.012664794921875, -0.07830810546875, 0.021759033203125, 0.01053619384765625, -0.0506591796875, 0.04937744140625, 0.007671356201171875, -0.05950927734375, 0.0147857666015625, -0.06134033203125, 0.046173095703125, -0.04412841796875, -0.040130615234375, -0.0207061767578125, -0.052764892578125, 0.018646240234375, 0.0006928443908691406, 0.0484619140625, -0.02313232421875, 0.01145172119140625, 0.044036865234375, -0.04339599609375, 0.05908203125, 0.005138397216796875, 0.01953125, 0.04583740234375, 0.0018405914306640625, 0.0229644775390625, 0.00380706787109375, -0.007781982421875, -0.0110626220703125, 0.0015535354614257812, -0.0316162109375, -0.037078857421875, 0.04766845703125, -0.07049560546875, -0.03369140625, -0.045684814453125, -0.03582763671875, 0.0341796875, 0.0196533203125, 0.030914306640625, 0.04608154296875, -0.0116424560546875, 
-0.0015516281127929688, 0.0277252197265625, -0.040374755859375, 0.049041748046875, 0.032623291015625, -0.036712646484375, -0.041229248046875, 0.0694580078125, 0.017425537109375, 0.02227783203125, 0.0482177734375, 0.0195770263671875, -0.01335906982421875, -0.0023708343505859375, -0.0198516845703125, 0.015869140625, -0.042877197265625, 0.00039696693420410156, -0.03662109375, -0.0093536376953125, -0.0208892822265625, -0.039093017578125, -0.0230560302734375, -0.02313232421875, -0.0225830078125, -0.0014362335205078125, 0.0104217529296875, 0.058349609375, -0.039306640625, -0.0082550048828125, -0.0290069580078125, 0.025909423828125, 0.032867431640625, 0.007537841796875, -0.0014829635620117188, -0.036895751953125, -0.01126861572265625, -0.00804901123046875, -0.0243377685546875, -0.05621337890625, 0.0484619140625, -0.0048370361328125, 0.038818359375, 0.054718017578125, 0.0013284683227539062, 0.0631103515625, -0.03594970703125, 0.0528564453125, 0.0149383544921875, -0.04583740234375, 0.03692626953125, -0.0241851806640625, 0.0147552490234375, 0.023590087890625, 0.031951904296875, -0.03643798828125, -0.00766754150390625, -0.05218505859375, -0.0628662109375, 0.034820556640625, 0.0156707763671875, 0.008331298828125, 0.007701873779296875, 0.031829833984375, -0.0092010498046875, 0.0528564453125, -0.06634521484375, -0.047027587890625, -0.01654052734375, 0.045806884765625, 0.00405120849609375, -0.0215301513671875, 0.004024505615234375, -0.0377197265625, 0.043792724609375, 0.03253173828125, 0.056488037109375, 0.006534576416015625, 0.019744873046875, -0.0156707763671875, 0.02264404296875, 0.046173095703125, 0.07000732421875, -0.03350830078125, 0.00316619873046875, 0.0267181396484375, -0.0199127197265625, 0.0089111328125, 0.00010830163955688477, -0.00440216064453125, 0.0148162841796875, 0.016265869140625, 0.057830810546875, 0.00740814208984375, -0.043060302734375, 0.0202789306640625, 0.01445770263671875, -0.0097503662109375, -0.0384521484375, -0.0256805419921875, 0.00835418701171875, 
0.0159149169921875, 0.042144775390625, 0.0038127899169921875, -0.00485992431640625, -0.0587158203125, 0.0166473388671875, 0.040924072265625, -0.0119781494140625, -0.0160064697265625, 0.05145263671875, 0.0143280029296875, -0.0232696533203125, 0.03472900390625, -0.04461669921875, -0.03851318359375, 0.074951171875, 0.0390625, 0.05181884765625, -0.005832672119140625, 0.0294036865234375, 0.042877197265625, 0.048126220703125, 0.021484375, 0.05181884765625, -0.003498077392578125, -0.0235137939453125, -0.026824951171875, -0.07525634765625, -0.0179595947265625, 0.00726318359375, -0.0377197265625, 0.05615234375, -0.049072265625, -0.02276611328125, -0.0119781494140625, 0.0210723876953125, -0.026641845703125, 0.0118255615234375, 0.01195526123046875, 0.06689453125, -0.07000732421875, 0.0178375244140625, 0.062042236328125, -0.055877685546875, -0.05926513671875, -0.0252685546875, -0.026702880859375, -0.05401611328125, 0.0645751953125, 0.00850677490234375, 0.0023784637451171875, -0.001781463623046875, -0.036041259765625, -0.09393310546875, 0.08123779296875, 0.00041031837463378906, -0.04345703125, -0.01436614990234375, 0.014251708984375, 0.0418701171875, -0.0106048583984375, 0.01493072509765625, 0.025665283203125, 0.040191650390625, 0.01068115234375, -0.07684326171875, 0.0009751319885253906, -0.0170135498046875, 0.01166534423828125, 0.0235137939453125, -0.08660888671875, 0.0782470703125, -0.042633056640625, -0.022857666015625, 0.017486572265625, 0.046661376953125, 0.0013580322265625, 0.0015411376953125, 0.028564453125, 0.04052734375, 0.053558349609375, -0.01280975341796875, 0.07080078125, -0.0308685302734375, 0.0292205810546875, 0.05181884765625, -0.002422332763671875, 0.057708740234375, 0.01297760009765625, -0.047027587890625, 0.042938232421875, 0.07196044921875, -0.0205535888671875, 0.05621337890625, 0.034576416015625, -0.01433563232421875, 0.0023593902587890625, -0.0224151611328125, -0.054107666015625, 0.032318115234375, 0.007236480712890625, -0.0197601318359375, 
-0.019683837890625, 0.0084228515625, 0.00542449951171875, -0.02630615234375, -0.0287017822265625, 0.039520263671875, 0.0226898193359375, -0.0430908203125, 0.057464599609375, -0.00591278076171875, 0.021728515625, -0.063232421875, 0.017578125, -0.0190887451171875, 0.01788330078125, -0.0180816650390625, -0.0504150390625, 0.0280303955078125, -0.028961181640625, -0.036895751953125, -0.0374755859375, 0.0389404296875, -0.0134429931640625, -0.0253753662109375, 0.01465606689453125, 0.01473236083984375, 0.044677734375, -0.0021305084228515625, -0.044891357421875, 0.024810791015625, 0.005237579345703125, 0.00665283203125, 0.010406494140625, 0.0236053466796875, 0.01027679443359375, 0.04547119140625, 0.036285400390625, 0.00522613525390625, -0.0032367706298828125, -0.0305023193359375, 0.050933837890625, -0.059600830078125, -0.051361083984375, -0.057647705078125, 0.05548095703125, -0.0017223358154296875, -0.022735595703125, 0.049041748046875, 0.051605224609375, 0.046173095703125, -0.0160369873046875, 0.07257080078125, -0.061553955078125, 0.032501220703125, -0.006832122802734375, 0.053924560546875, -0.049652099609375, -0.0014801025390625, -0.0287628173828125, -0.06829833984375, -0.005710601806640625, 0.06854248046875, -0.018280029296875, 0.002666473388671875, 0.0655517578125, 0.06414794921875, 0.020904541015625, -0.00582122802734375, 0.0178680419921875, 0.040679931640625, 0.017333984375, 0.0447998046875, 0.077392578125, -0.02349853515625, 0.060333251953125, -0.040252685546875, -0.05047607421875, -0.02166748046875, -0.0714111328125, -0.053680419921875, -0.05078125, -0.02532958984375, -0.0259246826171875, 0.004619598388671875, 0.06683349609375, 0.0570068359375, -0.056610107421875, -0.052490234375, -0.0102996826171875, 0.0101165771484375, -0.02740478515625, -0.0159149169921875, 0.03033447265625, -0.01299285888671875, -0.046661376953125, 0.00415802001953125, 0.025115966796875, 0.00270843505859375, -0.0159149169921875, 0.023895263671875, -0.0182647705078125, 0.0034465789794921875, 
0.0718994140625, 0.050689697265625, -0.056427001953125, -0.01258087158203125, 0.0056610107421875, -0.020538330078125, 0.006893157958984375, 0.030853271484375, -0.049041748046875, 0.0288543701171875, 0.049774169921875, 0.01198577880859375, 0.04217529296875, 0.0200042724609375, 0.02264404296875, -0.00936126708984375, -0.00714111328125, -0.01245880126953125, 0.0206298828125, 0.00991058349609375, -0.0262451171875, 0.02178955078125, 0.03662109375, -0.0693359375, -0.04736328125, 0.0263519287109375, -0.091064453125, -0.0245819091796875, 0.07183837890625, 0.0305023193359375, -0.019378662109375, -0.01438140869140625, -0.05609130859375, 0.0143280029296875, -0.0249176025390625, 0.0657958984375, 0.05694580078125, -0.04388427734375, 0.023162841796875, -0.0640869140625, 0.029144287109375, 0.0200042724609375, -0.044677734375, -0.0175323486328125, 0.03375244140625, 0.0303802490234375, 0.0482177734375, 0.0670166015625, 0.0005865097045898438, 0.01776123046875, 0.00679779052734375, 0.0273284912109375, 0.019775390625, -0.0185394287109375, 0.00455474853515625, 0.0293426513671875, -0.0004425048828125, -0.048248291015625 ] ]
aisquared/dlite-v2-1_5b
2023-05-10T16:55:56.000Z
[ "transformers", "pytorch", "gpt2", "text-generation", "en", "dataset:aisquared/databricks-dolly-15k", "license:apache-2.0", "endpoints_compatible", "has_space", "text-generation-inference", "region:us" ]
text-generation
aisquared
null
null
aisquared/dlite-v2-1_5b
10
5,811
transformers
2023-04-16T03:38:49
--- license: apache-2.0 datasets: - aisquared/databricks-dolly-15k language: - en library_name: transformers --- # Model Card for `dlite-v2-1.5b` <!-- Provide a quick summary of what the model is/does. --> AI Squared's `dlite-v2-1.5b` is a large language model which is derived from OpenAI's large [GPT-2](https://huggingface.co/gpt2-large) model and fine-tuned on a corpus of 15k records ([Databricks' "Dolly 15k" Dataset](https://huggingface.co/datasets/aisquared/databricks-dolly-15k)) to help it exhibit chat-based capabilities. Just like [Databricks' Dolly V2 models](https://www.databricks.com/blog/2023/04/12/dolly-first-open-commercially-viable-instruction-tuned-llm), `dlite-v2-1.5b` (and all other members of the `dlite-v2` family) is licensed for both **research and commercial use.** We are extremely grateful for the work that Databricks has done to create the `databricks-dolly-15k` dataset, for without it we would not be able to create and release this model under such an open and permissive license. While `dlite-v2-1.5b` is **not a state-of-the-art model**, we believe that the level of interactivity that can be achieved on such a small model that is trained so cheaply is important to showcase, as it continues to demonstrate that creating powerful AI capabilities may be much more accessible than previously thought. ### Model Description <!-- Provide a longer summary of what this model is. --> - **Developed by:** AI Squared, Inc. - **Shared by:** AI Squared, Inc. - **Model type:** Large Language Model - **Language(s) (NLP):** EN - **License:** Apache v2.0 - **Finetuned from model:** GPT-2 ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> **`dlite-v2-1.5b` is not a state-of-the-art language model.** `dlite-v2-1.5b` is an experimental technology, and as with any experimental technology, AI Squared urges potential users of this technology to test its capabilities thoroughly before usage. 
Furthermore, the model can sometimes exhibit undesired behaviors. Some of these behaviors include, but are not limited to: factual inaccuracies, biases, offensive responses, toxicity, and hallucinations. Just as with any other LLM, we advise users of this technology to exercise good judgment when applying this technology. ## Usage To use the model with the `transformers` library on a machine with GPUs, first make sure you have the `transformers` and `accelerate` libraries installed. From your terminal, run: ```python pip install "accelerate>=0.16.0,<1" "transformers[torch]>=4.28.1,<5" "torch>=1.13.1,<2" ``` The instruction following pipeline can be loaded using the `pipeline` function as shown below. This loads a custom `InstructionTextGenerationPipeline` found in the model repo [here](https://huggingface.co/aisquared/dlite-v2-1_5b/blob/main/instruct_pipeline.py), which is why `trust_remote_code=True` is required. Including `torch_dtype=torch.bfloat16` is generally recommended if this type is supported in order to reduce memory usage. It does not appear to impact output quality. It is also fine to remove it if there is sufficient memory. 
```python from transformers import pipeline import torch generate_text = pipeline(model="aisquared/dlite-v2-1_5b", torch_dtype=torch.bfloat16, trust_remote_code=True, device_map="auto") ``` You can then use the pipeline to answer instructions: ```python res = generate_text("Who was George Washington?") print(res) ``` Alternatively, if you prefer to not use `trust_remote_code=True` you can download [instruct_pipeline.py](https://huggingface.co/aisquared/dlite-v2-1_5b/blob/main/instruct_pipeline.py), store it alongside your notebook, and construct the pipeline yourself from the loaded model and tokenizer: ```python from instruct_pipeline import InstructionTextGenerationPipeline from transformers import AutoModelForCausalLM, AutoTokenizer import torch tokenizer = AutoTokenizer.from_pretrained("aisquared/dlite-v2-1_5b", padding_side="left") model = AutoModelForCausalLM.from_pretrained("aisquared/dlite-v2-1_5b", device_map="auto", torch_dtype=torch.bfloat16) generate_text = InstructionTextGenerationPipeline(model=model, tokenizer=tokenizer) ``` ### Model Performance Metrics We present the results from various model benchmarks on the EleutherAI LLM Evaluation Harness for all models in the DLite family. Model results are sorted by mean score, ascending, to provide an ordering. These metrics serve to further show that none of the DLite models are state of the art, but rather further show that chat-like behaviors in LLMs can be trained almost independent of model size. 
| Model | arc_challenge | arc_easy | boolq | hellaswag | openbookqa | piqa | winogrande | |:--------------|----------------:|-----------:|---------:|------------:|-------------:|---------:|-------------:| | dlite-v2-124m | 0.199659 | 0.447811 | 0.494801 | 0.291675 | 0.156 | 0.620239 | 0.487766 | | gpt2 | 0.190273 | 0.438131 | 0.487156 | 0.289185 | 0.164 | 0.628945 | 0.51618 | | dlite-v1-124m | 0.223549 | 0.462542 | 0.502446 | 0.293268 | 0.17 | 0.622416 | 0.494081 | | gpt2-medium | 0.215017 | 0.490741 | 0.585933 | 0.333101 | 0.186 | 0.676279 | 0.531176 | | dlite-v2-355m | 0.251706 | 0.486111 | 0.547401 | 0.344354 | 0.216 | 0.671926 | 0.52723 | | dlite-v1-355m | 0.234642 | 0.507576 | 0.600306 | 0.338478 | 0.216 | 0.664309 | 0.496448 | | gpt2-large | 0.216724 | 0.531566 | 0.604893 | 0.363971 | 0.194 | 0.703482 | 0.553275 | | dlite-v1-774m | 0.250853 | 0.545875 | 0.614985 | 0.375124 | 0.218 | 0.698041 | 0.562747 | | dlite-v2-774m | 0.269625 | 0.52904 | 0.613761 | 0.395937 | 0.256 | 0.691513 | 0.566693 | | gpt2-xl | 0.25 | 0.582912 | 0.617737 | 0.400418 | 0.224 | 0.708379 | 0.583268 | | dlite-v1-1_5b | 0.268771 | 0.588384 | 0.624159 | 0.401414 | 0.226 | 0.708379 | 0.584846 | | dlite-v2-1_5b | 0.289249 | 0.565657 | 0.601223 | 0.434077 | 0.272 | 0.703482 | 0.588003 | ### Limitations *DLite is an experimental technology and is not designed for use in any environment without significant testing and safety consideration. Furthermore, the model can sometimes exhibit undesired behaviors. Some of these behaviors include, but are not limited to: factual inaccuracies, biases, offensive responses, toxicity, and hallucinations. Just as with any other LLM, we advise users of this technology to exercise good judgment when applying this technology.*
6,756
[ [ -0.02130126953125, -0.063720703125, 0.03302001953125, 0.0048828125, 0.0009493827819824219, 0.003345489501953125, -0.00208282470703125, -0.031951904296875, 0.01751708984375, 0.01334381103515625, -0.060791015625, -0.042999267578125, -0.048309326171875, -0.007472991943359375, -0.032501220703125, 0.070068359375, 0.0014896392822265625, -0.0081787109375, -0.01183319091796875, -0.0002548694610595703, -0.0180206298828125, -0.0308990478515625, -0.055633544921875, -0.028778076171875, 0.0301971435546875, 0.00562286376953125, 0.05419921875, 0.03826904296875, 0.0261383056640625, 0.0303955078125, -0.023162841796875, 0.0266571044921875, -0.0413818359375, -0.00982666015625, 0.01380157470703125, -0.03515625, -0.061859130859375, 0.01715087890625, 0.0186767578125, 0.03680419921875, 0.004711151123046875, 0.0174407958984375, 0.00502777099609375, 0.054840087890625, -0.027557373046875, 0.0194854736328125, -0.033294677734375, 0.001377105712890625, 0.0005087852478027344, -0.005764007568359375, -0.02850341796875, -0.0271759033203125, 0.01039886474609375, -0.043792724609375, 0.0026721954345703125, 0.0078582763671875, 0.08172607421875, 0.02947998046875, -0.03143310546875, -0.02191162109375, -0.0474853515625, 0.0673828125, -0.06170654296875, 0.0142364501953125, 0.0302886962890625, -0.00897216796875, -0.016021728515625, -0.0653076171875, -0.04638671875, -0.01003265380859375, -0.02081298828125, 0.0096893310546875, -0.0117950439453125, 0.01151275634765625, 0.0266876220703125, 0.04351806640625, -0.04693603515625, 0.01611328125, -0.041656494140625, -0.0159149169921875, 0.057891845703125, 0.027099609375, 0.0208282470703125, -0.01108551025390625, -0.0229644775390625, -0.005680084228515625, -0.04864501953125, 0.00836181640625, 0.0330810546875, 0.00946044921875, -0.03143310546875, 0.046722412109375, -0.007389068603515625, 0.020111083984375, 0.01546478271484375, -0.00872039794921875, 0.03656005859375, -0.01413726806640625, -0.0273284912109375, -0.0028018951416015625, 0.066650390625, 0.0330810546875, 
0.0119781494140625, -0.0017518997192382812, -0.0006895065307617188, -0.000044465065002441406, -0.0073699951171875, -0.08026123046875, -0.03533935546875, 0.05828857421875, -0.0421142578125, -0.03887939453125, -0.0170440673828125, -0.040863037109375, -0.0296478271484375, -0.011444091796875, 0.03131103515625, -0.0184326171875, -0.028167724609375, -0.0006041526794433594, 0.00986480712890625, 0.0076904296875, 0.0268096923828125, -0.08502197265625, 0.017242431640625, 0.042022705078125, 0.07147216796875, -0.0194549560546875, -0.01122283935546875, -0.02154541015625, -0.020263671875, -0.0200958251953125, 0.043670654296875, -0.0062103271484375, -0.032501220703125, -0.00495147705078125, 0.0117034912109375, -0.01514434814453125, -0.038909912109375, 0.032501220703125, -0.037384033203125, 0.041717529296875, -0.0035724639892578125, -0.052642822265625, -0.01629638671875, 0.01418304443359375, -0.05828857421875, 0.08502197265625, 0.00536346435546875, -0.0704345703125, 0.0239410400390625, -0.041046142578125, -0.0019855499267578125, -0.01131439208984375, -0.01324462890625, -0.04046630859375, -0.010650634765625, 0.0196533203125, 0.02362060546875, -0.021881103515625, 0.007122039794921875, -0.012420654296875, -0.03179931640625, 0.0174560546875, -0.0416259765625, 0.08966064453125, 0.017730712890625, -0.037994384765625, 0.01215362548828125, -0.060333251953125, 0.0175628662109375, 0.011322021484375, -0.038482666015625, 0.005863189697265625, 0.002613067626953125, 0.000926971435546875, 0.01617431640625, 0.04193115234375, -0.03729248046875, 0.018951416015625, -0.040557861328125, 0.0296783447265625, 0.07244873046875, -0.0166168212890625, 0.03460693359375, -0.0390625, 0.016754150390625, 0.003143310546875, 0.015289306640625, 0.003292083740234375, -0.04107666015625, -0.080078125, -0.037689208984375, 0.01194000244140625, 0.05242919921875, -0.0308074951171875, 0.031707763671875, -0.0064239501953125, -0.029571533203125, -0.0447998046875, 0.0101318359375, 0.0157928466796875, 0.02008056640625, 
0.019256591796875, 0.004352569580078125, -0.04522705078125, -0.06011962890625, 0.01226806640625, -0.0151519775390625, 0.006557464599609375, 0.03839111328125, 0.044769287109375, -0.0122222900390625, 0.070556640625, -0.038238525390625, -0.015960693359375, -0.027557373046875, 0.0037517547607421875, 0.056304931640625, 0.029022216796875, 0.03656005859375, -0.046630859375, -0.044708251953125, -0.001926422119140625, -0.074462890625, 0.00823211669921875, -0.00449371337890625, -0.0013017654418945312, 0.039337158203125, 0.0229949951171875, -0.0662841796875, 0.03863525390625, 0.0673828125, -0.0333251953125, 0.0386962890625, -0.02447509765625, 0.01200103759765625, -0.08111572265625, 0.0080108642578125, -0.017547607421875, -0.00891876220703125, -0.050567626953125, -0.007068634033203125, -0.00786590576171875, 0.00821685791015625, -0.04449462890625, 0.06451416015625, -0.0254669189453125, 0.0078582763671875, -0.0151824951171875, 0.018646240234375, -0.002414703369140625, 0.03936767578125, -0.0013723373413085938, 0.07281494140625, 0.042755126953125, -0.047454833984375, 0.04510498046875, 0.0141754150390625, -0.0341796875, 0.021453857421875, -0.05816650390625, 0.0216522216796875, 0.00501251220703125, -0.002017974853515625, -0.0694580078125, -0.0007686614990234375, 0.01511383056640625, -0.0400390625, 0.024261474609375, -0.011322021484375, -0.0335693359375, -0.042755126953125, -0.037353515625, 0.007061004638671875, 0.05078125, -0.04693603515625, 0.043792724609375, 0.008514404296875, 0.0009164810180664062, -0.052642822265625, -0.046417236328125, -0.00926971435546875, -0.0279388427734375, -0.07513427734375, 0.0305023193359375, -0.031951904296875, -0.031494140625, -0.0107879638671875, -0.00041222572326660156, -0.00644683837890625, 0.013885498046875, 0.023956298828125, 0.037811279296875, -0.005126953125, -0.0011224746704101562, -0.006725311279296875, -0.01490020751953125, 0.01120758056640625, 0.003017425537109375, 0.056549072265625, -0.0047607421875, -0.01824951171875, -0.05889892578125, 
0.0059356689453125, 0.045928955078125, 0.007061004638671875, 0.071044921875, 0.043182373046875, -0.0176239013671875, -0.011810302734375, -0.030914306640625, -0.028594970703125, -0.041290283203125, 0.0421142578125, -0.0217132568359375, -0.05010986328125, 0.043670654296875, -0.005214691162109375, 0.0262298583984375, 0.033203125, 0.054718017578125, -0.0206298828125, 0.0863037109375, 0.040252685546875, -0.0194091796875, 0.0205841064453125, -0.056121826171875, 0.0190887451171875, -0.0623779296875, -0.02191162109375, -0.024261474609375, -0.0255279541015625, -0.052001953125, -0.0195159912109375, 0.01544952392578125, -0.004119873046875, -0.03607177734375, 0.0161285400390625, -0.029449462890625, 0.01313018798828125, 0.04376220703125, 0.014892578125, 0.0266571044921875, 0.0006880760192871094, 0.0015993118286132812, -0.01294708251953125, -0.043212890625, -0.038909912109375, 0.088134765625, 0.0298004150390625, 0.07269287109375, 0.009246826171875, 0.06390380859375, 0.02130126953125, -0.00014269351959228516, -0.052734375, 0.03680419921875, -0.00595855712890625, -0.062103271484375, -0.01116180419921875, -0.0270233154296875, -0.06201171875, 0.01947021484375, -0.0133819580078125, -0.08148193359375, 0.0145721435546875, 0.01122283935546875, -0.0258331298828125, 0.035675048828125, -0.05072021484375, 0.055328369140625, -0.029754638671875, -0.040802001953125, -0.01248931884765625, -0.0206451416015625, 0.0216522216796875, 0.01329803466796875, 0.0223388671875, -0.01345062255859375, 0.01151275634765625, 0.046722412109375, -0.045501708984375, 0.060516357421875, -0.0072479248046875, -0.0177154541015625, 0.023773193359375, 0.01222991943359375, 0.05889892578125, -0.0002892017364501953, -0.0079193115234375, -0.00034928321838378906, -0.00527191162109375, -0.013885498046875, -0.02850341796875, 0.07025146484375, -0.08538818359375, -0.055999755859375, -0.039031982421875, -0.05731201171875, 0.0163421630859375, 0.003055572509765625, 0.0207672119140625, 0.0269012451171875, 0.004711151123046875, 
0.0162200927734375, 0.065185546875, -0.0251922607421875, 0.042877197265625, 0.013671875, -0.025146484375, -0.013916015625, 0.062469482421875, 0.00147247314453125, 0.031463623046875, 0.01314544677734375, 0.0010538101196289062, -0.0197296142578125, -0.030731201171875, -0.02734375, 0.026153564453125, -0.045501708984375, -0.006927490234375, -0.055145263671875, -0.0408935546875, -0.021575927734375, -0.0154266357421875, -0.036865234375, -0.04132080078125, -0.04339599609375, -0.0153656005859375, 0.050201416015625, 0.055419921875, 0.002460479736328125, 0.00891876220703125, -0.0382080078125, 0.00579833984375, 0.033050537109375, 0.0051116943359375, -0.005340576171875, -0.055633544921875, -0.0152587890625, 0.01343536376953125, -0.028717041015625, -0.034393310546875, 0.041473388671875, 0.01317596435546875, 0.041290283203125, 0.01482391357421875, 0.005096435546875, 0.042083740234375, -0.02459716796875, 0.06787109375, 0.0022869110107421875, -0.053985595703125, 0.042572021484375, -0.008819580078125, 0.00882720947265625, 0.0225982666015625, 0.035003662109375, -0.02288818359375, -0.0291900634765625, -0.051300048828125, -0.07525634765625, 0.07025146484375, 0.0352783203125, 0.02325439453125, 0.0038547515869140625, 0.0127410888671875, -0.0194244384765625, 0.009002685546875, -0.0802001953125, -0.054168701171875, -0.00385284423828125, -0.02032470703125, 0.0252838134765625, -0.01873779296875, -0.0175628662109375, -0.01056671142578125, 0.058837890625, 0.011993408203125, 0.03326416015625, 0.00562286376953125, -0.018707275390625, -0.0012235641479492188, 0.00510406494140625, 0.040008544921875, 0.050018310546875, -0.04595947265625, -0.00562286376953125, 0.0230560302734375, -0.04669189453125, 0.004608154296875, 0.00860595703125, -0.0235748291015625, -0.0100860595703125, 0.0264739990234375, 0.04766845703125, -0.01053619384765625, -0.03106689453125, 0.03558349609375, -0.007061004638671875, -0.0302581787109375, -0.028106689453125, 0.014617919921875, 0.016815185546875, 0.051361083984375, 
0.0323486328125, -0.0137176513671875, 0.0019397735595703125, -0.046844482421875, 0.0013494491577148438, 0.03271484375, -0.0181732177734375, -0.008758544921875, 0.06683349609375, 0.0223388671875, -0.0187835693359375, 0.063232421875, -0.01090240478515625, -0.0284423828125, 0.0638427734375, 0.03875732421875, 0.05352783203125, -0.0094451904296875, 0.008270263671875, 0.055938720703125, 0.0214385986328125, 0.01468658447265625, 0.0236968994140625, 0.02105712890625, -0.040771484375, -0.0197601318359375, -0.0401611328125, -0.01531219482421875, 0.01522064208984375, -0.0291595458984375, 0.02154541015625, -0.03546142578125, -0.018890380859375, -0.019195556640625, 0.02838134765625, -0.04974365234375, 0.01529693603515625, 0.0206298828125, 0.0692138671875, -0.0533447265625, 0.073974609375, 0.02178955078125, -0.04052734375, -0.05230712890625, -0.02532958984375, 0.0154876708984375, -0.06707763671875, 0.0782470703125, 0.0142059326171875, 0.0004189014434814453, 0.003894805908203125, -0.032379150390625, -0.0810546875, 0.1229248046875, -0.0002791881561279297, -0.041259765625, -0.00725555419921875, 0.0005855560302734375, 0.03204345703125, -0.020599365234375, 0.042266845703125, 0.035552978515625, 0.0259246826171875, 0.0079803466796875, -0.080810546875, 0.0178070068359375, -0.0194091796875, -0.00556182861328125, -0.006198883056640625, -0.055419921875, 0.10162353515625, -0.028778076171875, -0.0163116455078125, 0.0302581787109375, 0.048553466796875, 0.029327392578125, 0.006832122802734375, 0.024871826171875, 0.0489501953125, 0.0633544921875, -0.0200042724609375, 0.08111572265625, -0.0298919677734375, 0.061614990234375, 0.0755615234375, -0.0081634521484375, 0.049407958984375, 0.025634765625, -0.037139892578125, 0.044036865234375, 0.05438232421875, 0.003337860107421875, 0.042755126953125, 0.00627899169921875, -0.0335693359375, -0.0038280487060546875, 0.00955963134765625, -0.036956787109375, 0.01073455810546875, 0.0322265625, -0.042022705078125, -0.004913330078125, -0.0194549560546875, 
0.032623291015625, -0.01233673095703125, -0.01218414306640625, 0.04437255859375, 0.001514434814453125, -0.0163421630859375, 0.05584716796875, -0.01178741455078125, 0.06683349609375, -0.040130615234375, -0.0011014938354492188, -0.035552978515625, 0.026397705078125, -0.0399169921875, -0.057159423828125, -0.0013551712036132812, -0.00319671630859375, -0.01068878173828125, 0.005054473876953125, 0.0284423828125, -0.01418304443359375, -0.034027099609375, 0.0306243896484375, 0.0223541259765625, 0.0174407958984375, 0.018096923828125, -0.06268310546875, 0.0136260986328125, 0.01171112060546875, -0.05078125, 0.0196533203125, 0.0186004638671875, 0.01470947265625, 0.0638427734375, 0.04345703125, -0.00330352783203125, 0.01279449462890625, 0.0013532638549804688, 0.0882568359375, -0.037139892578125, 0.005207061767578125, -0.06298828125, 0.056488037109375, -0.02008056640625, -0.013885498046875, 0.06341552734375, 0.0467529296875, 0.04766845703125, 0.01024627685546875, 0.052154541015625, -0.040252685546875, 0.0261383056640625, -0.048126220703125, 0.051025390625, -0.043426513671875, 0.0274658203125, -0.005710601806640625, -0.06719970703125, -0.0099029541015625, 0.0679931640625, -0.024749755859375, 0.0220947265625, 0.062347412109375, 0.078369140625, 0.005428314208984375, -0.02020263671875, 0.0221405029296875, 0.03729248046875, 0.034515380859375, 0.050079345703125, 0.049560546875, -0.050994873046875, 0.051422119140625, -0.042144775390625, -0.01302337646484375, -0.0178070068359375, -0.060821533203125, -0.08575439453125, -0.03558349609375, -0.028076171875, -0.0384521484375, -0.0010499954223632812, 0.0792236328125, 0.0576171875, -0.049285888671875, -0.0233154296875, -0.0015716552734375, 0.01329803466796875, 0.0068817138671875, -0.0178985595703125, 0.053314208984375, -0.0273284912109375, -0.08587646484375, 0.017364501953125, 0.00293731689453125, 0.01245880126953125, -0.0308990478515625, -0.0214080810546875, -0.0266876220703125, 0.0010385513305664062, 0.04461669921875, 0.00733184814453125, 
-0.050628662109375, -0.0211334228515625, 0.005542755126953125, -0.01503753662109375, 0.00864410400390625, 0.018310546875, -0.052581787109375, 0.0266571044921875, 0.0288848876953125, 0.014495849609375, 0.0501708984375, -0.019378662109375, 0.0181427001953125, -0.035552978515625, 0.0243682861328125, -0.0008578300476074219, 0.0260772705078125, 0.01128387451171875, -0.045928955078125, 0.04229736328125, 0.010009765625, -0.06170654296875, -0.04681396484375, 0.0030879974365234375, -0.0772705078125, -0.015289306640625, 0.08642578125, -0.0218353271484375, -0.02642822265625, 0.0064239501953125, -0.031494140625, -0.004405975341796875, -0.03076171875, 0.06463623046875, 0.048919677734375, -0.0170135498046875, -0.0260467529296875, -0.0611572265625, 0.037353515625, 0.007411956787109375, -0.05731201171875, 0.0006952285766601562, 0.0252838134765625, 0.0286865234375, 0.01412200927734375, 0.0491943359375, -0.01091766357421875, 0.01441192626953125, 0.0119171142578125, 0.004825592041015625, -0.005107879638671875, -0.010162353515625, -0.0015497207641601562, -0.016571044921875, 0.016082763671875, -0.015289306640625 ] ]
Norquinal/llama-2-7b-claude-chat
2023-08-12T01:57:58.000Z
[ "transformers", "pytorch", "llama", "text-generation", "dataset:Norquinal/claude_multiround_chat_1k", "endpoints_compatible", "has_space", "text-generation-inference", "region:us" ]
text-generation
Norquinal
null
null
Norquinal/llama-2-7b-claude-chat
0
5,810
transformers
2023-08-11T06:50:06
--- datasets: - Norquinal/claude_multiround_chat_1k --- This is a [LLaMA-2-7b-hf](https://huggingface.co/NousResearch/Llama-2-7b-hf) model fine-tuned using QLoRA (4-bit precision) on my [claude_multiround_chat_1k](https://huggingface.co/datasets/Norquinal/claude_multiround_chat_1k) dataset, which is a randomized subset of ~1000 samples from my [claude_multiround_chat_30k](https://huggingface.co/datasets/Norquinal/claude_multiround_chat_30k) dataset. Do not take this model very seriously, it is probably not very good. I haven't a clue of what I'm doing. I just thought it was a fun thing to make. ## Prompt Format The model was finetuned with the Vicuna 1.1 prompt format: ``` A chat between a curious user and an artificial intelligence assistant. The assistant gives helpful, detailed, and polite answers to the user's questions. USER: {instruction} ASSISTANT: ``` ## Example Usage >USER: First, give a brief overview of Chinese cuisine, including its regional diversity. next, describe the importance of the five flavors in Chinese cooking. Then, mention three popular Chinese dishes and their main ingredients. After that, provide a popular Chinese cooking technique and an example of a dish prepared using that technique. Finally, discuss the role of tea in traditional Chinese culture. >ASSISTANT: Chinese cuisine is diverse due to China's vast geography and climate. Different regions have developed distinct styles of food based on local ingredients and techniques. > >The Five Flavor Theory states that all tastes - sweetness, sourness, saltiness, bitterness, and savory umami - are essential components of good Chinese cooking. Chefs use these flavors harmoniously to create balanced meals with complex taste profiles. > >Three famous Chinese dishes include Peking duck (roasted duck), Sichuan hot pot (spicy broth with vegetables) and dim sum (small buns or steamed dumplings). Their key ingredient is usually meat, poultry or seafood. 
> >A common cooking method is stir-frying where ingredients like chicken, beef or vegetables are quickly sautéed in oil at high heat until just cooked through. Stir fried green beans would be an example. > >Tea has been consumed for thousands of years as part of daily life in China. It is served before meals to cleanse palates and afterward to aid digestion. Teas range from mildly fragrant white teas to robust oolong varieties.
2,398
[ [ -0.0211181640625, -0.068603515625, 0.0230865478515625, 0.0391845703125, -0.01837158203125, -0.0122222900390625, 0.001190185546875, -0.028594970703125, 0.0308990478515625, 0.05218505859375, -0.028472900390625, -0.028717041015625, -0.01184844970703125, 0.002025604248046875, -0.011474609375, 0.05078125, 0.0083770751953125, 0.017242431640625, 0.03350830078125, -0.0194854736328125, -0.05743408203125, -0.03961181640625, -0.07415771484375, -0.035675048828125, 0.059295654296875, 0.03546142578125, 0.0472412109375, 0.0556640625, 0.0230560302734375, 0.01503753662109375, -0.0163421630859375, 0.0450439453125, -0.04571533203125, -0.007076263427734375, 0.011993408203125, -0.048614501953125, -0.0390625, 0.0014629364013671875, 0.024139404296875, 0.03338623046875, -0.0154571533203125, 0.01096343994140625, -0.0006818771362304688, 0.040313720703125, -0.0189208984375, 0.01309967041015625, -0.021728515625, -0.0064849853515625, -0.023345947265625, -0.01058197021484375, -0.006175994873046875, -0.035614013671875, -0.003742218017578125, -0.048828125, -0.0206298828125, 0.018157958984375, 0.0841064453125, 0.00888824462890625, -0.038970947265625, -0.01763916015625, -0.055633544921875, 0.06561279296875, -0.07470703125, 0.01206207275390625, 0.04931640625, 0.037078857421875, -0.034515380859375, -0.049041748046875, -0.0579833984375, -0.0269622802734375, -0.010650634765625, 0.0152740478515625, 0.022491455078125, -0.025604248046875, 0.019012451171875, 0.0270538330078125, -0.03955078125, 0.00530242919921875, -0.0198516845703125, 0.00323486328125, 0.065673828125, 0.034698486328125, 0.040740966796875, -0.02001953125, -0.0406494140625, -0.021026611328125, -0.047576904296875, 0.034820556640625, 0.0307464599609375, 0.038177490234375, -0.031341552734375, 0.03729248046875, -0.015472412109375, 0.040863037109375, 0.01033782958984375, -0.0301513671875, 0.016021728515625, -0.041717529296875, -0.011322021484375, -0.01959228515625, 0.053314208984375, 0.049652099609375, 0.0149383544921875, 0.034423828125, 
-0.0027103424072265625, -0.0158233642578125, -0.009521484375, -0.042816162109375, -0.050445556640625, 0.0380859375, -0.042755126953125, -0.0149688720703125, 0.004528045654296875, -0.03582763671875, -0.03857421875, -0.0107574462890625, 0.02349853515625, -0.0271759033203125, -0.022216796875, 0.004100799560546875, -0.037353515625, 0.055419921875, 0.033050537109375, -0.0665283203125, 0.020904541015625, 0.0242767333984375, 0.054443359375, -0.005435943603515625, -0.0199127197265625, -0.0186920166015625, 0.003818511962890625, -0.033233642578125, 0.061676025390625, -0.033477783203125, -0.0279693603515625, -0.0240936279296875, 0.017913818359375, 0.02130126953125, -0.05084228515625, 0.045623779296875, -0.0206756591796875, 0.0159912109375, -0.07977294921875, -0.02557373046875, -0.03839111328125, 0.027069091796875, -0.06268310546875, 0.06097412109375, 0.0185089111328125, -0.05084228515625, 0.01342010498046875, -0.04754638671875, -0.03643798828125, 0.01454925537109375, -0.0099334716796875, -0.0114288330078125, -0.017852783203125, 0.0150604248046875, 0.007144927978515625, -0.037750244140625, 0.006031036376953125, -0.0261383056640625, -0.03570556640625, 0.015472412109375, -0.019744873046875, 0.08282470703125, 0.021209716796875, -0.0109100341796875, 0.0091552734375, -0.0570068359375, -0.01229095458984375, 0.034576416015625, -0.0252532958984375, -0.033782958984375, 0.00746917724609375, 0.01108551025390625, -0.007495880126953125, 0.019134521484375, -0.03253173828125, 0.02496337890625, -0.0214385986328125, 0.046112060546875, 0.051727294921875, 0.0156402587890625, 0.0202484130859375, -0.06353759765625, -0.0073394775390625, 0.021820068359375, 0.0263519287109375, -0.01558685302734375, -0.04266357421875, -0.064697265625, -0.006305694580078125, 0.031341552734375, 0.0640869140625, -0.048370361328125, 0.0263671875, 0.018890380859375, -0.054534912109375, -0.0316162109375, 0.001922607421875, 0.003574371337890625, 0.038421630859375, 0.0214691162109375, -0.033599853515625, -0.0235748291015625, 
-0.0648193359375, -0.0038166046142578125, -0.00867462158203125, -0.00908660888671875, 0.0280303955078125, 0.012451171875, 0.00408172607421875, 0.045257568359375, -0.058685302734375, -0.022308349609375, -0.0240020751953125, -0.007099151611328125, 0.0154266357421875, 0.05255126953125, 0.05902099609375, -0.07220458984375, -0.046234130859375, 0.0194854736328125, -0.059417724609375, 0.015228271484375, -0.008514404296875, -0.0499267578125, -0.0101470947265625, -0.004425048828125, -0.024658203125, 0.037933349609375, 0.0194244384765625, -0.01346588134765625, 0.03302001953125, 0.0005049705505371094, 0.019744873046875, -0.0853271484375, -0.004215240478515625, -0.0013761520385742188, 0.016815185546875, -0.0538330078125, 0.0004911422729492188, -0.00545501708984375, 0.01143646240234375, -0.042327880859375, 0.07098388671875, -0.0318603515625, 0.0242462158203125, -0.0216064453125, -0.0140228271484375, 0.0103912353515625, 0.062744140625, 0.006183624267578125, 0.03399658203125, 0.027496337890625, -0.0545654296875, 0.054290771484375, 0.01251983642578125, -0.0013551712036132812, 0.0400390625, -0.062744140625, 0.015960693359375, 0.007678985595703125, 0.01372528076171875, -0.0836181640625, -0.032318115234375, 0.05230712890625, -0.021392822265625, 0.0010347366333007812, 0.0283966064453125, -0.049072265625, -0.034759521484375, -0.0290374755859375, 0.0265655517578125, 0.027618408203125, -0.044342041015625, 0.0574951171875, 0.0199432373046875, 0.018585205078125, -0.035736083984375, -0.055694580078125, -0.007671356201171875, -0.032196044921875, -0.04315185546875, 0.0202484130859375, -0.024658203125, -0.02337646484375, -0.005207061767578125, 0.005413055419921875, -0.01275634765625, 0.0178070068359375, 0.019317626953125, 0.01140594482421875, -0.0124359130859375, -0.01593017578125, -0.01042938232421875, 0.029144287109375, -0.0021820068359375, 0.006534576416015625, 0.052734375, -0.00927734375, -0.010101318359375, -0.049407958984375, 0.02734375, 0.0241851806640625, -0.004985809326171875, 
0.041534423828125, 0.06756591796875, -0.0249786376953125, -0.0031566619873046875, -0.038421630859375, -0.02349853515625, -0.035400390625, 0.000301361083984375, -0.035491943359375, -0.029266357421875, 0.0784912109375, 0.0192108154296875, 0.0247650146484375, 0.032745361328125, 0.03826904296875, -0.0211944580078125, 0.0570068359375, 0.0413818359375, -0.004581451416015625, 0.044769287109375, -0.0118865966796875, 0.0031871795654296875, -0.04638671875, -0.020843505859375, -0.027008056640625, -0.036865234375, -0.056671142578125, -0.0214691162109375, 0.024139404296875, -0.0176544189453125, -0.046875, 0.0271148681640625, -0.0313720703125, 0.0266265869140625, 0.065185546875, 0.03729248046875, 0.0224456787109375, -0.01068878173828125, 0.00971221923828125, 0.02728271484375, -0.05078125, -0.03472900390625, 0.061065673828125, 0.034759521484375, 0.07965087890625, 0.0187530517578125, 0.04681396484375, 0.0158843994140625, -0.003032684326171875, -0.037139892578125, 0.0291595458984375, -0.0270538330078125, -0.05621337890625, -0.036651611328125, -0.026702880859375, -0.06793212890625, 0.0267486572265625, -0.01654052734375, -0.035400390625, 0.0166473388671875, 0.0119171142578125, -0.05560302734375, 0.0157623291015625, -0.0643310546875, 0.052398681640625, -0.05133056640625, -0.034027099609375, 0.032958984375, -0.049652099609375, 0.051910400390625, 0.0187225341796875, 0.0040740966796875, -0.0182647705078125, 0.0267486572265625, 0.052001953125, -0.007656097412109375, 0.073486328125, -0.010406494140625, -0.002674102783203125, 0.034088134765625, 0.0080718994140625, 0.0012750625610351562, 0.0216522216796875, -0.0107574462890625, 0.01531219482421875, 0.026519775390625, -0.051971435546875, -0.054107666015625, 0.03839111328125, -0.08258056640625, -0.060699462890625, -0.031768798828125, -0.032623291015625, 0.01107025146484375, 0.015380859375, 0.01325225830078125, -0.00013685226440429688, -0.0159912109375, 0.0031375885009765625, 0.027618408203125, -0.020263671875, 0.0248870849609375, 
0.0185394287109375, -0.015716552734375, -0.01910400390625, 0.0711669921875, 0.019805908203125, 0.00937652587890625, 0.0265655517578125, 0.037109375, -0.0087890625, 0.00394439697265625, -0.02825927734375, 0.021331787109375, -0.047210693359375, -0.0013370513916015625, -0.0287322998046875, -0.0472412109375, -0.0251617431640625, 0.005466461181640625, -0.00994873046875, -0.01171875, -0.047576904296875, -0.00196075439453125, 0.0247650146484375, 0.04766845703125, -0.00943756103515625, 0.04315185546875, -0.046966552734375, 0.03082275390625, 0.032806396484375, -0.0003266334533691406, 0.02972412109375, -0.0357666015625, -0.0028133392333984375, 0.01404571533203125, -0.03546142578125, -0.07330322265625, 0.0548095703125, -0.004638671875, 0.058685302734375, 0.04937744140625, 0.010406494140625, 0.04498291015625, -0.00921630859375, 0.08868408203125, 0.002727508544921875, -0.0506591796875, 0.0350341796875, -0.01904296875, 0.004405975341796875, 0.0252532958984375, 0.02978515625, -0.04766845703125, -0.0221099853515625, -0.0311126708984375, -0.050201416015625, 0.016448974609375, 0.0114593505859375, 0.02130126953125, -0.00010317564010620117, 0.03448486328125, 0.011322021484375, 0.00897216796875, -0.0765380859375, -0.0357666015625, -0.037445068359375, 0.01525115966796875, 0.006137847900390625, -0.002323150634765625, -0.0272216796875, -0.04266357421875, 0.03350830078125, 0.00925445556640625, 0.0102691650390625, -0.0036449432373046875, 0.04193115234375, -0.00417327880859375, -0.00450897216796875, 0.05352783203125, 0.034515380859375, -0.031585693359375, 0.0048828125, 0.02386474609375, -0.03338623046875, 0.00272369384765625, -0.0236053466796875, 0.00165557861328125, -0.0201873779296875, 0.0185089111328125, 0.05108642578125, -0.0304718017578125, -0.0250396728515625, 0.003955841064453125, -0.030853271484375, -0.003147125244140625, -0.02777099609375, 0.0357666015625, 0.02294921875, 0.0172119140625, 0.0262603759765625, -0.0143280029296875, 0.0102996826171875, -0.058746337890625, 
0.00447845458984375, 0.0194854736328125, 0.0014543533325195312, -0.031341552734375, 0.033111572265625, 0.0304107666015625, -0.0184326171875, 0.06011962890625, -0.020477294921875, -0.0294952392578125, 0.0609130859375, 0.041290283203125, 0.036651611328125, -0.0211639404296875, 0.011871337890625, 0.040863037109375, 0.0083770751953125, -0.0178680419921875, 0.0474853515625, 0.00707244873046875, -0.06097412109375, 0.01128387451171875, -0.04345703125, -0.0325927734375, 0.0284576416015625, -0.036529541015625, 0.034698486328125, -0.044189453125, -0.018768310546875, -0.004634857177734375, 0.0185699462890625, -0.0626220703125, 0.00823974609375, 0.005817413330078125, 0.0618896484375, -0.07513427734375, 0.062408447265625, 0.038421630859375, -0.0423583984375, -0.094970703125, 0.00493621826171875, -0.0014734268188476562, -0.0673828125, 0.0390625, -0.01025390625, 0.017120361328125, -0.026824951171875, -0.07391357421875, -0.075439453125, 0.083984375, -0.0016870498657226562, -0.0299224853515625, -0.005157470703125, -0.00670623779296875, 0.04656982421875, -0.0298309326171875, 0.0305938720703125, 0.04241943359375, 0.048675537109375, 0.030548095703125, -0.058685302734375, 0.00616455078125, -0.017364501953125, 0.014404296875, -0.03875732421875, -0.10406494140625, 0.11553955078125, -0.0181732177734375, -0.01031494140625, 0.006565093994140625, 0.0709228515625, 0.035491943359375, 0.03448486328125, 0.0250396728515625, 0.018463134765625, 0.06378173828125, 0.005771636962890625, 0.0625, -0.01308441162109375, 0.007152557373046875, 0.07855224609375, -0.03460693359375, 0.0467529296875, 0.0230865478515625, -0.01355743408203125, 0.028594970703125, 0.1053466796875, -0.0194549560546875, 0.047210693359375, 0.02947998046875, 0.0165557861328125, -0.003936767578125, -0.01555633544921875, -0.04449462890625, 0.047576904296875, 0.0216827392578125, -0.0176544189453125, -0.004360198974609375, -0.0196685791015625, 0.007080078125, -0.00806427001953125, 0.000514984130859375, 0.059173583984375, 0.02105712890625, 
-0.049163818359375, 0.053497314453125, 0.001964569091796875, 0.041656494140625, -0.02203369140625, -0.031829833984375, -0.04876708984375, -0.0175018310546875, -0.01551055908203125, -0.06494140625, -0.004055023193359375, -0.040679931640625, -0.0046844482421875, 0.03326416015625, 0.032989501953125, -0.03582763671875, -0.0341796875, 0.0159759521484375, 0.052459716796875, 0.0178070068359375, 0.005207061767578125, -0.049591064453125, 0.005954742431640625, 0.0085906982421875, -0.01045989990234375, 0.00699615478515625, 0.03875732421875, -0.004608154296875, 0.042877197265625, 0.04852294921875, 0.0048828125, -0.007904052734375, 0.002410888671875, 0.07586669921875, -0.06451416015625, -0.043731689453125, -0.05889892578125, 0.027557373046875, 0.0033931732177734375, -0.033599853515625, 0.057159423828125, 0.01140594482421875, 0.0560302734375, -0.001861572265625, 0.05169677734375, -0.0021514892578125, 0.025634765625, -0.03912353515625, 0.05157470703125, -0.06732177734375, 0.0269927978515625, 0.004917144775390625, -0.0699462890625, -0.00705718994140625, 0.057373046875, -0.01125335693359375, 0.0129241943359375, 0.03692626953125, 0.05340576171875, 0.01318359375, 0.010894775390625, 0.0318603515625, 0.0226593017578125, 0.038970947265625, 0.036041259765625, 0.07635498046875, -0.0288848876953125, 0.007778167724609375, -0.034637451171875, -0.021270751953125, -0.0153045654296875, -0.04632568359375, -0.068603515625, -0.044464111328125, 0.006679534912109375, -0.0235137939453125, 0.00992584228515625, 0.0750732421875, 0.065673828125, -0.07086181640625, -0.042877197265625, 0.04052734375, 0.026824951171875, -0.040313720703125, -0.0172119140625, 0.023712158203125, -0.004390716552734375, -0.050201416015625, 0.0024318695068359375, 0.00223541259765625, 0.0185699462890625, -0.0224609375, 0.0228424072265625, -0.0272674560546875, 0.03045654296875, 0.0526123046875, 0.039276123046875, -0.0709228515625, -0.038421630859375, 0.010101318359375, -0.01641845703125, 0.022979736328125, 0.01009368896484375, 
-0.023223876953125, 0.013763427734375, 0.0254364013671875, 0.022735595703125, 0.02496337890625, 0.03326416015625, 0.0294189453125, -0.043304443359375, -0.01151275634765625, -0.0003516674041748047, 0.01511383056640625, 0.0207672119140625, -0.05072021484375, 0.039093017578125, 0.006961822509765625, -0.043365478515625, -0.0491943359375, -0.005901336669921875, -0.0997314453125, -0.0172576904296875, 0.08868408203125, -0.01337432861328125, -0.052001953125, 0.00705718994140625, -0.042755126953125, 0.03192138671875, -0.04937744140625, 0.07080078125, 0.05560302734375, -0.047210693359375, -0.00960540771484375, -0.029449462890625, 0.043670654296875, 0.013397216796875, -0.061279296875, -0.007617950439453125, 0.0180511474609375, 0.041717529296875, 0.0272216796875, 0.04754638671875, 0.016937255859375, 0.0187225341796875, 0.0058441162109375, -0.0006489753723144531, 0.0045928955078125, -0.0330810546875, 0.00499725341796875, 0.0170440673828125, 0.000019848346710205078, -0.0279693603515625 ] ]
bofenghuang/vigogne-7b-instruct
2023-07-11T10:18:13.000Z
[ "transformers", "pytorch", "llama", "text-generation", "LLM", "fr", "license:openrail", "has_space", "text-generation-inference", "region:us" ]
text-generation
bofenghuang
null
null
bofenghuang/vigogne-7b-instruct
21
5,809
transformers
2023-03-22T21:36:45
--- license: openrail language: - fr pipeline_tag: text-generation library_name: transformers tags: - llama - LLM inference: false --- <p align="center" width="100%"> <img src="https://huggingface.co/bofenghuang/vigogne-7b-instruct/resolve/main/vigogne_logo.png" alt="Vigogne" style="width: 40%; min-width: 300px; display: block; margin: auto;"> </p> # Vigogne-7B-Instruct: A French Instruction-following LLaMA Model Vigogne-7B-Instruct is a LLaMA-7B model fine-tuned to follow the French instructions. For more information, please visit the Github repo: https://github.com/bofenghuang/vigogne **Usage and License Notices**: Same as [Stanford Alpaca](https://github.com/tatsu-lab/stanford_alpaca), Vigogne is intended and licensed for research use only. The dataset is CC BY NC 4.0 (allowing only non-commercial use) and models trained using the dataset should not be used outside of research purposes. ## Changelog All versions are available in branches. - **V1.0**: Initial release, trained on the translated Stanford Alpaca dataset. - **V1.1**: Improved translation quality of the Stanford Alpaca dataset. - **V2.0**: Expanded training dataset to 224k for better performance. - **V3.0**: Further expanded training dataset to 262k for improved results. ## Usage ```python import torch from transformers import AutoModelForCausalLM, AutoTokenizer, GenerationConfig from vigogne.preprocess import generate_instruct_prompt model_name_or_path = "bofenghuang/vigogne-7b-instruct" tokenizer = AutoTokenizer.from_pretrained(model_name_or_path, padding_side="right", use_fast=False) model = AutoModelForCausalLM.from_pretrained(model_name_or_path, torch_dtype=torch.float16, device_map="auto") user_query = "Expliquez la différence entre DoS et phishing." 
prompt = generate_instruct_prompt(user_query) input_ids = tokenizer(prompt, return_tensors="pt")["input_ids"].to(model.device) input_length = input_ids.shape[1] generated_outputs = model.generate( input_ids=input_ids, generation_config=GenerationConfig( temperature=0.1, do_sample=True, repetition_penalty=1.0, max_new_tokens=512, ), return_dict_in_generate=True, ) generated_tokens = generated_outputs.sequences[0, input_length:] generated_text = tokenizer.decode(generated_tokens, skip_special_tokens=True) print(generated_text) ``` You can also infer this model by using the following Google Colab Notebook. <a href="https://colab.research.google.com/github/bofenghuang/vigogne/blob/main/notebooks/infer_instruct.ipynb" target="_blank"><img src="https://colab.research.google.com/assets/colab-badge.svg" alt="Open In Colab"/></a> ## Limitations Vigogne is still under development, and there are many limitations that have to be addressed. Please note that it is possible that the model generates harmful or biased content, incorrect information or generally unhelpful answers.
2,895
[ [ -0.030029296875, -0.0518798828125, 0.0218963623046875, 0.0221405029296875, -0.032867431640625, -0.01505279541015625, -0.007843017578125, -0.040740966796875, 0.01213836669921875, 0.0196075439453125, -0.053009033203125, -0.052398681640625, -0.0518798828125, 0.005374908447265625, -0.019439697265625, 0.059539794921875, 0.003932952880859375, -0.0022106170654296875, -0.0105743408203125, 0.001483917236328125, -0.0283355712890625, -0.03558349609375, -0.04925537109375, -0.03631591796875, 0.02581787109375, 0.0084228515625, 0.034332275390625, 0.037933349609375, 0.03594970703125, 0.026885986328125, -0.024505615234375, 0.032562255859375, -0.0411376953125, -0.0135040283203125, 0.004146575927734375, -0.0214385986328125, -0.046478271484375, 0.0084228515625, 0.0266876220703125, 0.008819580078125, -0.00009036064147949219, 0.02630615234375, 0.00909423828125, 0.00815582275390625, -0.03106689453125, 0.022918701171875, -0.048553466796875, -0.018646240234375, -0.00011593103408813477, -0.00872802734375, -0.029083251953125, -0.022918701171875, 0.00923919677734375, -0.05279541015625, 0.042388916015625, -0.00689697265625, 0.10260009765625, 0.007049560546875, -0.0296630859375, -0.0291290283203125, -0.03668212890625, 0.0477294921875, -0.0545654296875, 0.0066680908203125, 0.0278472900390625, 0.00572967529296875, -0.0257720947265625, -0.059661865234375, -0.051910400390625, -0.0175628662109375, -0.01093292236328125, 0.0094146728515625, -0.0130767822265625, -0.01369476318359375, 0.0233612060546875, 0.0280609130859375, -0.050537109375, -0.0105133056640625, -0.06658935546875, -0.012481689453125, 0.05780029296875, 0.013885498046875, 0.01678466796875, -0.0241851806640625, -0.04388427734375, -0.0256500244140625, -0.0462646484375, 0.018646240234375, 0.02301025390625, 0.014862060546875, -0.032135009765625, 0.03338623046875, -0.01507568359375, 0.053619384765625, 0.0137786865234375, -0.01055908203125, 0.057342529296875, -0.0125885009765625, -0.030364990234375, -0.00042939186096191406, 0.07733154296875, 
0.02880859375, -0.00601959228515625, -0.0037078857421875, -0.004878997802734375, -0.01256561279296875, 0.004680633544921875, -0.06719970703125, -0.0095672607421875, 0.01873779296875, -0.020050048828125, -0.039703369140625, 0.0174407958984375, -0.03765869140625, -0.02581787109375, -0.003204345703125, 0.062347412109375, -0.0299835205078125, -0.0079193115234375, 0.0229339599609375, -0.008087158203125, 0.04803466796875, 0.0108489990234375, -0.062103271484375, -0.0076751708984375, 0.01194000244140625, 0.06292724609375, 0.0181427001953125, -0.02935791015625, -0.0151214599609375, 0.007793426513671875, 0.0017118453979492188, 0.049163818359375, -0.01113128662109375, -0.059234619140625, -0.0160675048828125, 0.037261962890625, -0.02099609375, -0.0214385986328125, 0.034454345703125, -0.0219879150390625, 0.0235137939453125, -0.02166748046875, -0.018951416015625, -0.0296783447265625, 0.00998687744140625, -0.03717041015625, 0.0784912109375, 0.008270263671875, -0.049560546875, 0.01885986328125, -0.0226593017578125, -0.02313232421875, -0.015533447265625, 0.00695037841796875, -0.055938720703125, -0.001979827880859375, 0.02972412109375, 0.039520263671875, -0.036529541015625, -0.006267547607421875, -0.028411865234375, -0.0285797119140625, 0.02166748046875, -0.0284423828125, 0.086669921875, 0.0035839080810546875, -0.037811279296875, 0.0130157470703125, -0.0723876953125, -0.00685882568359375, 0.045013427734375, -0.0172882080078125, 0.00023663043975830078, -0.0248260498046875, -0.0116424560546875, 0.0159912109375, 0.0243072509765625, -0.029296875, 0.03143310546875, -0.024261474609375, 0.03326416015625, 0.0587158203125, -0.004032135009765625, 0.032745361328125, -0.024322509765625, 0.033599853515625, -0.006175994873046875, 0.04254150390625, -0.00043892860412597656, -0.041259765625, -0.07427978515625, -0.032989501953125, -0.0007214546203613281, 0.048431396484375, -0.04864501953125, 0.030609130859375, -0.0092926025390625, -0.06793212890625, -0.035400390625, 0.01325225830078125, 
0.02301025390625, 0.046539306640625, 0.042266845703125, -0.014923095703125, -0.0212860107421875, -0.04315185546875, 0.017730712890625, -0.00030231475830078125, 0.00469207763671875, 0.0242156982421875, 0.046844482421875, -0.01158905029296875, 0.03973388671875, -0.044097900390625, -0.0197601318359375, -0.013702392578125, 0.0036334991455078125, 0.039825439453125, 0.03961181640625, 0.056396484375, -0.04302978515625, -0.024505615234375, -0.006504058837890625, -0.08026123046875, -0.001979827880859375, 0.008331298828125, -0.0220947265625, -0.00839996337890625, 0.03045654296875, -0.06134033203125, 0.049774169921875, 0.0423583984375, -0.044830322265625, 0.0287322998046875, -0.0293731689453125, 0.01045989990234375, -0.089111328125, -0.00252532958984375, 0.005397796630859375, -0.007167816162109375, -0.03167724609375, -0.0098876953125, 0.0087890625, 0.005275726318359375, -0.061126708984375, 0.05340576171875, -0.04779052734375, 0.031219482421875, -0.0226287841796875, -0.0056915283203125, 0.0196380615234375, 0.0260162353515625, -0.01024627685546875, 0.054107666015625, 0.059600830078125, -0.059661865234375, 0.04583740234375, 0.0261993408203125, -0.0231170654296875, 0.017608642578125, -0.07244873046875, 0.01751708984375, -0.01108551025390625, 0.0132293701171875, -0.058349609375, -0.020355224609375, 0.048828125, -0.05364990234375, 0.0307159423828125, -0.0229339599609375, -0.0279388427734375, -0.041778564453125, -0.00830841064453125, 0.03057861328125, 0.0535888671875, -0.058197021484375, 0.053375244140625, 0.01132965087890625, 0.01364898681640625, -0.049163818359375, -0.03607177734375, -0.0274505615234375, -0.035125732421875, -0.060821533203125, 0.0308837890625, 0.0008373260498046875, 0.003040313720703125, -0.007419586181640625, 0.005359649658203125, -0.0018949508666992188, 0.0040435791015625, 0.034637451171875, 0.045318603515625, -0.0031871795654296875, 0.00382232666015625, -0.0031585693359375, -0.00815582275390625, 0.003997802734375, -0.0247039794921875, 0.059112548828125, 
-0.031707763671875, -0.00325775146484375, -0.039642333984375, -0.00922393798828125, 0.033843994140625, -0.030029296875, 0.08746337890625, 0.058349609375, -0.027191162109375, -0.00975799560546875, -0.0307464599609375, -0.01485443115234375, -0.04193115234375, 0.00812530517578125, -0.0242462158203125, -0.021209716796875, 0.055938720703125, 0.02166748046875, 0.002780914306640625, 0.05517578125, 0.04449462890625, -0.0004305839538574219, 0.058746337890625, 0.04901123046875, 0.0204620361328125, 0.044647216796875, -0.0653076171875, 0.002288818359375, -0.06219482421875, -0.05364990234375, -0.0340576171875, 0.004547119140625, -0.032989501953125, -0.044586181640625, 0.0217437744140625, 0.0123748779296875, -0.0223236083984375, 0.050933837890625, -0.0538330078125, 0.0232086181640625, 0.045257568359375, 0.0257568359375, -0.0005826950073242188, 0.00800323486328125, 0.0020046234130859375, 0.0240325927734375, -0.045074462890625, -0.038665771484375, 0.08001708984375, 0.0124359130859375, 0.07147216796875, -0.004131317138671875, 0.0521240234375, -0.0030994415283203125, 0.031951904296875, -0.03753662109375, 0.03387451171875, 0.0118255615234375, -0.0445556640625, -0.007106781005859375, -0.0210418701171875, -0.077880859375, 0.033477783203125, -0.032073974609375, -0.066162109375, 0.0281524658203125, 0.03179931640625, -0.016357421875, 0.01540374755859375, -0.05340576171875, 0.0631103515625, -0.02374267578125, -0.038330078125, 0.0165557861328125, -0.0284881591796875, 0.0212249755859375, 0.016265869140625, 0.005115509033203125, -0.0176849365234375, 0.03955078125, 0.052337646484375, -0.0419921875, 0.06072998046875, -0.006317138671875, -0.009552001953125, 0.046234130859375, -0.0251312255859375, 0.047760009765625, 0.01398468017578125, -0.0196075439453125, 0.032318115234375, -0.0183868408203125, -0.034027099609375, -0.01904296875, 0.055450439453125, -0.06414794921875, -0.051300048828125, -0.048248291015625, -0.0295257568359375, 0.011749267578125, 0.01763916015625, 0.0743408203125, 
0.0217742919921875, -0.0019989013671875, 0.005748748779296875, 0.059539794921875, -0.0293426513671875, 0.0250244140625, 0.0158843994140625, -0.011383056640625, -0.042816162109375, 0.07073974609375, -0.0004968643188476562, 0.017974853515625, -0.001438140869140625, 0.0078277587890625, -0.033203125, -0.0191497802734375, -0.04071044921875, 0.0421142578125, -0.059906005859375, -0.024139404296875, -0.04083251953125, -0.021087646484375, -0.045013427734375, -0.0216827392578125, -0.0294342041015625, -0.0115203857421875, -0.05078125, -0.0166015625, 0.044342041015625, 0.03167724609375, 0.00013363361358642578, 0.048309326171875, -0.03875732421875, 0.0160675048828125, 0.017547607421875, -0.00521087646484375, -0.01244354248046875, -0.06414794921875, -0.01468658447265625, 0.01061248779296875, -0.032989501953125, -0.06488037109375, 0.039398193359375, -0.007419586181640625, 0.038238525390625, 0.02294921875, -0.004878997802734375, 0.0499267578125, -0.011871337890625, 0.0518798828125, 0.031707763671875, -0.06781005859375, 0.043487548828125, -0.02215576171875, 0.0180206298828125, 0.01372528076171875, 0.0123291015625, -0.00907135009765625, -0.023681640625, -0.05316162109375, -0.08551025390625, 0.047119140625, 0.0294647216796875, 0.01708984375, -0.0003688335418701172, 0.028289794921875, -0.00267791748046875, 0.0030155181884765625, -0.07379150390625, -0.0284423828125, -0.0360107421875, -0.0135040283203125, -0.01163482666015625, 0.0006933212280273438, -0.00921630859375, -0.033538818359375, 0.07464599609375, 0.005725860595703125, 0.037841796875, 0.0294647216796875, -0.0250091552734375, 0.01482391357421875, -0.01496124267578125, 0.058929443359375, 0.049774169921875, -0.0211334228515625, -0.008087158203125, 0.0264129638671875, -0.036468505859375, 0.0181121826171875, 0.025909423828125, -0.0289764404296875, 0.00922393798828125, 0.01428985595703125, 0.07525634765625, -0.0276641845703125, -0.01490020751953125, 0.037811279296875, -0.0208892822265625, -0.026763916015625, -0.034881591796875, 
0.0260162353515625, 0.01509857177734375, 0.032196044921875, 0.01453399658203125, -0.0166015625, -0.00612640380859375, -0.02215576171875, -0.01419830322265625, 0.03582763671875, -0.0269622802734375, -0.0213623046875, 0.0882568359375, 0.00899505615234375, -0.006389617919921875, 0.0479736328125, -0.01422119140625, -0.0236053466796875, 0.0762939453125, 0.04449462890625, 0.059814453125, -0.01274871826171875, 0.01325225830078125, 0.046356201171875, 0.034912109375, -0.008819580078125, 0.022003173828125, 0.0159759521484375, -0.0275726318359375, 0.00274658203125, -0.050262451171875, 0.00612640380859375, 0.02685546875, -0.04736328125, 0.043792724609375, -0.033935546875, -0.0313720703125, -0.0081329345703125, -0.01428985595703125, -0.08026123046875, 0.0380859375, 0.010955810546875, 0.0521240234375, -0.0635986328125, 0.055633544921875, 0.04022216796875, -0.053070068359375, -0.0703125, -0.0090484619140625, -0.003936767578125, -0.0772705078125, 0.03497314453125, 0.0271759033203125, -0.0263214111328125, -0.004985809326171875, -0.0609130859375, -0.0726318359375, 0.1103515625, 0.030364990234375, -0.0211334228515625, 0.0027942657470703125, -0.00936126708984375, 0.0205230712890625, -0.0182647705078125, 0.0186309814453125, 0.0113983154296875, 0.017303466796875, 0.016693115234375, -0.060760498046875, 0.0163116455078125, -0.0233001708984375, -0.007297515869140625, 0.0010738372802734375, -0.0791015625, 0.0811767578125, -0.0288543701171875, -0.0016384124755859375, 0.010589599609375, 0.07489013671875, 0.017822265625, 0.0247955322265625, 0.026763916015625, 0.027679443359375, 0.06829833984375, -0.01499176025390625, 0.07366943359375, -0.0255126953125, 0.06500244140625, 0.05810546875, 0.007038116455078125, 0.0270233154296875, 0.0261383056640625, 0.00848388671875, 0.024749755859375, 0.05780029296875, -0.0294342041015625, 0.040740966796875, 0.01873779296875, -0.033721923828125, -0.01165771484375, 0.00543975830078125, -0.056854248046875, 0.02899169921875, 0.028289794921875, -0.050323486328125, 
0.00417327880859375, 0.0002925395965576172, 0.0187225341796875, -0.040740966796875, -0.0147247314453125, 0.0232086181640625, 0.01520538330078125, -0.034393310546875, 0.07073974609375, -0.00084686279296875, 0.0443115234375, -0.051544189453125, 0.0113677978515625, -0.017791748046875, 0.0184478759765625, -0.01558685302734375, -0.036224365234375, -0.0083465576171875, 0.0029754638671875, -0.0121612548828125, 0.007167816162109375, 0.0479736328125, -0.0207672119140625, -0.06298828125, 0.01708984375, 0.00420379638671875, 0.0184326171875, 0.014495849609375, -0.044647216796875, 0.0105438232421875, 0.0008401870727539062, -0.04510498046875, 0.0011606216430664062, 0.0144805908203125, 0.005096435546875, 0.05963134765625, 0.047515869140625, -0.0031414031982421875, 0.030670166015625, -0.010986328125, 0.06634521484375, -0.044586181640625, -0.0185699462890625, -0.059722900390625, 0.044830322265625, -0.0152740478515625, -0.03643798828125, 0.06292724609375, 0.04132080078125, 0.08251953125, -0.0140533447265625, 0.06640625, -0.0171966552734375, -0.009674072265625, -0.05413818359375, 0.050079345703125, -0.05035400390625, 0.0123443603515625, -0.020294189453125, -0.059295654296875, -0.0106201171875, 0.08270263671875, -0.0244140625, 0.008514404296875, 0.041290283203125, 0.0802001953125, -0.01439666748046875, -0.0265960693359375, 0.00888824462890625, 0.0164642333984375, 0.04388427734375, 0.035858154296875, 0.04583740234375, -0.059356689453125, 0.05743408203125, -0.039520263671875, -0.01088714599609375, -0.02081298828125, -0.054595947265625, -0.08612060546875, -0.0345458984375, -0.022064208984375, -0.06988525390625, -0.0021228790283203125, 0.0643310546875, 0.050506591796875, -0.036529541015625, -0.01149749755859375, 0.006397247314453125, 0.01013946533203125, -0.01436614990234375, -0.0129241943359375, 0.04583740234375, 0.0012426376342773438, -0.0711669921875, 0.014007568359375, 0.0014257431030273438, 0.042236328125, 0.0027103424072265625, -0.00762176513671875, -0.0008015632629394531, 
0.014678955078125, 0.03814697265625, 0.033599853515625, -0.043548583984375, -0.0090179443359375, 0.0102691650390625, -0.026702880859375, 0.0083770751953125, 0.020294189453125, -0.0361328125, 0.02984619140625, 0.038299560546875, 0.0023899078369140625, 0.055938720703125, -0.006412506103515625, 0.0265045166015625, -0.0196075439453125, 0.034027099609375, -0.015228271484375, 0.046234130859375, 0.01953125, -0.033203125, 0.02471923828125, 0.0301055908203125, -0.040679931640625, -0.053924560546875, -0.004138946533203125, -0.09344482421875, -0.011138916015625, 0.10107421875, -0.01317596435546875, -0.044586181640625, 0.0207977294921875, -0.030609130859375, 0.038818359375, -0.02227783203125, 0.06573486328125, 0.037139892578125, -0.004878997802734375, 0.003513336181640625, -0.043212890625, 0.021240234375, 0.0034961700439453125, -0.057403564453125, -0.02130126953125, 0.022705078125, 0.048553466796875, 0.0017871856689453125, 0.05035400390625, 0.0007166862487792969, 0.02667236328125, 0.00481414794921875, 0.026458740234375, -0.01959228515625, -0.01654052734375, -0.005123138427734375, -0.00325775146484375, -0.014434814453125, -0.034698486328125 ] ]
gsdf/CounterfeitXL
2023-08-23T21:11:27.000Z
[ "diffusers", "license:creativeml-openrail-m", "endpoints_compatible", "diffusers:StableDiffusionXLPipeline", "region:us" ]
null
gsdf
null
null
gsdf/CounterfeitXL
74
5,809
diffusers
2023-07-29T07:44:35
--- license: creativeml-openrail-m --- Civitai https://civitai.com/models/118406/counterfeitxl Negative Embeddings A:Standard B:Realistic C:Anime like
161
[ [ -0.0249786376953125, -0.06671142578125, 0.027099609375, 0.03009033203125, -0.03741455078125, 0.009368896484375, 0.03070068359375, -0.03765869140625, 0.06982421875, 0.036865234375, -0.038055419921875, -0.02978515625, -0.01342010498046875, -0.01149749755859375, -0.04693603515625, 0.020050048828125, -0.006572723388671875, 0.039642333984375, -0.0190582275390625, -0.012603759765625, -0.031707763671875, -0.04742431640625, -0.0672607421875, -0.03558349609375, 0.02960205078125, 0.0276641845703125, 0.06439208984375, 0.034759521484375, 0.003997802734375, 0.007266998291015625, 0.042999267578125, -0.020751953125, -0.036102294921875, -0.0067138671875, -0.009521484375, -0.0377197265625, -0.043914794921875, 0.018585205078125, -0.001598358154296875, 0.004154205322265625, -0.01421356201171875, 0.0255584716796875, -0.0222015380859375, 0.006961822509765625, -0.0504150390625, -0.010467529296875, 0.0270233154296875, 0.0228729248046875, -0.008331298828125, 0.0110931396484375, -0.034332275390625, -0.054473876953125, -0.00029540061950683594, -0.081298828125, 0.00848388671875, 0.0017404556274414062, 0.09381103515625, 0.00255584716796875, -0.030548095703125, -0.038818359375, -0.0430908203125, 0.051055908203125, -0.03564453125, 0.0477294921875, 0.023193359375, 0.036224365234375, 0.007259368896484375, -0.053192138671875, -0.0396728515625, -0.0017881393432617188, -0.006763458251953125, 0.0202789306640625, -0.03009033203125, -0.011077880859375, 0.024017333984375, 0.034942626953125, -0.033477783203125, 0.0218048095703125, -0.011810302734375, -0.00426483154296875, 0.039886474609375, 0.0088958740234375, 0.050048828125, 0.0157318115234375, -0.0206756591796875, 0.0016450881958007812, -0.08197021484375, -0.01904296875, 0.054229736328125, -0.03466796875, -0.0175323486328125, 0.0391845703125, -0.01158905029296875, 0.032623291015625, 0.0158843994140625, 0.00908660888671875, 0.03216552734375, -0.005558013916015625, -0.0223388671875, 0.001338958740234375, 0.01047515869140625, 0.0567626953125, 
0.04315185546875, -0.00789642333984375, -0.0084991455078125, 0.01122283935546875, 0.0231781005859375, -0.08636474609375, -0.0225677490234375, 0.0245361328125, -0.05712890625, -0.034332275390625, 0.0109710693359375, -0.03131103515625, -0.0184173583984375, -0.03167724609375, 0.05169677734375, -0.034881591796875, -0.0202789306640625, 0.00042891502380371094, -0.0298004150390625, 0.0206756591796875, 0.029998779296875, -0.033172607421875, 0.02740478515625, 0.0338134765625, 0.045318603515625, 0.017425537109375, 0.01605224609375, 0.001983642578125, 0.0013437271118164062, -0.041778564453125, 0.049102783203125, -0.0276641845703125, -0.03717041015625, 0.016815185546875, 0.0175933837890625, 0.02667236328125, -0.02740478515625, 0.040863037109375, -0.045135498046875, -0.01306915283203125, -0.024688720703125, -0.046661376953125, -0.055328369140625, 0.034332275390625, -0.053802490234375, 0.0205535888671875, 0.001552581787109375, -0.0330810546875, 0.04058837890625, -0.0267181396484375, -0.0121002197265625, 0.01255035400390625, -0.0013713836669921875, -0.0206756591796875, 0.0089263916015625, -0.02410888671875, 0.026031494140625, -0.0054168701171875, 0.01143646240234375, -0.06524658203125, -0.0555419921875, 0.0212860107421875, -0.051788330078125, 0.0682373046875, 0.04022216796875, -0.016632080078125, -0.0184783935546875, -0.08258056640625, 0.0028896331787109375, 0.0128173828125, 0.02874755859375, -0.041168212890625, -0.00286865234375, 0.0124969482421875, -0.0014171600341796875, 0.0254364013671875, -0.0672607421875, 0.0242156982421875, -0.00040841102600097656, 0.014739990234375, 0.01568603515625, 0.04388427734375, -0.007808685302734375, -0.049072265625, 0.052520751953125, 0.0165252685546875, 0.0303955078125, 0.0097808837890625, -0.02459716796875, -0.06640625, -0.02557373046875, 0.035430908203125, 0.060302734375, -0.06103515625, 0.010772705078125, -0.0003993511199951172, -0.063232421875, -0.04522705078125, -0.0112457275390625, 0.03192138671875, 0.0015125274658203125, 0.011871337890625, 
-0.027008056640625, -0.034149169921875, -0.09332275390625, 0.00267791748046875, 0.0185546875, -0.01953125, 0.0243072509765625, 0.03668212890625, -0.0266265869140625, 0.032623291015625, -0.04461669921875, -0.016693115234375, 0.01430511474609375, -0.000579833984375, 0.0352783203125, 0.054290771484375, 0.071533203125, -0.06427001953125, -0.0557861328125, -0.036376953125, -0.0560302734375, 0.007160186767578125, -0.0003733634948730469, -0.00908660888671875, -0.002948760986328125, 0.029144287109375, -0.034210205078125, 0.07513427734375, 0.0333251953125, -0.0269927978515625, 0.01702880859375, -0.03009033203125, 0.0382080078125, -0.0853271484375, 0.02313232421875, 0.0002574920654296875, -0.01690673828125, -0.04693603515625, 0.0430908203125, 0.0077667236328125, -0.01361846923828125, -0.07366943359375, 0.033843994140625, -0.034942626953125, 0.0167694091796875, -0.0008511543273925781, -0.0005826950073242188, -0.0006847381591796875, 0.0230560302734375, 0.02496337890625, 0.04693603515625, 0.0273284912109375, -0.034912109375, 0.0440673828125, 0.01297760009765625, -0.0205230712890625, 0.05712890625, -0.08111572265625, 0.00787353515625, -0.00315093994140625, 0.017425537109375, -0.042694091796875, -0.0200653076171875, 0.032562255859375, -0.0204620361328125, 0.0252227783203125, -0.0206756591796875, -0.06451416015625, -0.015350341796875, -0.0423583984375, 0.060638427734375, 0.045166015625, -0.05517578125, 0.0171966552734375, 0.00875091552734375, 0.032379150390625, -0.0160980224609375, -0.057891845703125, 0.0122528076171875, -0.017822265625, -0.003314971923828125, 0.0227203369140625, -0.0107421875, -0.0280609130859375, 0.004032135009765625, 0.03985595703125, -0.021484375, -0.0029773712158203125, 0.02215576171875, 0.0019502639770507812, -0.0276641845703125, -0.0053253173828125, 0.01296234130859375, 0.002288818359375, -0.0160675048828125, 0.01922607421875, 0.03741455078125, -0.006008148193359375, -0.0379638671875, -0.0460205078125, 0.039520263671875, 0.05242919921875, 
0.0091094970703125, 0.039276123046875, 0.0274810791015625, -0.0758056640625, 0.00730133056640625, -0.0511474609375, 0.006191253662109375, -0.0333251953125, -0.01087188720703125, -0.046539306640625, -0.0179901123046875, 0.0482177734375, 0.0216217041015625, -0.01910400390625, 0.026214599609375, 0.0222015380859375, 0.00504302978515625, 0.09161376953125, 0.03045654296875, 0.0193939208984375, 0.00952911376953125, -0.04266357421875, 0.0030536651611328125, -0.045196533203125, -0.0199737548828125, -0.01107025146484375, -0.02789306640625, -0.0579833984375, -0.0221405029296875, -0.0079498291015625, 0.01336669921875, -0.049957275390625, 0.046661376953125, -0.0194244384765625, 0.05596923828125, 0.058807373046875, 0.0175933837890625, 0.01220703125, -0.019744873046875, 0.01152801513671875, -0.00600433349609375, -0.0296630859375, -0.052490234375, 0.06878662109375, 0.033355712890625, 0.06951904296875, 0.040069580078125, 0.047760009765625, 0.0164947509765625, 0.05828857421875, -0.046630859375, 0.007038116455078125, -0.0531005859375, -0.08819580078125, 0.007480621337890625, 0.0168914794921875, -0.055084228515625, -0.007366180419921875, -0.049041748046875, -0.048736572265625, 0.054473876953125, 0.0186004638671875, -0.028594970703125, 0.0280609130859375, -0.058929443359375, 0.058807373046875, -0.006404876708984375, -0.02093505859375, -0.0202484130859375, -0.0128021240234375, 0.0231475830078125, 0.009613037109375, 0.00737762451171875, 0.028564453125, 0.0010776519775390625, 0.01480865478515625, -0.0188751220703125, 0.0638427734375, -0.016448974609375, 0.007843017578125, 0.0426025390625, 0.045074462890625, -0.0020732879638671875, 0.051666259765625, 0.0016069412231445312, 0.00846099853515625, 0.0178985595703125, -0.0516357421875, -0.036163330078125, 0.06549072265625, -0.05975341796875, -0.039825439453125, -0.050750732421875, 0.007457733154296875, 0.005649566650390625, 0.0307159423828125, 0.0528564453125, 0.080810546875, -0.05419921875, 0.046630859375, 0.05828857421875, -0.031890869140625, 
0.0316162109375, 0.03448486328125, -0.026763916015625, -0.0360107421875, 0.07666015625, 0.02606201171875, 0.006443023681640625, 0.0352783203125, 0.0027065277099609375, -0.031890869140625, -0.019500732421875, -0.060394287109375, 0.026763916015625, -0.023193359375, -0.0167694091796875, -0.01462554931640625, -0.035675048828125, -0.0230865478515625, -0.0257415771484375, -0.0278472900390625, -0.04632568359375, -0.0650634765625, -0.01091766357421875, 0.007778167724609375, 0.058013916015625, -0.0010223388671875, 0.01291656494140625, -0.0579833984375, 0.030670166015625, 0.028564453125, 0.04833984375, 0.0027923583984375, -0.0313720703125, -0.00015020370483398438, 0.0224761962890625, -0.04815673828125, -0.051361083984375, 0.032379150390625, 0.002246856689453125, 0.040618896484375, 0.077392578125, 0.004901885986328125, 0.0611572265625, -0.041259765625, 0.066650390625, 0.03448486328125, -0.06549072265625, 0.0289459228515625, -0.03240966796875, -0.0014209747314453125, 0.050689697265625, 0.06671142578125, -0.0250396728515625, -0.01462554931640625, -0.082763671875, -0.029083251953125, -0.017120361328125, 0.005550384521484375, 0.0308380126953125, -0.005786895751953125, 0.040985107421875, 0.024627685546875, 0.024749755859375, -0.05828857421875, -0.0275421142578125, -0.052032470703125, -0.01512908935546875, -0.027618408203125, -0.0269012451171875, 0.004413604736328125, -0.011932373046875, 0.0382080078125, 0.02197265625, 0.0196990966796875, 0.024322509765625, -0.0261077880859375, 0.005290985107421875, -0.00464630126953125, 0.07696533203125, 0.043365478515625, -0.040435791015625, 0.00478363037109375, -0.0013208389282226562, -0.03887939453125, 0.03485107421875, -0.0258026123046875, -0.035186767578125, 0.002140045166015625, 0.034332275390625, 0.03948974609375, 0.00513458251953125, -0.018402099609375, 0.052276611328125, -0.0037822723388671875, -0.00669097900390625, -0.0626220703125, 0.01629638671875, 0.0009007453918457031, 0.0031032562255859375, -0.00115966796875, 0.00846099853515625, 
0.0482177734375, -0.072021484375, 0.0202178955078125, -0.0007839202880859375, -0.05352783203125, -0.04718017578125, 0.052032470703125, 0.027374267578125, -0.045745849609375, 0.062408447265625, -0.038299560546875, -0.023712158203125, 0.027191162109375, 0.049102783203125, 0.062103271484375, -0.0194549560546875, 0.0335693359375, 0.048370361328125, 0.006103515625, 0.00030684471130371094, 0.0192413330078125, 0.0198516845703125, -0.0457763671875, -0.0013189315795898438, -0.00939178466796875, -0.055877685546875, 0.01088714599609375, -0.0601806640625, 0.04876708984375, -0.0621337890625, 0.0005578994750976562, -0.0281982421875, 0.00795745849609375, -0.04840087890625, 0.05841064453125, 0.0032787322998046875, 0.092529296875, -0.06695556640625, 0.08203125, 0.053863525390625, -0.055328369140625, -0.044921875, -0.03057861328125, 0.000598907470703125, -0.04937744140625, 0.0179901123046875, 0.0178070068359375, 0.00980377197265625, -0.007678985595703125, -0.041015625, -0.06842041015625, 0.082275390625, 0.008056640625, -0.04925537109375, 0.00675201416015625, -0.0298309326171875, 0.065673828125, -0.041839599609375, 0.035247802734375, 0.032562255859375, 0.016204833984375, 0.01157379150390625, -0.0132293701171875, 0.0094757080078125, -0.0462646484375, 0.003849029541015625, 0.0016613006591796875, -0.079833984375, 0.040069580078125, 0.02679443359375, -0.0230560302734375, 0.042083740234375, 0.03570556640625, 0.02069091796875, 0.048583984375, 0.036956787109375, 0.08038330078125, 0.0049591064453125, -0.0026531219482421875, 0.07373046875, -0.016082763671875, 0.004180908203125, 0.06451416015625, 0.005535125732421875, 0.057098388671875, 0.03338623046875, 0.00579071044921875, 0.06695556640625, 0.0731201171875, -0.043914794921875, 0.039764404296875, 0.0280303955078125, 0.0006036758422851562, -0.0102081298828125, -0.006137847900390625, -0.035400390625, 0.016845703125, 0.033111572265625, 0.00016987323760986328, 0.01251220703125, -0.00463104248046875, -0.0088958740234375, 0.00833892822265625, 
-0.01244354248046875, 0.036956787109375, 0.0174713134765625, -0.03131103515625, 0.0232086181640625, -0.01291656494140625, 0.036163330078125, -0.0150604248046875, -0.033355712890625, -0.0272369384765625, 0.017974853515625, -0.0096282958984375, -0.043243408203125, 0.016387939453125, -0.01430511474609375, -0.007251739501953125, -0.0135498046875, 0.033538818359375, -0.0170745849609375, -0.044464111328125, 0.01067352294921875, -0.00783538818359375, 0.01183319091796875, 0.01953125, -0.04644775390625, -0.014404296875, -0.0157623291015625, -0.0194091796875, -0.0049591064453125, 0.01355743408203125, -0.01165771484375, 0.05694580078125, 0.0222320556640625, 0.0178985595703125, -0.02154541015625, 0.002185821533203125, 0.01229095458984375, -0.034393310546875, -0.045684814453125, -0.0310516357421875, 0.01904296875, -0.030548095703125, -0.043609619140625, 0.06494140625, 0.06475830078125, 0.04803466796875, -0.046112060546875, 0.007129669189453125, 0.017578125, 0.0372314453125, -0.0269622802734375, 0.06884765625, -0.044281005859375, -0.0184478759765625, -0.0164031982421875, -0.07989501953125, -0.046630859375, 0.046142578125, 0.02081298828125, -0.0233917236328125, 0.03924560546875, 0.042572021484375, -0.030670166015625, 0.007053375244140625, 0.037994384765625, 0.0215606689453125, 0.022125244140625, 0.007556915283203125, 0.06231689453125, -0.04547119140625, 0.001644134521484375, -0.022186279296875, -0.0233917236328125, -0.0255279541015625, -0.0682373046875, -0.05694580078125, -0.0175323486328125, -0.04046630859375, -0.0159759521484375, -0.0210418701171875, 0.051513671875, 0.0611572265625, -0.078125, -0.02899169921875, 0.00963592529296875, 0.0022487640380859375, 0.006195068359375, -0.01611328125, 0.0179290771484375, 0.0469970703125, -0.060302734375, -0.00972747802734375, 0.00415802001953125, 0.0494384765625, -0.0243377685546875, 0.01152801513671875, -0.005413055419921875, 0.0478515625, 0.042572021484375, 0.025634765625, -0.04937744140625, -0.07177734375, -0.01153564453125, 
0.004329681396484375, -0.0264892578125, 0.047332763671875, -0.01064300537109375, 0.021331787109375, 0.03515625, 0.008758544921875, 0.056304931640625, 0.01168060302734375, 0.04718017578125, -0.032470703125, 0.01666259765625, 0.020721435546875, 0.0311126708984375, 0.00522613525390625, -0.030792236328125, 0.04400634765625, 0.032562255859375, -0.0212860107421875, -0.052032470703125, 0.0247344970703125, -0.072021484375, -0.019317626953125, 0.048126220703125, -0.027191162109375, -0.021759033203125, 0.00331878662109375, -0.0193023681640625, 0.007236480712890625, -0.00391387939453125, 0.043304443359375, 0.056060791015625, 0.0117950439453125, -0.001956939697265625, -0.01214599609375, 0.002330780029296875, -0.0139007568359375, -0.027679443359375, -0.056243896484375, 0.0211334228515625, 0.041107177734375, 0.036773681640625, 0.01535797119140625, -0.0364990234375, 0.04388427734375, 0.0249176025390625, 0.0039043426513671875, -0.0038318634033203125, -0.0192718505859375, -0.0006351470947265625, 0.001392364501953125, -0.00475311279296875, -0.029571533203125 ] ]
NewstaR/Starlight-7B
2023-09-17T17:00:36.000Z
[ "transformers", "pytorch", "llama", "text-generation", "llama2", "en", "dataset:FinchResearch/AboveTheClouds", "arxiv:1803.05457", "arxiv:1905.07830", "arxiv:2009.03300", "arxiv:2109.07958", "license:other", "endpoints_compatible", "text-generation-inference", "region:us" ]
text-generation
NewstaR
null
null
NewstaR/Starlight-7B
0
5,808
transformers
2023-09-11T12:57:21
--- datasets: - FinchResearch/AboveTheClouds language: - en tags: - llama - llama2 license: other --- # Starlight (7B) | Model | Average ⬆️ | ARC | HellaSwag | MMLU | TruthfulQA | |----------------------|------------|-------|-----------|-------|------------| | NewstaR/Starlight-13B| 58.63 | 59.3 | 82.15 | 55.67 | 37.39 | | NewstaR/Starlight-7B | 54.3 | 53.07 | 78.57 | 46.8 | 38.75 | ## The model follows the Alpaca template: ``` ### Instruction: {prompt} ### Response: ``` ## Example: ``` ### Instruction: Summarize the key details of the Starlight model in a few sentences. ### Response: Starlight is a 7B parameter transformer model trained on the AverageData and Above the Clouds datasets for conversational text generation. It has strong language modeling capabilities but lacks true language understanding and may generate incorrect or biased text, so outputs should be monitored and safeguards implemented. The model is intended for use in chatbots and content creation applications. ``` ## Instructions for Safe Use - Avoid exposing Starlight to offensive, unethical, dangerous or illegal prompts - Monitor outputs for signs of bias, toxicity or factual incorrectness - Do not rely on Starlight for high-stakes or safety critical applications ## Limitations - May hallucinate or generate incorrect information - Large model size leads to high compute requirements ``` @misc{open-llm-leaderboard, author = {Edward Beeching, Clémentine Fourrier, Nathan Habib, Sheon Han, Nathan Lambert, Nazneen Rajani, Omar Sanseviero, Lewis Tunstall, Thomas Wolf}, title = {Open LLM Leaderboard}, year = {2023}, publisher = {Hugging Face}, howpublished = "\url{https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard}" } ``` ``` @software{eval-harness, author = {Gao, Leo and Tow, Jonathan and Biderman, Stella and Black, Sid and DiPofi, Anthony and Foster, Charles and Golding, Laurence and Hsu, Jeffrey and McDonell, Kyle and Muennighoff, Niklas and Phang, Jason and Reynolds, Laria and Tang, 
Eric and Thite, Anish and Wang, Ben and Wang, Kevin and Zou, Andy}, title = {A framework for few-shot language model evaluation}, month = sep, year = 2021, publisher = {Zenodo}, version = {v0.0.1}, doi = {10.5281/zenodo.5371628}, url = {https://doi.org/10.5281/zenodo.5371628} } ``` ``` @misc{clark2018think, title={Think you have Solved Question Answering? Try ARC, the AI2 Reasoning Challenge}, author={Peter Clark and Isaac Cowhey and Oren Etzioni and Tushar Khot and Ashish Sabharwal and Carissa Schoenick and Oyvind Tafjord}, year={2018}, eprint={1803.05457}, archivePrefix={arXiv}, primaryClass={cs.AI} } ``` ``` @misc{zellers2019hellaswag, title={HellaSwag: Can a Machine Really Finish Your Sentence?}, author={Rowan Zellers and Ari Holtzman and Yonatan Bisk and Ali Farhadi and Yejin Choi}, year={2019}, eprint={1905.07830}, archivePrefix={arXiv}, primaryClass={cs.CL} } ``` ``` @misc{hendrycks2021measuring, title={Measuring Massive Multitask Language Understanding}, author={Dan Hendrycks and Collin Burns and Steven Basart and Andy Zou and Mantas Mazeika and Dawn Song and Jacob Steinhardt}, year={2021}, eprint={2009.03300}, archivePrefix={arXiv}, primaryClass={cs.CY} } ``` ``` @misc{lin2022truthfulqa, title={TruthfulQA: Measuring How Models Mimic Human Falsehoods}, author={Stephanie Lin and Jacob Hilton and Owain Evans}, year={2022}, eprint={2109.07958}, archivePrefix={arXiv}, primaryClass={cs.CL} } ```
3,990
[ [ -0.0360107421875, -0.06829833984375, 0.0310516357421875, 0.0106353759765625, -0.00921630859375, -0.021087646484375, -0.0166473388671875, -0.04888916015625, 0.006687164306640625, 0.04632568359375, -0.04241943359375, -0.051025390625, -0.045166015625, -0.000926971435546875, -0.0116119384765625, 0.07366943359375, -0.0035552978515625, -0.0251312255859375, -0.01384735107421875, -0.020233154296875, -0.0242767333984375, -0.0269927978515625, -0.032073974609375, -0.0185089111328125, 0.021942138671875, 0.03167724609375, 0.0606689453125, 0.04052734375, 0.055328369140625, 0.0227508544921875, -0.021392822265625, 0.0145111083984375, -0.031402587890625, -0.01120758056640625, 0.0115814208984375, -0.044219970703125, -0.054931640625, 0.0164031982421875, 0.033905029296875, 0.04595947265625, -0.0033721923828125, 0.04327392578125, 0.0010433197021484375, 0.035400390625, -0.035125732421875, 0.023345947265625, -0.036865234375, -0.0036830902099609375, -0.0217742919921875, 0.00927734375, -0.028961181640625, -0.034393310546875, -0.00841522216796875, -0.03546142578125, 0.009613037109375, 0.036285400390625, 0.0869140625, 0.033203125, -0.01309967041015625, -0.00408172607421875, -0.04901123046875, 0.054718017578125, -0.07977294921875, 0.01514434814453125, 0.0263671875, 0.0243682861328125, -0.0034961700439453125, -0.051605224609375, -0.071044921875, -0.0032711029052734375, -0.01427459716796875, 0.0277557373046875, -0.037689208984375, 0.00502777099609375, 0.0226593017578125, 0.032073974609375, -0.054290771484375, 0.0104827880859375, -0.0285186767578125, -0.01232147216796875, 0.04461669921875, 0.01123046875, 0.0135345458984375, -0.027618408203125, -0.037811279296875, -0.0184478759765625, -0.043914794921875, 0.0311279296875, 0.0230560302734375, 0.01532745361328125, -0.0261993408203125, 0.05438232421875, -0.0037136077880859375, 0.03875732421875, -0.00563812255859375, -0.0141143798828125, 0.040130615234375, -0.0301361083984375, -0.01776123046875, -0.0164031982421875, 0.07647705078125, 
0.048309326171875, 0.007114410400390625, -0.0003914833068847656, 0.0032367706298828125, 0.005584716796875, 0.0020236968994140625, -0.059112548828125, 0.0093841552734375, 0.032257080078125, -0.038421630859375, -0.009735107421875, 0.0099945068359375, -0.055938720703125, -0.016326904296875, 0.00911712646484375, 0.016632080078125, -0.0230560302734375, -0.0197906494140625, 0.008453369140625, 0.018096923828125, 0.0304412841796875, 0.0089263916015625, -0.05328369140625, 0.023162841796875, 0.035064697265625, 0.06048583984375, -0.00550079345703125, -0.037841796875, -0.0257720947265625, -0.0206451416015625, -0.03656005859375, 0.058380126953125, -0.01922607421875, -0.0084991455078125, -0.0207366943359375, -0.00618743896484375, -0.0184783935546875, -0.04144287109375, 0.049774169921875, -0.0219879150390625, 0.029022216796875, -0.016815185546875, -0.038848876953125, -0.0306549072265625, 0.0106048583984375, -0.03936767578125, 0.09075927734375, 0.0091705322265625, -0.04638671875, 0.0007872581481933594, -0.03594970703125, 0.00487518310546875, -0.0081024169921875, -0.006534576416015625, -0.048309326171875, 0.0031490325927734375, 0.0160369873046875, 0.030181884765625, -0.03753662109375, 0.01067352294921875, -0.0215911865234375, -0.0194549560546875, 0.0158233642578125, -0.0203094482421875, 0.0684814453125, 0.0167236328125, -0.037689208984375, 0.013458251953125, -0.052215576171875, 0.01177978515625, 0.02166748046875, -0.0032367706298828125, -0.004253387451171875, -0.018341064453125, -0.005157470703125, 0.029083251953125, 0.02191162109375, -0.04595947265625, 0.0160064697265625, -0.042877197265625, 0.005397796630859375, 0.05029296875, 0.005615234375, 0.0154876708984375, -0.0276031494140625, 0.047943115234375, 0.00885009765625, 0.0166168212890625, -0.005153656005859375, -0.0557861328125, -0.0869140625, 0.0004432201385498047, 0.00476837158203125, 0.053314208984375, -0.046905517578125, 0.0643310546875, 0.0058746337890625, -0.058624267578125, -0.062469482421875, 0.0140228271484375, 
0.054046630859375, 0.057586669921875, 0.05120849609375, -0.01271820068359375, -0.033843994140625, -0.080078125, -0.00989532470703125, -0.02813720703125, 0.016510009765625, 0.0413818359375, 0.03948974609375, -0.007110595703125, 0.0767822265625, -0.059906005859375, -0.02203369140625, -0.035614013671875, -0.0027675628662109375, 0.033599853515625, 0.029388427734375, 0.04736328125, -0.0384521484375, -0.0235137939453125, -0.0139007568359375, -0.0780029296875, -0.0208892822265625, -0.0031032562255859375, -0.026519775390625, 0.0199737548828125, 0.0221099853515625, -0.0579833984375, 0.0374755859375, 0.04388427734375, -0.061553955078125, 0.05377197265625, -0.0005450248718261719, 0.01418304443359375, -0.08935546875, 0.0214385986328125, 0.0100860595703125, 0.0020923614501953125, -0.049774169921875, 0.00949859619140625, -0.00885772705078125, -0.0033321380615234375, -0.051025390625, 0.058197021484375, -0.0313720703125, 0.0008139610290527344, -0.01091766357421875, 0.01320648193359375, -0.0163421630859375, 0.05523681640625, -0.0174713134765625, 0.053070068359375, 0.0462646484375, -0.02642822265625, 0.0251617431640625, 0.03289794921875, -0.037628173828125, 0.0238037109375, -0.056640625, 0.010345458984375, 0.00264739990234375, 0.02105712890625, -0.072998046875, -0.003208160400390625, 0.029022216796875, -0.03839111328125, -0.0122222900390625, 0.0165557861328125, -0.0362548828125, -0.04766845703125, -0.00628662109375, 0.0196380615234375, 0.04913330078125, -0.0293731689453125, 0.02679443359375, 0.01049041748046875, -0.015594482421875, -0.04345703125, -0.045166015625, 0.0002117156982421875, -0.01416778564453125, -0.06298828125, 0.02313232421875, -0.0232696533203125, -0.03033447265625, -0.0059967041015625, 0.0032215118408203125, 0.007671356201171875, -0.0029163360595703125, 0.0199432373046875, 0.040283203125, -0.0208892822265625, -0.0111541748046875, -0.005035400390625, -0.0303802490234375, -0.003093719482421875, 0.010223388671875, 0.043304443359375, -0.03466796875, -0.028045654296875, 
-0.0367431640625, 0.026947021484375, 0.038543701171875, -0.0292510986328125, 0.05560302734375, 0.04345703125, -0.03509521484375, -0.0047760009765625, -0.0421142578125, -0.006542205810546875, -0.035614013671875, 0.0263519287109375, -0.0285491943359375, -0.05963134765625, 0.06719970703125, 0.02203369140625, 0.01904296875, 0.05853271484375, 0.054931640625, 0.00792694091796875, 0.0689697265625, 0.056732177734375, 0.0160369873046875, 0.0168304443359375, -0.0391845703125, 0.021484375, -0.07122802734375, -0.029815673828125, -0.041015625, -0.0312347412109375, -0.027008056640625, -0.03570556640625, 0.030120849609375, 0.034942626953125, -0.02484130859375, 0.0438232421875, -0.035797119140625, 0.0288238525390625, 0.043609619140625, 0.013092041015625, 0.0298614501953125, -0.0263214111328125, -0.0094757080078125, -0.003330230712890625, -0.045623779296875, -0.046875, 0.05859375, 0.0293121337890625, 0.052093505859375, 0.0232391357421875, 0.0516357421875, -0.00909423828125, 0.01007080078125, -0.04779052734375, 0.05474853515625, -0.00014412403106689453, -0.0465087890625, -0.01490020751953125, -0.0250244140625, -0.0640869140625, 0.0196380615234375, -0.00797271728515625, -0.0732421875, 0.01300811767578125, 0.005229949951171875, -0.044769287109375, 0.020355224609375, -0.052276611328125, 0.0775146484375, -0.01800537109375, -0.00954437255859375, 0.01007843017578125, -0.05474853515625, 0.035675048828125, 0.0196380615234375, 0.00934600830078125, -0.02392578125, -0.006824493408203125, 0.07342529296875, -0.045379638671875, 0.06500244140625, -0.0015611648559570312, -0.0037555694580078125, 0.049346923828125, -0.0057373046875, 0.035247802734375, -0.0031528472900390625, -0.00677490234375, 0.02557373046875, -0.022247314453125, -0.0266265869140625, -0.04241943359375, 0.04296875, -0.0628662109375, -0.0496826171875, -0.0343017578125, -0.034881591796875, -0.0024127960205078125, 0.0288848876953125, 0.04473876953125, 0.0177001953125, 0.0004820823669433594, 0.00940704345703125, 0.033660888671875, 
-0.01629638671875, 0.0579833984375, 0.037872314453125, -0.0251922607421875, -0.0288238525390625, 0.060302734375, 0.01332855224609375, 0.0286407470703125, -0.00119781494140625, 0.01432037353515625, -0.0140228271484375, -0.034576416015625, -0.040618896484375, 0.03179931640625, -0.04412841796875, -0.0240020751953125, -0.049713134765625, -0.0306854248046875, -0.0230255126953125, -0.0088348388671875, -0.041717529296875, -0.026611328125, -0.032806396484375, -0.01171875, 0.0276947021484375, 0.038848876953125, 0.0242767333984375, 0.0181732177734375, -0.03802490234375, 0.0231781005859375, 0.00787353515625, 0.03497314453125, 0.004161834716796875, -0.04327392578125, -0.0194549560546875, -0.006626129150390625, -0.02801513671875, -0.064453125, 0.026763916015625, 0.007152557373046875, 0.048492431640625, 0.01181793212890625, 0.0193634033203125, 0.042236328125, -0.029083251953125, 0.08197021484375, 0.0290679931640625, -0.06097412109375, 0.046234130859375, -0.00806427001953125, 0.0289306640625, 0.04278564453125, 0.0171661376953125, -0.04266357421875, -0.05133056640625, -0.066162109375, -0.06298828125, 0.052215576171875, 0.0360107421875, -0.00502777099609375, 0.003314971923828125, 0.021087646484375, 0.0029163360595703125, 0.0001322031021118164, -0.0667724609375, -0.0499267578125, -0.018280029296875, -0.0202178955078125, -0.0042572021484375, -0.0005216598510742188, -0.00522613525390625, -0.012359619140625, 0.0538330078125, -0.019989013671875, 0.041046142578125, 0.006298065185546875, -0.0028247833251953125, -0.006256103515625, 0.02032470703125, 0.048370361328125, 0.0408935546875, -0.017486572265625, 0.006511688232421875, 0.01157379150390625, -0.0399169921875, -0.0006961822509765625, 0.020538330078125, -0.0155181884765625, -0.00925445556640625, 0.04974365234375, 0.07525634765625, 0.0010213851928710938, -0.051727294921875, 0.044677734375, 0.00870513916015625, -0.00812530517578125, -0.01543426513671875, 0.0271453857421875, 0.0017404556274414062, 0.03790283203125, 0.040740966796875, 
0.018096923828125, -0.004329681396484375, -0.0347900390625, 0.0030117034912109375, 0.01837158203125, -0.03564453125, -0.0217437744140625, 0.06451416015625, 0.0089111328125, -0.02947998046875, 0.033294677734375, -0.01519775390625, -0.040985107421875, 0.06561279296875, 0.046661376953125, 0.06463623046875, -0.005859375, -0.0021800994873046875, 0.03387451171875, 0.02899169921875, -0.0010614395141601562, 0.026153564453125, 0.0301055908203125, -0.052734375, -0.007350921630859375, -0.047454833984375, -0.0269927978515625, 0.0120849609375, -0.033966064453125, 0.029815673828125, -0.0251922607421875, -0.011474609375, 0.0024929046630859375, 0.0060882568359375, -0.051239013671875, 0.0209808349609375, 0.010223388671875, 0.06842041015625, -0.0545654296875, 0.044952392578125, 0.034698486328125, -0.068115234375, -0.0675048828125, -0.00662994384765625, 0.01189422607421875, -0.0655517578125, 0.01357269287109375, 0.0180816650390625, -0.0092926025390625, 0.00785064697265625, -0.04736328125, -0.07135009765625, 0.099853515625, 0.026519775390625, -0.0283966064453125, -0.0088348388671875, -0.0038509368896484375, 0.038848876953125, -0.0309906005859375, 0.0438232421875, 0.04266357421875, 0.036346435546875, 0.0125274658203125, -0.07562255859375, 0.0167694091796875, -0.0396728515625, -0.004322052001953125, -0.0032978057861328125, -0.08514404296875, 0.06787109375, -0.03912353515625, -0.012939453125, 0.017242431640625, 0.06787109375, 0.02569580078125, 0.020233154296875, 0.026824951171875, 0.05316162109375, 0.05010986328125, -0.01245880126953125, 0.0799560546875, -0.02142333984375, 0.048583984375, 0.079345703125, -0.008697509765625, 0.073486328125, 0.0086822509765625, -0.0301055908203125, 0.0438232421875, 0.049774169921875, -0.0067138671875, 0.0193939208984375, -0.00855255126953125, -0.0031528472900390625, -0.00951385498046875, -0.0029087066650390625, -0.04412841796875, 0.044403076171875, 0.000629425048828125, -0.0298614501953125, -0.0118255615234375, -0.004154205322265625, 0.0191192626953125, 
-0.0015649795532226562, -0.004444122314453125, 0.055511474609375, 0.01493072509765625, -0.05816650390625, 0.0709228515625, 0.0022945404052734375, 0.050018310546875, -0.03717041015625, 0.0019512176513671875, -0.035675048828125, 0.01371002197265625, -0.0238800048828125, -0.062744140625, 0.0166015625, 0.01800537109375, -0.019775390625, 0.00214385986328125, 0.036346435546875, -0.0304718017578125, -0.052215576171875, 0.016693115234375, 0.047760009765625, 0.0222320556640625, 0.01506805419921875, -0.0560302734375, 0.0122222900390625, 0.00859832763671875, -0.042938232421875, 0.0028076171875, 0.0023975372314453125, 0.01107025146484375, 0.052886962890625, 0.038177490234375, -0.00725555419921875, 0.005535125732421875, 0.0006146430969238281, 0.05889892578125, -0.0643310546875, -0.04388427734375, -0.0615234375, 0.052490234375, -0.0035800933837890625, -0.071044921875, 0.06475830078125, 0.055389404296875, 0.045501708984375, 0.01922607421875, 0.051025390625, -0.0266265869140625, 0.04144287109375, -0.043670654296875, 0.064697265625, -0.037445068359375, 0.0231781005859375, -0.026641845703125, -0.08990478515625, -0.023651123046875, 0.0423583984375, -0.01885986328125, 0.00925445556640625, 0.06781005859375, 0.07196044921875, 0.003826141357421875, -0.0038700103759765625, 0.01157379150390625, 0.03802490234375, 0.023193359375, 0.060272216796875, 0.0295867919921875, -0.0509033203125, 0.052276611328125, -0.0296630859375, -0.03729248046875, -0.01128387451171875, -0.047882080078125, -0.06427001953125, -0.049713134765625, -0.02728271484375, -0.0389404296875, -0.01535797119140625, 0.06378173828125, 0.0433349609375, -0.055938720703125, -0.0180206298828125, 0.0023326873779296875, 0.007366180419921875, -0.03472900390625, -0.0208892822265625, 0.040069580078125, -0.002246856689453125, -0.049530029296875, 0.0202484130859375, 0.006893157958984375, 0.01050567626953125, -0.02667236328125, -0.0217132568359375, -0.031036376953125, 0.014556884765625, 0.033935546875, 0.0218658447265625, -0.0758056640625, 
-0.026275634765625, 0.0182037353515625, -0.0264739990234375, 0.015289306640625, 0.0197601318359375, -0.050506591796875, 0.0223846435546875, 0.0244140625, 0.0248870849609375, 0.0458984375, 0.0058746337890625, 0.0093231201171875, -0.04205322265625, 0.00827789306640625, 0.0221099853515625, 0.0242462158203125, 0.01708984375, -0.0247650146484375, 0.039337158203125, 0.02557373046875, -0.034210205078125, -0.0506591796875, 0.0123138427734375, -0.096435546875, -0.01302337646484375, 0.08917236328125, -0.00738525390625, -0.023345947265625, 0.0204315185546875, -0.0205841064453125, 0.0192718505859375, -0.0307159423828125, 0.04632568359375, 0.050201416015625, -0.034759521484375, -0.038604736328125, -0.04669189453125, 0.0214385986328125, 0.0212860107421875, -0.08685302734375, 0.0012884140014648438, 0.032989501953125, -0.006679534912109375, 0.0276641845703125, 0.0509033203125, 0.0025177001953125, 0.0243072509765625, 0.01213836669921875, 0.0178070068359375, -0.0167083740234375, -0.0218963623046875, -0.01055908203125, 0.008087158203125, -0.00927734375, -0.008758544921875 ] ]