You signed in with another tab or window. Reload to refresh your session.You signed out in another tab or window. Reload to refresh your session.You switched accounts on another tab or window. Reload to refresh your session.Dismiss alert
together fine-tuning create --training-file "$file_id" --model "meta-llama/Meta-Llama-3.1-8B-Instruct-Reference" > fine-tuning-job.json
message='The default LoRA rank for meta-llama/Meta-Llama-3.1-8B-Instruct-Reference has been changed to 64 as the max available.\nAlso, the default learning rate for LoRA fine-tuning has been changed to 0.001.'
message='The default LoRA rank for meta-llama/Meta-Llama-3.1-8B-Instruct-Reference has been changed to 64 as the max available.\nAlso, the default learning rate for LoRA fine-tuning has been changed to 0.001.'
message='Starting from together>=1.3.0, the default batch size is set to the maximum allowed value for each model.'
message='Starting from together>=1.3.0, the default batch size is set to the maximum allowed value for each model.'
Traceback (most recent call last):
File "/home/silvacarl/.local/bin/together", line 8, in <module>
sys.exit(main())
File "/home/silvacarl/.local/lib/python3.10/site-packages/click/core.py", line 1157, in __call__
return self.main(*args, **kwargs)
File "/home/silvacarl/.local/lib/python3.10/site-packages/click/core.py", line 1078, in main
rv = self.invoke(ctx)
File "/home/silvacarl/.local/lib/python3.10/site-packages/click/core.py", line 1688, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "/home/silvacarl/.local/lib/python3.10/site-packages/click/core.py", line 1688, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "/home/silvacarl/.local/lib/python3.10/site-packages/click/core.py", line 1434, in invoke
return ctx.invoke(self.callback, **ctx.params)
File "/home/silvacarl/.local/lib/python3.10/site-packages/click/core.py", line 783, in invoke
return __callback(*args, **kwargs)
File "/home/silvacarl/.local/lib/python3.10/site-packages/click/decorators.py", line 33, in new_func
return f(get_current_context(), *args, **kwargs)
File "/home/silvacarl/.local/lib/python3.10/site-packages/together/cli/api/finetune.py", line 239, in create
response = client.fine_tuning.create(
File "/home/silvacarl/.local/lib/python3.10/site-packages/together/resources/finetune.py", line 256, in create
response, _, _ = requestor.request(
File "/home/silvacarl/.local/lib/python3.10/site-packages/together/abstract/api_requestor.py", line 249, in request
resp, got_stream = self._interpret_response(result, stream)
File "/home/silvacarl/.local/lib/python3.10/site-packages/together/abstract/api_requestor.py", line 632, in _interpret_response
self._interpret_response_line(
File "/home/silvacarl/.local/lib/python3.10/site-packages/together/abstract/api_requestor.py", line 701, in _interpret_response_line
raise self.handle_error_response(resp, rcode, stream_error=stream)
together.error.InvalidRequestError: Error code: 404 - {"message": "Invalid finetune param: ", "type": "invalid_request_error", "param": "", "code": ""}
The text was updated successfully, but these errors were encountered:
Hi @silvacarl2 , can you verify that $file_id is set and not empty? I am able to reproduce this error when an empty string is passed into the training file.
Getting a strange error from: together fine-tuning create --training-file "$file_id" --model "meta-llama/Meta-Llama-3.1-8B-Instruct-Reference"
together fine-tuning create --training-file "$file_id" --model "meta-llama/Meta-Llama-3.1-8B-Instruct-Reference" > fine-tuning-job.json
message='The default LoRA rank for meta-llama/Meta-Llama-3.1-8B-Instruct-Reference has been changed to 64 as the max available.\nAlso, the default learning rate for LoRA fine-tuning has been changed to 0.001.'
message='The default LoRA rank for meta-llama/Meta-Llama-3.1-8B-Instruct-Reference has been changed to 64 as the max available.\nAlso, the default learning rate for LoRA fine-tuning has been changed to 0.001.'
message='Starting from together>=1.3.0, the default batch size is set to the maximum allowed value for each model.'
message='Starting from together>=1.3.0, the default batch size is set to the maximum allowed value for each model.'
Traceback (most recent call last):
File "/home/silvacarl/.local/bin/together", line 8, in <module>
sys.exit(main())
File "/home/silvacarl/.local/lib/python3.10/site-packages/click/core.py", line 1157, in __call__
return self.main(*args, **kwargs)
File "/home/silvacarl/.local/lib/python3.10/site-packages/click/core.py", line 1078, in main
rv = self.invoke(ctx)
File "/home/silvacarl/.local/lib/python3.10/site-packages/click/core.py", line 1688, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "/home/silvacarl/.local/lib/python3.10/site-packages/click/core.py", line 1688, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "/home/silvacarl/.local/lib/python3.10/site-packages/click/core.py", line 1434, in invoke
return ctx.invoke(self.callback, **ctx.params)
File "/home/silvacarl/.local/lib/python3.10/site-packages/click/core.py", line 783, in invoke
return __callback(*args, **kwargs)
File "/home/silvacarl/.local/lib/python3.10/site-packages/click/decorators.py", line 33, in new_func
return f(get_current_context(), *args, **kwargs)
File "/home/silvacarl/.local/lib/python3.10/site-packages/together/cli/api/finetune.py", line 239, in create
response = client.fine_tuning.create(
File "/home/silvacarl/.local/lib/python3.10/site-packages/together/resources/finetune.py", line 256, in create
response, _, _ = requestor.request(
File "/home/silvacarl/.local/lib/python3.10/site-packages/together/abstract/api_requestor.py", line 249, in request
resp, got_stream = self._interpret_response(result, stream)
File "/home/silvacarl/.local/lib/python3.10/site-packages/together/abstract/api_requestor.py", line 632, in _interpret_response
self._interpret_response_line(
File "/home/silvacarl/.local/lib/python3.10/site-packages/together/abstract/api_requestor.py", line 701, in _interpret_response_line
raise self.handle_error_response(resp, rcode, stream_error=stream)
together.error.InvalidRequestError: Error code: 404 - {"message": "Invalid finetune param: ", "type": "invalid_request_error", "param": "", "code": ""}
The text was updated successfully, but these errors were encountered: