| code (string, lengths 31 to 2.05k) | label_name (string, 5 classes) | label (int64, 0 to 4) |
|---|---|---|
public void setUpdateTime(Date updateTime) {
this.updateTime = updateTime;
} | Base | 1 |
public void setSort(Integer sort) {
this.sort = sort;
} | Base | 1 |
public void setId(Integer id) {
this.id = id;
} | Base | 1 |
public void setLinkUrl(String linkUrl) {
this.linkUrl = linkUrl;
} | Base | 1 |
public Long getUpdateUserId() {
return updateUserId;
} | Base | 1 |
public String getLinkUrl() {
return linkUrl;
} | Base | 1 |
public Integer getSort() {
return sort;
} | Base | 1 |
public Long getCreateUserId() {
return createUserId;
} | Base | 1 |
public void setIsOpen(Integer isOpen) {
this.isOpen = isOpen;
} | Base | 1 |
public void setCreateTime(Date createTime) {
this.createTime = createTime;
} | Base | 1 |
public static CBORObject RandomCBORMap(IRandomGenExtended rand, int depth) {
int x = rand.GetInt32(100);
int count = (x < 80) ? 2 : ((x < 93) ? 1 : ((x < 98) ? 0 : 10));
CBORObject cborRet = CBORObject.NewMap();
for (var i = 0; i < count; ++i) {
CBORObject key = RandomCBORObject(rand, depth + 1);
CBORObject value = RandomCBORObject(rand, depth + 1);
cborRet[key] = value;
}
return cborRet;
} | Class | 2 |
private static bool ByteArraysEqual(byte[] arr1, byte[] arr2) {
if (arr1 == null) {
return arr2 == null;
}
if (arr2 == null) {
return false;
}
if (arr1.Length != arr2.Length) {
return false;
}
for (var i = 0; i < arr1.Length; ++i) {
if (arr1[i] != arr2[i]) {
return false;
}
}
return true;
} | Class | 2 |
def _configure_templating(cls, app):
tempdir = app.config["PYLOAD_API"].get_cachedir()
cache_path = os.path.join(tempdir, "jinja")
os.makedirs(cache_path, exist_ok=True)
app.create_jinja_environment()
# NOTE: enable autoescape for all file extensions (including .js)
# maybe this will break .txt rendering, but we don't actually render those kinds of files
# this does not change 'default_for_string=False' (the default)
app.jinja_env.autoescape = jinja2.select_autoescape(default=True)
app.jinja_env.bytecode_cache = jinja2.FileSystemBytecodeCache(cache_path)
for fn in cls.JINJA_TEMPLATE_FILTERS:
app.add_template_filter(fn)
for fn in cls.JINJA_TEMPLATE_GLOBALS:
app.add_template_global(fn)
for fn in cls.JINJA_CONTEXT_PROCESSORS:
app.context_processor(fn) | Base | 1 |
def _configure_handlers(cls, app):
"""
Register error handlers.
"""
for exc, fn in cls.FLASK_ERROR_HANDLERS:
app.register_error_handler(exc, fn) | Base | 1 |
def get_events(self, uuid):
"""
Lists occurred events; may be affected by changes in the future.
:param uuid:
:return: list of `Events`
"""
events = self.pyload.event_manager.get_events(uuid)
new_events = []
def conv_dest(d):
return (Destination.QUEUE if d == "queue" else Destination.COLLECTOR).value
for e in events:
event = EventInfo()
event.eventname = e[0]
if e[0] in ("update", "remove", "insert"):
event.id = e[3]
event.type = (
ElementType.PACKAGE if e[2] == "pack" else ElementType.FILE
).value
event.destination = conv_dest(e[1])
elif e[0] == "order":
if e[1]:
event.id = e[1]
event.type = (
ElementType.PACKAGE if e[2] == "pack" else ElementType.FILE
)
event.destination = conv_dest(e[3])
elif e[0] == "reload":
event.destination = conv_dest(e[1])
new_events.append(event)
return new_events | Base | 1 |
def is_authenticated(session=flask.session):
return session.get("name") and session.get(
"authenticated"
) # NOTE: why checks name? | Base | 1 |
def cast(self, typ, value):
"""
cast value to given format.
"""
if typ == "int":
return int(value)
elif typ == "float":
return float(value)
elif typ == "str":
return "" if value is None else str(value)
elif typ == "bytes":
return b"" if value is None else bytes(value)
elif typ == "bool":
value = "" if value is None else str(value)
return value.lower() in ("1", "true", "on", "yes", "y")
elif typ == "time":
value = "" if value is None else str(value)
if not value:
value = "0:00"
if ":" not in value:
value += ":00"
return value
elif typ in ("file", "folder"):
return "" if value in (None, "") else os.path.realpath(os.path.expanduser(os.fsdecode(value)))
else:
return value | Class | 2 |
def init_handle(self):
"""
sets common options to curl handle.
"""
self.c.setopt(pycurl.FOLLOWLOCATION, 1)
self.c.setopt(pycurl.MAXREDIRS, 10)
self.c.setopt(pycurl.CONNECTTIMEOUT, 30)
self.c.setopt(pycurl.NOSIGNAL, 1)
self.c.setopt(pycurl.NOPROGRESS, 1)
if hasattr(pycurl, "AUTOREFERER"):
self.c.setopt(pycurl.AUTOREFERER, 1)
self.c.setopt(pycurl.SSL_VERIFYPEER, 0)
self.c.setopt(pycurl.LOW_SPEED_TIME, 60)
self.c.setopt(pycurl.LOW_SPEED_LIMIT, 5)
if hasattr(pycurl, "USE_SSL"):
self.c.setopt(pycurl.USE_SSL, pycurl.USESSL_TRY)
# self.c.setopt(pycurl.VERBOSE, 1)
# self.c.setopt(pycurl.HTTP_VERSION, pycurl.CURL_HTTP_VERSION_1_1)
self.c.setopt(
pycurl.USERAGENT,
b"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/60.0.3112.113 Safari/537.36",
)
if pycurl.version_info()[7]:
self.c.setopt(pycurl.ENCODING, b"gzip, deflate")
self.c.setopt(
pycurl.HTTPHEADER,
[
b"Accept: */*",
b"Accept-Language: en-US,en",
b"Accept-Charset: ISO-8859-1,utf-8;q=0.7,*;q=0.7",
b"Connection: keep-alive",
b"Keep-Alive: 300",
b"Expect:",
],
) | Base | 1 |
def login():
user = flask.request.form["username"]
password = flask.request.form["password"]
api = flask.current_app.config["PYLOAD_API"]
user_info = api.check_auth(user, password)
if not user_info:
log.error(f"Login failed for user '{user}'")
return jsonify(False)
s = set_session(user_info)
log.info(f"User '{user}' successfully logged in")
flask.flash("Logged in successfully")
return jsonify(s) | Class | 2 |
def login():
api = flask.current_app.config["PYLOAD_API"]
next = get_redirect_url(fallback=flask.url_for("app.dashboard"))
if flask.request.method == "POST":
user = flask.request.form["username"]
password = flask.request.form["password"]
user_info = api.check_auth(user, password)
if not user_info:
log.error(f"Login failed for user '{user}'")
return render_template("login.html", next=next, errors=True)
set_session(user_info)
log.info(f"User '{user}' successfully logged in")
flask.flash("Logged in successfully")
if is_authenticated():
return flask.redirect(next)
if api.get_config_value("webui", "autologin"):
allusers = api.get_all_userdata()
if len(allusers) == 1: # TODO: check if localhost
user_info = list(allusers.values())[0]
set_session(user_info)
# NOTE: Double-check authentication here because if session[name] is empty,
# next login_required redirects here again and all loop out.
if is_authenticated():
return flask.redirect(next)
return render_template("login.html", next=next) | Class | 2 |
def _configure_session(cls, app):
tempdir = app.config["PYLOAD_API"].get_cachedir()
cache_path = os.path.join(tempdir, "flask")
os.makedirs(cache_path, exist_ok=True)
app.config["SESSION_FILE_DIR"] = cache_path
app.config["SESSION_TYPE"] = "filesystem"
app.config["SESSION_COOKIE_NAME"] = "pyload_session"
app.config["SESSION_COOKIE_SAMESITE"] = "None"
app.config["SESSION_COOKIE_SECURE"] = app.config["PYLOAD_API"].get_config_value("webui", "use_ssl")
app.config["SESSION_PERMANENT"] = False
session_lifetime = max(app.config["PYLOAD_API"].get_config_value("webui", "session_lifetime"), 1) * 60
app.config["PERMANENT_SESSION_LIFETIME"] = session_lifetime | Compound | 4 |
def get_throttles(self):
throttles = super().get_throttles()
if self.action == "reset_password":
throttles.append(PasswordResetRequestThrottle())
return throttles | Class | 2 |
def clean_identity(self):
v = self.cleaned_data["identity"]
if len(v) > 254:
raise forms.ValidationError("Address is too long.")
if User.objects.filter(email=v).exists():
raise forms.ValidationError(
"An account with this email address already exists."
)
return v | Base | 1 |
def test_it_checks_for_existing_users(self):
alice = User(username="alice", email="alice@example.org")
alice.save()
form = {"identity": "alice@example.org", "tz": ""}
r = self.client.post("/accounts/signup/", form)
self.assertContains(r, "already exists") | Base | 1 |
def test_it_ignores_bad_tz(self):
form = {"identity": "alice@example.org", "tz": "Foo/Bar"}
r = self.client.post("/accounts/signup/", form)
self.assertContains(r, "Account created")
self.assertIn("auto-login", r.cookies)
profile = Profile.objects.get()
self.assertEqual(profile.tz, "UTC") | Base | 1 |
def test_it_works(self):
form = {"identity": "alice@example.org", "tz": "Europe/Riga"}
r = self.client.post("/accounts/signup/", form)
self.assertContains(r, "Account created")
self.assertIn("auto-login", r.cookies)
# A user should have been created
user = User.objects.get()
# A profile should have been created
profile = Profile.objects.get()
self.assertEqual(profile.check_limit, 10000)
self.assertEqual(profile.sms_limit, 10000)
self.assertEqual(profile.call_limit, 10000)
self.assertEqual(profile.tz, "Europe/Riga")
# And email sent
self.assertEqual(len(mail.outbox), 1)
subject = "Log in to %s" % settings.SITE_NAME
self.assertEqual(mail.outbox[0].subject, subject)
# A project should have been created
project = Project.objects.get()
self.assertEqual(project.owner, user)
self.assertEqual(project.badge_key, user.username)
# And check should be associated with the new user
check = Check.objects.get()
self.assertEqual(check.name, "My First Check")
self.assertEqual(check.slug, "my-first-check")
self.assertEqual(check.project, project)
# A channel should have been created
channel = Channel.objects.get()
self.assertEqual(channel.project, project) | Base | 1 |
def login(request):
form = forms.PasswordLoginForm()
magic_form = forms.EmailLoginForm()
if request.method == "POST":
if request.POST.get("action") == "login":
form = forms.PasswordLoginForm(request.POST)
if form.is_valid():
return _check_2fa(request, form.user)
else:
magic_form = forms.EmailLoginForm(request.POST)
if magic_form.is_valid():
redirect_url = request.GET.get("next")
if not _allow_redirect(redirect_url):
redirect_url = None
profile = Profile.objects.for_user(magic_form.user)
profile.send_instant_login_link(redirect_url=redirect_url)
response = redirect("hc-login-link-sent")
# check_token looks for this cookie to decide if
# it needs to do the extra POST step.
response.set_cookie("auto-login", "1", max_age=300, httponly=True)
return response
if request.user.is_authenticated:
return _redirect_after_login(request)
bad_link = request.session.pop("bad_link", None)
ctx = {
"page": "login",
"form": form,
"magic_form": magic_form,
"bad_link": bad_link,
"registration_open": settings.REGISTRATION_OPEN,
"support_email": settings.SUPPORT_EMAIL,
}
return render(request, "accounts/login.html", ctx) | Base | 1 |
def signup(request):
if not settings.REGISTRATION_OPEN:
return HttpResponseForbidden()
ctx = {}
form = forms.SignupForm(request.POST)
if form.is_valid():
email = form.cleaned_data["identity"]
tz = form.cleaned_data["tz"]
user = _make_user(email, tz)
profile = Profile.objects.for_user(user)
profile.send_instant_login_link()
ctx["created"] = True
else:
ctx = {"form": form}
response = render(request, "accounts/signup_result.html", ctx)
if ctx.get("created"):
response.set_cookie("auto-login", "1", max_age=300, httponly=True)
return response | Base | 1 |
def _validate_source(source: str, run_id: str) -> None:
if not is_local_uri(source):
return
if run_id:
store = _get_tracking_store()
run = store.get_run(run_id)
source = pathlib.Path(local_file_uri_to_path(source)).resolve()
run_artifact_dir = pathlib.Path(local_file_uri_to_path(run.info.artifact_uri)).resolve()
if run_artifact_dir in [source, *source.parents]:
return
raise MlflowException(
f"Invalid source: '{source}'. To use a local path as source, the run_id request parameter "
"has to be specified and the local path has to be contained within the artifact directory "
"of the run specified by the run_id.",
INVALID_PARAMETER_VALUE,
) | Variant | 0 |
def is_local_uri(uri):
"""Returns true if this is a local file path (/foo or file:/foo)."""
if uri == "databricks":
return False
if is_windows() and uri.startswith("\\\\"):
# windows network drive path looks like: "\\<server name>\path\..."
return False
parsed_uri = urllib.parse.urlparse(uri)
if parsed_uri.hostname:
return False
scheme = parsed_uri.scheme
if scheme == "" or scheme == "file":
return True
if is_windows() and len(scheme) == 1 and scheme.lower() == pathlib.Path(uri).drive.lower()[0]:
return True
return False | Variant | 0 |
def _init_server(backend_uri, root_artifact_uri):
"""
Launch a new REST server using the tracking store specified by backend_uri and root artifact
directory specified by root_artifact_uri.
:returns A tuple (url, process) containing the string URL of the server and a handle to the
server process (a multiprocessing.Process object).
"""
mlflow.set_tracking_uri(None)
server_port = get_safe_port()
process = Popen(
[
sys.executable,
"-c",
f'from mlflow.server import app; app.run("{LOCALHOST}", {server_port})',
],
env={
**os.environ,
BACKEND_STORE_URI_ENV_VAR: backend_uri,
ARTIFACT_ROOT_ENV_VAR: root_artifact_uri,
},
)
_await_server_up_or_die(server_port)
url = f"http://{LOCALHOST}:{server_port}"
_logger.info(f"Launching tracking server against backend URI {backend_uri}. Server URL: {url}")
return url, process | Variant | 0 |
def test_create_model_version_with_path_source(mlflow_client):
name = "mode"
mlflow_client.create_registered_model(name)
exp_id = mlflow_client.create_experiment("test")
run = mlflow_client.create_run(experiment_id=exp_id)
response = requests.post(
f"{mlflow_client.tracking_uri}/api/2.0/mlflow/model-versions/create",
json={
"name": name,
"source": run.info.artifact_uri[len("file://") :],
"run_id": run.info.run_id,
},
)
assert response.status_code == 200
# run_id is not specified
response = requests.post(
f"{mlflow_client.tracking_uri}/api/2.0/mlflow/model-versions/create",
json={
"name": name,
"source": run.info.artifact_uri[len("file://") :],
},
)
assert response.status_code == 400
assert "To use a local path as a model version" in response.json()["message"]
# run_id is specified but source is not in the run's artifact directory
response = requests.post(
f"{mlflow_client.tracking_uri}/api/2.0/mlflow/model-versions/create",
json={
"name": name,
"source": "/tmp",
"run_id": run.info.run_id,
},
)
assert response.status_code == 400
assert "To use a local path as a model version" in response.json()["message"] | Base | 1 |
def predict(self, model_uri, input_path, output_path, content_type):
"""
Generate predictions using generic python model saved with MLflow. The expected format of
the input JSON is the Mlflow scoring format.
Return the prediction results as a JSON.
"""
local_path = _download_artifact_from_uri(model_uri)
# NB: Absolute windows paths do not work with mlflow apis, use file uri to ensure
# platform compatibility.
local_uri = path_to_local_file_uri(local_path)
if self._env_manager != _EnvManager.LOCAL:
command = (
'python -c "from mlflow.pyfunc.scoring_server import _predict; _predict('
"model_uri={model_uri}, "
"input_path={input_path}, "
"output_path={output_path}, "
"content_type={content_type})"
'"'
).format(
model_uri=repr(local_uri),
input_path=repr(input_path),
output_path=repr(output_path),
content_type=repr(content_type),
)
return self.prepare_env(local_path).execute(command)
else:
scoring_server._predict(local_uri, input_path, output_path, content_type) | Base | 1 |
def predict(self, model_uri, input_path, output_path, content_type):
"""
Generate predictions using generic python model saved with MLflow. The expected format of
the input JSON is the Mlflow scoring format.
Return the prediction results as a JSON.
"""
local_path = _download_artifact_from_uri(model_uri)
# NB: Absolute windows paths do not work with mlflow apis, use file uri to ensure
# platform compatibility.
local_uri = path_to_local_file_uri(local_path)
if self._env_manager != _EnvManager.LOCAL:
command = (
'python -c "from mlflow.pyfunc.scoring_server import _predict; _predict('
"model_uri={model_uri}, "
"input_path={input_path}, "
"output_path={output_path}, "
"content_type={content_type})"
'"'
).format(
model_uri=repr(local_uri),
input_path=repr(input_path),
output_path=repr(output_path),
content_type=repr(content_type),
)
return self.prepare_env(local_path).execute(command)
else:
scoring_server._predict(local_uri, input_path, output_path, content_type) | Base | 1 |
def get_cmd(
model_uri: str, port: int = None, host: int = None, timeout: int = None, nworkers: int = None
) -> Tuple[str, Dict[str, str]]:
local_uri = path_to_local_file_uri(model_uri)
timeout = timeout or MLFLOW_SCORING_SERVER_REQUEST_TIMEOUT.get()
# NB: Absolute windows paths do not work with mlflow apis, use file uri to ensure
# platform compatibility.
if os.name != "nt":
args = [f"--timeout={timeout}"]
if port and host:
args.append(f"-b {host}:{port}")
elif host:
args.append(f"-b {host}")
if nworkers:
args.append(f"-w {nworkers}")
command = (
f"gunicorn {' '.join(args)} ${{GUNICORN_CMD_ARGS}}"
" -- mlflow.pyfunc.scoring_server.wsgi:app"
)
else:
args = []
if host:
args.append(f"--host={host}")
if port:
args.append(f"--port={port}")
command = (
f"waitress-serve {' '.join(args)} "
"--ident=mlflow mlflow.pyfunc.scoring_server.wsgi:app"
)
command_env = os.environ.copy()
command_env[_SERVER_MODEL_PATH] = local_uri
return command, command_env | Base | 1 |
def get_cmd(
model_uri: str, port: int = None, host: int = None, timeout: int = None, nworkers: int = None
) -> Tuple[str, Dict[str, str]]:
local_uri = path_to_local_file_uri(model_uri)
timeout = timeout or MLFLOW_SCORING_SERVER_REQUEST_TIMEOUT.get()
# NB: Absolute windows paths do not work with mlflow apis, use file uri to ensure
# platform compatibility.
if os.name != "nt":
args = [f"--timeout={timeout}"]
if port and host:
args.append(f"-b {host}:{port}")
elif host:
args.append(f"-b {host}")
if nworkers:
args.append(f"-w {nworkers}")
command = (
f"gunicorn {' '.join(args)} ${{GUNICORN_CMD_ARGS}}"
" -- mlflow.pyfunc.scoring_server.wsgi:app"
)
else:
args = []
if host:
args.append(f"--host={host}")
if port:
args.append(f"--port={port}")
command = (
f"waitress-serve {' '.join(args)} "
"--ident=mlflow mlflow.pyfunc.scoring_server.wsgi:app"
)
command_env = os.environ.copy()
command_env[_SERVER_MODEL_PATH] = local_uri
return command, command_env | Base | 1 |
def create_user():
content_type = request.headers.get("Content-Type")
if content_type == "application/x-www-form-urlencoded":
username = request.form["username"]
password = request.form["password"]
if store.has_user(username):
flash(f"Username has already been taken: {username}")
return alert(href=SIGNUP)
store.create_user(username, password)
flash(f"Successfully signed up user: {username}")
return alert(href=HOME)
elif content_type == "application/json":
username = _get_request_param("username")
password = _get_request_param("password")
user = store.create_user(username, password)
return make_response({"user": user.to_json()})
else:
return make_response(f"Invalid content type: '{content_type}'", 400) | Base | 1 |
def to_html(self) -> str:
"""
Returns a rendered HTML representing the content of the tab.
:return: a HTML string
"""
import jinja2
j2_env = jinja2.Environment(loader=jinja2.BaseLoader()).from_string(self.template)
return j2_env.render({**self._context}) | Base | 1 |
def is_local_uri(uri, is_tracking_or_registry_uri=True):
"""
Returns true if the specified URI is a local file path (/foo or file:/foo).
:param uri: The URI.
:param is_tracking_uri: Whether or not the specified URI is an MLflow Tracking or MLflow
Model Registry URI. Examples of other URIs are MLflow artifact URIs,
filesystem paths, etc.
"""
if uri == "databricks" and is_tracking_or_registry_uri:
return False
if is_windows() and uri.startswith("\\\\"):
# windows network drive path looks like: "\\<server name>\path\..."
return False
parsed_uri = urllib.parse.urlparse(uri)
if parsed_uri.hostname and not (
parsed_uri.hostname == "."
or parsed_uri.hostname.startswith("localhost")
or parsed_uri.hostname.startswith("127.0.0.1")
):
return False
scheme = parsed_uri.scheme
if scheme == "" or scheme == "file":
return True
if is_windows() and len(scheme) == 1 and scheme.lower() == pathlib.Path(uri).drive.lower()[0]:
return True
return False | Variant | 0 |
def validate_path_is_safe(path):
"""
Validates that the specified path is safe to join with a trusted prefix. This is a security
measure to prevent path traversal attacks.
A valid path should:
not contain separators other than '/'
not contain .. to navigate to parent dir in path
not be an absolute path
"""
from mlflow.utils.file_utils import local_file_uri_to_path
exc = MlflowException(f"Invalid path: {path}", error_code=INVALID_PARAMETER_VALUE)
if any((s in path) for s in ("#", "%23")):
raise exc
if is_file_uri(path):
path = local_file_uri_to_path(path)
if (
any((s in path) for s in _OS_ALT_SEPS)
or ".." in path.split("/")
or pathlib.PureWindowsPath(path).is_absolute()
or pathlib.PurePosixPath(path).is_absolute()
or (is_windows() and len(path) >= 2 and path[1] == ":")
):
raise exc | Base | 1 |
def validate_path_is_safe(path):
"""
Validates that the specified path is safe to join with a trusted prefix. This is a security
measure to prevent path traversal attacks.
A valid path should:
not contain separators other than '/'
not contain .. to navigate to parent dir in path
not be an absolute path
"""
from mlflow.utils.file_utils import local_file_uri_to_path
exc = MlflowException(f"Invalid path: {path}", error_code=INVALID_PARAMETER_VALUE)
if any((s in path) for s in ("#", "%23")):
raise exc
if is_file_uri(path):
path = local_file_uri_to_path(path)
if (
any((s in path) for s in _OS_ALT_SEPS)
or ".." in path.split("/")
or pathlib.PureWindowsPath(path).is_absolute()
or pathlib.PurePosixPath(path).is_absolute()
or (is_windows() and len(path) >= 2 and path[1] == ":")
):
raise exc | Variant | 0 |
def test_list_artifacts_malicious_path(http_artifact_repo, path):
with mock.patch(
"mlflow.store.artifact.http_artifact_repo.http_request",
return_value=MockResponse(
{
"files": [
{"path": path, "is_dir": False, "file_size": 1},
]
},
200,
),
):
with pytest.raises(MlflowException, match=f"Invalid path: {path}"):
http_artifact_repo.list_artifacts() | Base | 1 |
def test_list_artifacts_malicious_path(http_artifact_repo, path):
with mock.patch(
"mlflow.store.artifact.http_artifact_repo.http_request",
return_value=MockResponse(
{
"files": [
{"path": path, "is_dir": False, "file_size": 1},
]
},
200,
),
):
with pytest.raises(MlflowException, match=f"Invalid path: {path}"):
http_artifact_repo.list_artifacts() | Variant | 0 |
def test_validate_path_is_safe_windows_good(path):
validate_path_is_safe(path) | Base | 1 |
def test_validate_path_is_safe_windows_good(path):
validate_path_is_safe(path) | Variant | 0 |
def assert_response(resp):
assert resp.status_code == 400
assert response.json() == {
"error_code": "INVALID_PARAMETER_VALUE",
"message": f"Invalid path: {invalid_path}",
} | Base | 1 |
def assert_response(resp):
assert resp.status_code == 400
assert response.json() == {
"error_code": "INVALID_PARAMETER_VALUE",
"message": f"Invalid path: {invalid_path}",
} | Variant | 0 |
def test_path_validation(mlflow_client):
experiment_id = mlflow_client.create_experiment("tags validation")
created_run = mlflow_client.create_run(experiment_id)
run_id = created_run.info.run_id
invalid_path = "../path"
def assert_response(resp):
assert resp.status_code == 400
assert response.json() == {
"error_code": "INVALID_PARAMETER_VALUE",
"message": f"Invalid path: {invalid_path}",
}
response = requests.get(
f"{mlflow_client.tracking_uri}/api/2.0/mlflow/artifacts/list",
params={"run_id": run_id, "path": invalid_path},
)
assert_response(response)
response = requests.get(
f"{mlflow_client.tracking_uri}/get-artifact",
params={"run_id": run_id, "path": invalid_path},
)
assert_response(response)
response = requests.get(
f"{mlflow_client.tracking_uri}//model-versions/get-artifact",
params={"name": "model", "version": 1, "path": invalid_path},
)
assert_response(response) | Base | 1 |
def test_path_validation(mlflow_client):
experiment_id = mlflow_client.create_experiment("tags validation")
created_run = mlflow_client.create_run(experiment_id)
run_id = created_run.info.run_id
invalid_path = "../path"
def assert_response(resp):
assert resp.status_code == 400
assert response.json() == {
"error_code": "INVALID_PARAMETER_VALUE",
"message": f"Invalid path: {invalid_path}",
}
response = requests.get(
f"{mlflow_client.tracking_uri}/api/2.0/mlflow/artifacts/list",
params={"run_id": run_id, "path": invalid_path},
)
assert_response(response)
response = requests.get(
f"{mlflow_client.tracking_uri}/get-artifact",
params={"run_id": run_id, "path": invalid_path},
)
assert_response(response)
response = requests.get(
f"{mlflow_client.tracking_uri}//model-versions/get-artifact",
params={"name": "model", "version": 1, "path": invalid_path},
)
assert_response(response) | Variant | 0 |
def test_validate_path_is_safe_windows_bad(path):
with pytest.raises(MlflowException, match="Invalid path"):
validate_path_is_safe(path) | Base | 1 |
def test_validate_path_is_safe_windows_bad(path):
with pytest.raises(MlflowException, match="Invalid path"):
validate_path_is_safe(path) | Variant | 0 |
def test_validate_path_is_safe_good(path):
validate_path_is_safe(path) | Base | 1 |
def test_validate_path_is_safe_good(path):
validate_path_is_safe(path) | Variant | 0 |
def test_validate_path_is_safe_bad(path):
with pytest.raises(MlflowException, match="Invalid path"):
validate_path_is_safe(path) | Base | 1 |
def test_validate_path_is_safe_bad(path):
with pytest.raises(MlflowException, match="Invalid path"):
validate_path_is_safe(path) | Variant | 0 |
def load(self, dst_path=None) -> str:
"""
Downloads the dataset source to the local filesystem.
:param dst_path: Path of the local filesystem destination directory to which to download the
dataset source. If the directory does not exist, it is created. If
unspecified, the dataset source is downloaded to a new uniquely-named
directory on the local filesystem.
:return: The path to the downloaded dataset source on the local filesystem.
"""
resp = cloud_storage_http_request(
method="GET",
url=self.url,
stream=True,
)
augmented_raise_for_status(resp)
path = urlparse(self.url).path
content_disposition = resp.headers.get("Content-Disposition")
if content_disposition is not None and (
file_name := next(re.finditer(r"filename=(.+)", content_disposition), None)
):
# NB: If the filename is quoted, unquote it
basename = file_name[1].strip("'\"")
if _is_path(basename):
raise MlflowException.invalid_parameter_value(
f"Invalid filename in Content-Disposition header: {basename}. "
"It must be a file name, not a path."
)
elif path is not None and len(posixpath.basename(path)) > 0:
basename = posixpath.basename(path)
else:
basename = "dataset_source"
if dst_path is None:
dst_path = create_tmp_dir()
dst_path = os.path.join(dst_path, basename)
with open(dst_path, "wb") as f:
chunk_size = 1024 * 1024 # 1 MB
for chunk in resp.iter_content(chunk_size=chunk_size):
f.write(chunk)
return dst_path | Base | 1 |
def list_artifacts(self, path=None):
with self.get_ftp_client() as ftp:
artifact_dir = self.path
list_dir = posixpath.join(artifact_dir, path) if path else artifact_dir
if not self._is_dir(ftp, list_dir):
return []
artifact_files = ftp.nlst(list_dir)
artifact_files = list(filter(lambda x: x != "." and x != "..", artifact_files))
# Make sure artifact_files is a list of file names because ftp.nlst
# may return absolute paths.
artifact_files = [os.path.basename(f) for f in artifact_files]
infos = []
for file_name in artifact_files:
file_path = file_name if path is None else posixpath.join(path, file_name)
full_file_path = posixpath.join(list_dir, file_name)
if self._is_dir(ftp, full_file_path):
infos.append(FileInfo(file_path, True, None))
else:
size = self._size(ftp, full_file_path)
infos.append(FileInfo(file_path, False, size))
return infos | Variant | 0 |
def _create_multipart_upload_artifact(artifact_path):
"""
A request handler for `POST /mlflow-artifacts/mpu/create` to create a multipart upload
to `artifact_path` (a relative path from the root artifact directory).
"""
validate_path_is_safe(artifact_path)
request_message = _get_request_message(
CreateMultipartUpload(),
schema={
"path": [_assert_required, _assert_string],
"num_parts": [_assert_intlike],
},
)
path = request_message.path
num_parts = request_message.num_parts
artifact_repo = _get_artifact_repo_mlflow_artifacts()
_validate_support_multipart_upload(artifact_repo)
create_response = artifact_repo.create_multipart_upload(
path,
num_parts,
artifact_path,
)
response_message = create_response.to_proto()
response = Response(mimetype="application/json")
response.set_data(message_to_json(response_message))
return response | Base | 1 |
def get_artifact_handler():
from querystring_parser import parser
query_string = request.query_string.decode("utf-8")
request_dict = parser.parse(query_string, normalized=True)
run_id = request_dict.get("run_id") or request_dict.get("run_uuid")
path = request_dict["path"]
validate_path_is_safe(path)
run = _get_tracking_store().get_run(run_id)
if _is_servable_proxied_run_artifact_root(run.info.artifact_uri):
artifact_repo = _get_artifact_repo_mlflow_artifacts()
artifact_path = _get_proxied_run_artifact_destination_path(
proxied_artifact_root=run.info.artifact_uri,
relative_path=path,
)
else:
artifact_repo = _get_artifact_repo(run)
artifact_path = path
return _send_artifact(artifact_repo, artifact_path) | Base | 1 |
def _delete_artifact_mlflow_artifacts(artifact_path):
"""
A request handler for `DELETE /mlflow-artifacts/artifacts?path=<value>` to delete artifacts in
`path` (a relative path from the root artifact directory).
"""
validate_path_is_safe(artifact_path)
_get_request_message(DeleteArtifact())
artifact_repo = _get_artifact_repo_mlflow_artifacts()
artifact_repo.delete_artifacts(artifact_path)
response_message = DeleteArtifact.Response()
response = Response(mimetype="application/json")
response.set_data(message_to_json(response_message))
return response | Base | 1 |
def _list_artifacts():
request_message = _get_request_message(
ListArtifacts(),
schema={
"run_id": [_assert_string, _assert_required],
"path": [_assert_string],
"page_token": [_assert_string],
},
)
response_message = ListArtifacts.Response()
if request_message.HasField("path"):
path = request_message.path
validate_path_is_safe(path)
else:
path = None
run_id = request_message.run_id or request_message.run_uuid
run = _get_tracking_store().get_run(run_id)
if _is_servable_proxied_run_artifact_root(run.info.artifact_uri):
artifact_entities = _list_artifacts_for_proxied_run_artifact_root(
proxied_artifact_root=run.info.artifact_uri,
relative_path=path,
)
else:
artifact_entities = _get_artifact_repo(run).list_artifacts(path)
response_message.files.extend([a.to_proto() for a in artifact_entities])
response_message.root_uri = run.info.artifact_uri
response = Response(mimetype="application/json")
response.set_data(message_to_json(response_message))
return response | Base | 1 |
def _abort_multipart_upload_artifact(artifact_path):
"""
A request handler for `POST /mlflow-artifacts/mpu/abort` to abort a multipart upload
to `artifact_path` (a relative path from the root artifact directory).
"""
validate_path_is_safe(artifact_path)
request_message = _get_request_message(
AbortMultipartUpload(),
schema={
"path": [_assert_required, _assert_string],
"upload_id": [_assert_string],
},
)
path = request_message.path
upload_id = request_message.upload_id
artifact_repo = _get_artifact_repo_mlflow_artifacts()
_validate_support_multipart_upload(artifact_repo)
artifact_repo.abort_multipart_upload(
path,
upload_id,
artifact_path,
)
return _wrap_response(AbortMultipartUpload.Response()) | Base | 1 |
def get_model_version_artifact_handler():
from querystring_parser import parser
query_string = request.query_string.decode("utf-8")
request_dict = parser.parse(query_string, normalized=True)
name = request_dict.get("name")
version = request_dict.get("version")
path = request_dict["path"]
validate_path_is_safe(path)
artifact_uri = _get_model_registry_store().get_model_version_download_uri(name, version)
if _is_servable_proxied_run_artifact_root(artifact_uri):
artifact_repo = _get_artifact_repo_mlflow_artifacts()
artifact_path = _get_proxied_run_artifact_destination_path(
proxied_artifact_root=artifact_uri,
relative_path=path,
)
else:
artifact_repo = get_artifact_repository(artifact_uri)
artifact_path = path
return _send_artifact(artifact_repo, artifact_path) | Base | 1 |
def _download_artifact(artifact_path):
"""
A request handler for `GET /mlflow-artifacts/artifacts/<artifact_path>` to download an artifact
from `artifact_path` (a relative path from the root artifact directory).
"""
validate_path_is_safe(artifact_path)
tmp_dir = tempfile.TemporaryDirectory()
artifact_repo = _get_artifact_repo_mlflow_artifacts()
dst = artifact_repo.download_artifacts(artifact_path, tmp_dir.name)
# Ref: https://stackoverflow.com/a/24613980/6943581
file_handle = open(dst, "rb") # noqa: SIM115
def stream_and_remove_file():
yield from file_handle
file_handle.close()
tmp_dir.cleanup()
file_sender_response = current_app.response_class(stream_and_remove_file())
return _response_with_file_attachment_headers(artifact_path, file_sender_response) | Base | 1 |
def _complete_multipart_upload_artifact(artifact_path):
"""
A request handler for `POST /mlflow-artifacts/mpu/complete` to complete a multipart upload
to `artifact_path` (a relative path from the root artifact directory).
"""
validate_path_is_safe(artifact_path)
request_message = _get_request_message(
CompleteMultipartUpload(),
schema={
"path": [_assert_required, _assert_string],
"upload_id": [_assert_string],
"parts": [_assert_required],
},
)
path = request_message.path
upload_id = request_message.upload_id
parts = [MultipartUploadPart.from_proto(part) for part in request_message.parts]
artifact_repo = _get_artifact_repo_mlflow_artifacts()
_validate_support_multipart_upload(artifact_repo)
artifact_repo.complete_multipart_upload(
path,
upload_id,
parts,
artifact_path,
)
return _wrap_response(CompleteMultipartUpload.Response()) | Base | 1 |
def _list_artifacts_mlflow_artifacts():
"""
A request handler for `GET /mlflow-artifacts/artifacts?path=<value>` to list artifacts in `path`
(a relative path from the root artifact directory).
"""
request_message = _get_request_message(ListArtifactsMlflowArtifacts())
if request_message.HasField("path"):
validate_path_is_safe(request_message.path)
path = request_message.path
else:
path = None
artifact_repo = _get_artifact_repo_mlflow_artifacts()
files = []
for file_info in artifact_repo.list_artifacts(path):
basename = posixpath.basename(file_info.path)
new_file_info = FileInfo(basename, file_info.is_dir, file_info.file_size)
files.append(new_file_info.to_proto())
response_message = ListArtifacts.Response()
response_message.files.extend(files)
response = Response(mimetype="application/json")
response.set_data(message_to_json(response_message))
return response | Base | 1 |
def _upload_artifact(artifact_path):
"""
A request handler for `PUT /mlflow-artifacts/artifacts/<artifact_path>` to upload an artifact
to `artifact_path` (a relative path from the root artifact directory).
"""
validate_path_is_safe(artifact_path)
head, tail = posixpath.split(artifact_path)
with tempfile.TemporaryDirectory() as tmp_dir:
tmp_path = os.path.join(tmp_dir, tail)
with open(tmp_path, "wb") as f:
chunk_size = 1024 * 1024 # 1 MB
while True:
chunk = request.stream.read(chunk_size)
if len(chunk) == 0:
break
f.write(chunk)
artifact_repo = _get_artifact_repo_mlflow_artifacts()
artifact_repo.log_artifact(tmp_path, artifact_path=head or None)
return _wrap_response(UploadArtifact.Response()) | Base | 1 |
def list_artifacts(self, path=None):
endpoint = "/mlflow-artifacts/artifacts"
url, tail = self.artifact_uri.split(endpoint, maxsplit=1)
root = tail.lstrip("/")
params = {"path": posixpath.join(root, path) if path else root}
host_creds = _get_default_host_creds(url)
resp = http_request(host_creds, endpoint, "GET", params=params)
augmented_raise_for_status(resp)
file_infos = []
for f in resp.json().get("files", []):
validate_path_is_safe(f["path"])
file_info = FileInfo(
posixpath.join(path, f["path"]) if path else f["path"],
f["is_dir"],
int(f["file_size"]) if ("file_size" in f) else None,
)
file_infos.append(file_info)
return sorted(file_infos, key=lambda f: f.path) | Base | 1 |
def _get_http_response_with_retries(
method,
url,
max_retries,
backoff_factor,
backoff_jitter,
retry_codes,
raise_on_status=True,
**kwargs,
):
"""
Performs an HTTP request using Python's `requests` module with an automatic retry policy.
:param method: a string indicating the method to use, e.g. "GET", "POST", "PUT".
:param url: the target URL address for the HTTP request.
:param max_retries: Maximum total number of retries.
:param backoff_factor: a time factor for exponential backoff. e.g. value 5 means the HTTP
request will be retried with interval 5, 10, 20... seconds. A value of 0 turns off the
exponential backoff.
:param backoff_jitter: A random jitter to add to the backoff interval.
:param retry_codes: a list of HTTP response error codes that qualifies for retry.
:param raise_on_status: whether to raise an exception, or return a response, if status falls
in retry_codes range and retries have been exhausted.
:param kwargs: Additional keyword arguments to pass to `requests.Session.request()`
:return: requests.Response object.
"""
session = _get_request_session(
max_retries, backoff_factor, backoff_jitter, retry_codes, raise_on_status
)
return session.request(method, url, **kwargs) | Base | 1 |
def test_log_artifact_gcp_with_headers(
databricks_artifact_repo, test_file, artifact_path, expected_location
):
expected_headers = {header.name: header.value for header in MOCK_HEADERS}
mock_response = Response()
mock_response.status_code = 200
mock_response.close = lambda: None
mock_credential_info = ArtifactCredentialInfo(
signed_uri=MOCK_GCP_SIGNED_URL,
type=ArtifactCredentialType.GCP_SIGNED_URL,
headers=MOCK_HEADERS,
)
with mock.patch(
f"{DATABRICKS_ARTIFACT_REPOSITORY}._get_credential_infos",
return_value=[mock_credential_info],
) as get_credential_infos_mock, mock.patch(
"requests.Session.request", return_value=mock_response
) as request_mock:
databricks_artifact_repo.log_artifact(test_file, artifact_path)
get_credential_infos_mock.assert_called_with(
GetCredentialsForWrite, MOCK_RUN_ID, [expected_location]
)
request_mock.assert_called_with(
"put", MOCK_GCP_SIGNED_URL, data=ANY, headers=expected_headers, timeout=None
) | Base | 1 |
def test_log_artifact_aws_with_headers(
databricks_artifact_repo, test_file, artifact_path, expected_location
):
expected_headers = {header.name: header.value for header in MOCK_HEADERS}
mock_response = Response()
mock_response.status_code = 200
mock_response.close = lambda: None
mock_credential_info = ArtifactCredentialInfo(
signed_uri=MOCK_AWS_SIGNED_URI,
type=ArtifactCredentialType.AWS_PRESIGNED_URL,
headers=MOCK_HEADERS,
)
with mock.patch(
f"{DATABRICKS_ARTIFACT_REPOSITORY}._get_credential_infos",
return_value=[mock_credential_info],
) as get_credential_infos_mock, mock.patch(
"requests.Session.request", return_value=mock_response
) as request_mock:
databricks_artifact_repo.log_artifact(test_file, artifact_path)
get_credential_infos_mock.assert_called_with(
GetCredentialsForWrite, MOCK_RUN_ID, [expected_location]
)
request_mock.assert_called_with(
"put", MOCK_AWS_SIGNED_URI, data=ANY, headers=expected_headers, timeout=None
) | Base | 1 |
def test_log_artifact_aws(databricks_artifact_repo, test_file, artifact_path, expected_location):
mock_response = Response()
mock_response.status_code = 200
mock_response.close = lambda: None
mock_credential_info = ArtifactCredentialInfo(
signed_uri=MOCK_AWS_SIGNED_URI, type=ArtifactCredentialType.AWS_PRESIGNED_URL
)
with mock.patch(
f"{DATABRICKS_ARTIFACT_REPOSITORY}._get_credential_infos",
return_value=[mock_credential_info],
) as get_credential_infos_mock, mock.patch(
"requests.Session.request", return_value=mock_response
) as request_mock:
databricks_artifact_repo.log_artifact(test_file, artifact_path)
get_credential_infos_mock.assert_called_with(
GetCredentialsForWrite, MOCK_RUN_ID, [expected_location]
)
request_mock.assert_called_with(
"put", MOCK_AWS_SIGNED_URI, data=ANY, headers={}, timeout=None
) | Base | 1 |
def test_log_artifact_gcp(databricks_artifact_repo, test_file, artifact_path, expected_location):
mock_response = Response()
mock_response.status_code = 200
mock_response.close = lambda: None
mock_credential_info = ArtifactCredentialInfo(
signed_uri=MOCK_GCP_SIGNED_URL, type=ArtifactCredentialType.GCP_SIGNED_URL
)
with mock.patch(
f"{DATABRICKS_ARTIFACT_REPOSITORY}._get_credential_infos",
return_value=[mock_credential_info],
) as get_credential_infos_mock, mock.patch(
"requests.Session.request", return_value=mock_response
) as request_mock:
databricks_artifact_repo.log_artifact(test_file, artifact_path)
get_credential_infos_mock.assert_called_with(
GetCredentialsForWrite, MOCK_RUN_ID, [expected_location]
)
request_mock.assert_called_with(
"put", MOCK_GCP_SIGNED_URL, data=ANY, headers={}, timeout=None
) | Base | 1 |
def run_as_real_user(args: list[str]) -> None:
"""Call subprocess.run as real user if called via sudo/pkexec.
If we are called through pkexec/sudo, determine the real user ID and
run the command with it to get the user's web browser settings.
"""
uid = _get_env_int("SUDO_UID", _get_env_int("PKEXEC_UID"))
if uid is None or not get_process_user_and_group().is_root():
subprocess.run(args, check=False)
return
pwuid = pwd.getpwuid(uid)
gid = _get_env_int("SUDO_GID")
if gid is None:
gid = pwuid.pw_gid
env = {
k: v
for k, v in os.environ.items()
if not k.startswith("SUDO_") and k != "PKEXEC_UID"
} | _get_users_environ(uid)
env["HOME"] = pwuid.pw_dir
subprocess.run(
args,
check=False,
env=env,
user=uid,
group=gid,
extra_groups=os.getgrouplist(pwuid.pw_name, gid),
) | Class | 2 |
def open_url(self, url):
"""Open the given URL in a new browser window.
Display an error dialog if everything fails.
"""
(r, w) = os.pipe()
if os.fork() > 0:
os.close(w)
status = os.wait()[1]
if status:
title = _("Unable to start web browser")
error = _("Unable to start web browser to open %s." % url)
message = os.fdopen(r).readline()
if message:
error += "\n" + message
self.ui_error_message(title, error)
try:
os.close(r)
except OSError:
pass
return
os.setsid()
os.close(r)
try:
try:
run_as_real_user(["xdg-open", url])
except OSError:
# fall back to webbrowser
webbrowser.open(url, new=True, autoraise=True)
sys.exit(0)
except Exception as error: # pylint: disable=broad-except
os.write(w, str(error))
sys.exit(1)
os._exit(0) # pylint: disable=protected-access | Class | 2 |
def test_run_as_real_user_no_gvfsd(
self, getpwuid_mock: unittest.mock.MagicMock
) -> None:
"""Test run_as_real_user() without no gvfsd process."""
getpwuid_mock.return_value = pwd.struct_passwd(
(
"testuser",
"x",
1337,
42,
"Test user,,,",
"/home/testuser",
"/bin/bash",
)
)
with unittest.mock.patch(
"subprocess.run", side_effect=mock_run_calls_except_pgrep
) as run_mock:
run_as_real_user(["/bin/true"])
run_mock.assert_called_with(
["/bin/true"],
check=False,
env={"HOME": "/home/testuser"},
user=1337,
group=42,
extra_groups=[42],
)
self.assertEqual(run_mock.call_count, 2) | Class | 2 |
def test_run_as_real_user(self) -> None:
"""Test run_as_real_user() with SUDO_UID set."""
pwuid = pwd.getpwuid(int(os.environ["SUDO_UID"]))
with tempfile.TemporaryDirectory() as tmpdir:
# rename test program to fake gvfsd
gvfsd_mock = os.path.join(tmpdir, "gvfsd")
shutil.copy(self.TEST_EXECUTABLE, gvfsd_mock)
gvfsd_env = {
"XDG_DATA_DIRS": "mocked XDG data dir",
"DBUS_SESSION_BUS_ADDRESS": "/fake/dbus/path",
}
with self._run_test_executable(gvfsd_mock, env=gvfsd_env):
with unittest.mock.patch(
"subprocess.run", side_effect=mock_run_calls_except_pgrep
) as run_mock:
run_as_real_user(["/bin/true"])
run_mock.assert_called_with(
["/bin/true"],
check=False,
env={
"DBUS_SESSION_BUS_ADDRESS": "/fake/dbus/path",
"XDG_DATA_DIRS": "mocked XDG data dir",
"HOME": pwuid.pw_dir,
},
user=int(os.environ["SUDO_UID"]),
group=pwuid.pw_gid,
extra_groups=os.getgrouplist(pwuid.pw_name, pwuid.pw_gid),
)
self.assertEqual(run_mock.call_count, 2) | Class | 2 |
def _get_data(self):
LOG.debug("Machine is a Vultr instance")
# Fetch metadata
self.metadata = self.get_metadata()
self.userdata_raw = self.metadata["user-data"]
# Generate config and process data
self.get_datasource_data(self.metadata)
# Dump some data so diagnosing failures is manageable
LOG.debug("Vultr Vendor Config:")
LOG.debug(util.json_dumps(self.metadata["vendor-data"]))
LOG.debug("SUBID: %s", self.metadata["instance-id"])
LOG.debug("Hostname: %s", self.metadata["local-hostname"])
if self.userdata_raw is not None:
LOG.debug("User-Data:")
LOG.debug(self.userdata_raw)
return True | Base | 1 |
def redact_sensitive_keys(metadata, redact_value=REDACT_SENSITIVE_VALUE):
"""Redact any sensitive keys from to provided metadata dictionary.
Replace any keys values listed in 'sensitive_keys' with redact_value.
"""
if not metadata.get("sensitive_keys", []):
return metadata
md_copy = copy.deepcopy(metadata)
for key_path in metadata.get("sensitive_keys"):
path_parts = key_path.split("/")
obj = md_copy
for path in path_parts:
if isinstance(obj[path], dict) and path != path_parts[-1]:
obj = obj[path]
obj[path] = redact_value
return md_copy | Base | 1 |
def process_instance_metadata(metadata, key_path="", sensitive_keys=()):
"""Process all instance metadata cleaning it up for persisting as json.
Strip ci-b64 prefix and catalog any 'base64_encoded_keys' as a list
@return Dict copy of processed metadata.
"""
md_copy = copy.deepcopy(metadata)
base64_encoded_keys = []
sens_keys = []
for key, val in metadata.items():
if key_path:
sub_key_path = key_path + "/" + key
else:
sub_key_path = key
if key in sensitive_keys or sub_key_path in sensitive_keys:
sens_keys.append(sub_key_path)
if isinstance(val, str) and val.startswith("ci-b64:"):
base64_encoded_keys.append(sub_key_path)
md_copy[key] = val.replace("ci-b64:", "")
if isinstance(val, dict):
return_val = process_instance_metadata(
val, sub_key_path, sensitive_keys
)
base64_encoded_keys.extend(return_val.pop("base64_encoded_keys"))
sens_keys.extend(return_val.pop("sensitive_keys"))
md_copy[key] = return_val
md_copy["base64_encoded_keys"] = sorted(base64_encoded_keys)
md_copy["sensitive_keys"] = sorted(sens_keys)
return md_copy | Base | 1 |
def _initialize_filesystem(self):
util.ensure_dirs(self._initial_subdirs())
log_file = util.get_cfg_option_str(self.cfg, "def_log_file")
if log_file:
util.ensure_file(log_file, mode=0o640, preserve_mode=True)
perms = self.cfg.get("syslog_fix_perms")
if not perms:
perms = {}
if not isinstance(perms, list):
perms = [perms]
error = None
for perm in perms:
u, g = util.extract_usergroup(perm)
try:
util.chownbyname(log_file, u, g)
return
except OSError as e:
error = e
LOG.warning(
"Failed changing perms on '%s'. tried: %s. %s",
log_file,
",".join(perms),
error,
) | Base | 1 |
def test_existing_file_permissions_are_not_modified(self, init, tmpdir):
"""If the log file already exists, we should not modify its permissions
See https://bugs.launchpad.net/cloud-init/+bug/1900837.
"""
# Use a mode that will never be made the default so this test will
# always be valid
mode = 0o606
log_file = tmpdir.join("cloud-init.log")
log_file.ensure()
log_file.chmod(mode)
init._cfg = {"def_log_file": str(log_file)}
init._initialize_filesystem()
assert mode == stat.S_IMODE(log_file.stat().mode) | Base | 1 |
def test_regular_user_cant_add_users(self):
response = self.client.get("/admin/auth/user/add/")
self.assertEqual(HTTPStatus.FORBIDDEN, response.status_code)
response = self.client.post(
"/admin/auth/user/add/",
{
"username": "added-by-regular-user",
"password1": "xo-xo-xo",
"password2": "xo-xo-xo",
},
follow=True,
)
self.assertEqual(HTTPStatus.FORBIDDEN, response.status_code)
self.assertFalse(
get_user_model().objects.filter(username="added-by-regular-user").exists()
) | Base | 1 |
def test_moderator_can_add_users(self):
user_should_have_perm(self.moderator, "auth.add_user")
user_should_have_perm(self.moderator, "auth.change_user")
# test for https://github.com/kiwitcms/Kiwi/issues/642
self.client.login( # nosec:B106:hardcoded_password_funcarg
username=self.moderator.username, password="admin-password"
)
response = self.client.get("/admin/auth/user/add/")
self.assertEqual(HTTPStatus.OK, response.status_code)
# only these fields can be edited
self.assertContains(response, "id_username")
self.assertContains(response, "id_password1")
self.assertContains(response, "id_password2")
response = self.client.post(
"/admin/auth/user/add/",
{
"username": "added-by-moderator",
"password1": "xo-xo-xo",
"password2": "xo-xo-xo",
},
follow=True,
)
self.assertEqual(HTTPStatus.OK, response.status_code)
self.assertTrue(
get_user_model().objects.filter(username="added-by-moderator").exists()
) | Base | 1 |
def test_superuser_can_add_users(self):
# test for https://github.com/kiwitcms/Kiwi/issues/642
self.client.login( # nosec:B106:hardcoded_password_funcarg
username=self.admin.username, password="admin-password"
)
response = self.client.get("/admin/auth/user/add/")
self.assertEqual(HTTPStatus.OK, response.status_code)
response = self.client.post(
"/admin/auth/user/add/",
{
"username": "added-by-admin",
"password1": "xo-xo-xo",
"password2": "xo-xo-xo",
},
follow=True,
)
self.assertEqual(HTTPStatus.OK, response.status_code)
self.assertTrue(
get_user_model().objects.filter(username="added-by-admin").exists()
) | Base | 1 |
def setUp(self):
self.data = {
"username": "test_user",
"password1": "password",
"password2": "password",
"email": "new-tester@example.com",
} | Base | 1 |
def test_invalid_form(self):
response = self.client.post(
self.register_url,
{
"username": "kiwi-tester",
"password1": "password-1",
"password2": "password-2",
"email": "new-tester@example.com",
},
follow=False,
)
self.assertContains(response, _("The two password fields didn’t match."))
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, "registration/registration_form.html") | Base | 1 |
def assert_user_registration(self, username, follow=False):
with patch("tcms.kiwi_auth.models.secrets") as _secrets:
_secrets.token_hex.return_value = self.fake_activate_key
try:
# https://github.com/mbi/django-simple-captcha/issues/84
# pylint: disable=import-outside-toplevel
from captcha.conf import settings as captcha_settings
captcha_settings.CAPTCHA_TEST_MODE = True
response = self.client.post(
self.register_url,
{
"username": username,
"password1": "password",
"password2": "password",
"email": "new-tester@example.com",
"captcha_0": "PASSED",
"captcha_1": "PASSED",
},
follow=follow,
)
finally:
captcha_settings.CAPTCHA_TEST_MODE = False
user = User.objects.get(username=username)
self.assertEqual("new-tester@example.com", user.email)
if User.objects.filter(is_superuser=True).count() == 1 and user.is_superuser:
self.assertTrue(user.is_active)
else:
self.assertFalse(user.is_active)
key = UserActivationKey.objects.get(user=user)
self.assertEqual(self.fake_activate_key, key.activation_key)
return response, user | Base | 1 |
def test_register_user_already_registered(self):
User.objects.create_user("kiwi-tester", "new-tester@example.com", "password")
response = self.client.post(
self.register_url,
{
"username": "test_user",
"password1": "password",
"password2": "password",
"email": "new-tester@example.com",
},
follow=False,
)
self.assertContains(response, _("A user with that email already exists."))
user = User.objects.filter(username="test_user")
self.assertEqual(user.count(), 0) | Base | 1 |
def test_send_mail_for_password_reset(self, mail_sent):
user = User.objects.create_user("kiwi-tester", "tester@example.com", "password")
user.is_active = True
user.save()
data = {"email": "tester@example.com"}
response = self.client.post(self.password_reset_url, data, follow=True)
self.assertContains(response, _("Password reset email was sent"))
# Verify mail is sent
mail_sent.assert_called_once() | Base | 1 |
def render_GET(self, request):
template = env.get_template('generate_new.html')
sites_len = len(get_all_canary_sites())
now = datetime.datetime.now()
return template.render(settings=settings, sites_len=sites_len, now=now).encode('utf8') | Base | 1 |
def get_signed_upload_url(path: str, download: bool = False) -> str:
client = boto3.client(
"s3",
aws_access_key_id=settings.S3_KEY,
aws_secret_access_key=settings.S3_SECRET_KEY,
region_name=settings.S3_REGION,
endpoint_url=settings.S3_ENDPOINT_URL,
)
params = {
"Bucket": settings.S3_AUTH_UPLOADS_BUCKET,
"Key": path,
}
if download:
params["ResponseContentDisposition"] = "attachment"
return client.generate_presigned_url(
ClientMethod="get_object",
Params=params,
ExpiresIn=SIGNED_UPLOAD_URL_DURATION,
HttpMethod="GET",
) | Class | 2 |
def check_xsend_links(
name: str,
name_str_for_test: str,
content_disposition: str = "",
download: bool = False,
) -> None:
self.login("hamlet")
fp = StringIO("zulip!")
fp.name = name
result = self.client_post("/json/user_uploads", {"file": fp})
uri = self.assert_json_success(result)["uri"]
fp_path_id = re.sub("/user_uploads/", "", uri)
fp_path = os.path.split(fp_path_id)[0]
if download:
uri = uri.replace("/user_uploads/", "/user_uploads/download/")
with self.settings(DEVELOPMENT=False):
response = self.client_get(uri)
assert settings.LOCAL_UPLOADS_DIR is not None
test_run, worker = os.path.split(os.path.dirname(settings.LOCAL_UPLOADS_DIR))
self.assertEqual(
response["X-Accel-Redirect"],
"/internal/uploads/" + fp_path + "/" + name_str_for_test,
)
if content_disposition != "":
self.assertIn("attachment;", response["Content-disposition"])
self.assertIn(content_disposition, response["Content-disposition"])
else:
self.assertIn("inline;", response["Content-disposition"])
self.assertEqual(set(response["Cache-Control"].split(", ")), {"private", "immutable"}) | Class | 2 |
def test_avatar_url_local(self) -> None:
self.login("hamlet")
with get_test_image_file("img.png") as image_file:
result = self.client_post("/json/users/me/avatar", {"file": image_file})
response_dict = self.assert_json_success(result)
self.assertIn("avatar_url", response_dict)
base = "/user_avatars/"
url = self.assert_json_success(result)["avatar_url"]
self.assertEqual(base, url[: len(base)])
# That URL is accessible when logged out
self.logout()
result = self.client_get(url)
self.assertEqual(result.status_code, 200)
# We get a resized avatar from it
image_data = read_test_image_file("img.png")
resized_avatar = resize_avatar(image_data)
assert isinstance(result, StreamingHttpResponse)
self.assertEqual(resized_avatar, b"".join(result.streaming_content))
with self.settings(DEVELOPMENT=False):
# In production, this is an X-Accel-Redirect to the
# on-disk content, which nginx serves
result = self.client_get(url)
self.assertEqual(result.status_code, 200)
internal_redirect_path = urlparse(url).path.replace(
"/user_avatars/", "/internal/user_avatars/"
)
self.assertEqual(result["X-Accel-Redirect"], internal_redirect_path)
self.assertEqual(b"", result.content) | Class | 2 |
def serve_local(
request: HttpRequest, path_id: str, url_only: bool, download: bool = False
) -> HttpResponseBase:
assert settings.LOCAL_FILES_DIR is not None
local_path = os.path.join(settings.LOCAL_FILES_DIR, path_id)
assert_is_local_storage_path("files", local_path)
if not os.path.isfile(local_path):
return HttpResponseNotFound("<p>File not found</p>")
if url_only:
url = generate_unauthed_file_access_url(path_id)
return json_success(request, data=dict(url=url))
mimetype, encoding = guess_type(local_path)
attachment = download or mimetype not in INLINE_MIME_TYPES
if settings.DEVELOPMENT:
# In development, we do not have the nginx server to offload
# the response to; serve it directly ourselves.
# FileResponse handles setting Content-Disposition, etc.
response: HttpResponseBase = FileResponse(open(local_path, "rb"), as_attachment=attachment)
patch_cache_control(response, private=True, immutable=True)
return response
response = internal_nginx_redirect(quote(f"/internal/uploads/{path_id}"))
patch_disposition_header(response, local_path, attachment)
patch_cache_control(response, private=True, immutable=True)
return response | Class | 2 |
def get_local_file_path_id_from_token(token: str) -> Optional[str]:
signer = TimestampSigner(salt=LOCAL_FILE_ACCESS_TOKEN_SALT)
try:
signed_data = base64.b16decode(token).decode()
path_id = signer.unsign(signed_data, max_age=timedelta(seconds=60))
except (BadSignature, binascii.Error):
return None
return path_id | Class | 2 |
def serve_local_avatar_unauthed(request: HttpRequest, path: str) -> HttpResponseBase:
"""Serves avatar images off disk, via nginx (or directly in dev), with no auth.
This is done unauthed because these need to be accessed from HTML
emails, where the client does not have any auth. We rely on the
URL being generated using the AVATAR_SALT secret.
"""
if settings.LOCAL_AVATARS_DIR is None:
# We do not expect clients to hit this URL when using the S3
# backend; however, there is no reason to not serve the
# redirect to S3 where the content lives.
return redirect(
get_public_upload_root_url() + path + "?" + request.GET.urlencode(), permanent=True
)
local_path = os.path.join(settings.LOCAL_AVATARS_DIR, path)
assert_is_local_storage_path("avatars", local_path)
if not os.path.isfile(local_path):
return HttpResponseNotFound("<p>File not found</p>")
if settings.DEVELOPMENT:
response: HttpResponseBase = FileResponse(open(local_path, "rb"))
else:
response = internal_nginx_redirect(quote(f"/internal/user_avatars/{path}"))
# We do _not_ mark the contents as immutable for caching purposes,
# since the path for avatar images is hashed only by their user-id
# and a salt, and as such are reused when a user's avatar is
# updated.
return response | Class | 2 |
def serve_s3(
request: HttpRequest, url_path: str, url_only: bool, download: bool = False
) -> HttpResponse:
url = get_signed_upload_url(url_path, download=download)
if url_only:
return json_success(request, data=dict(url=url))
return redirect(url) | Class | 2 |
def serve_file(
request: HttpRequest,
maybe_user_profile: Union[UserProfile, AnonymousUser],
realm_id_str: str,
filename: str,
url_only: bool = False,
download: bool = False,
) -> HttpResponseBase:
path_id = f"{realm_id_str}/{filename}"
realm = get_valid_realm_from_request(request)
is_authorized = validate_attachment_request(maybe_user_profile, path_id, realm)
if is_authorized is None:
return HttpResponseNotFound(_("<p>File not found.</p>"))
if not is_authorized:
return HttpResponseForbidden(_("<p>You are not authorized to view this file.</p>"))
if settings.LOCAL_UPLOADS_DIR is not None:
return serve_local(request, path_id, url_only, download=download)
return serve_s3(request, path_id, url_only, download=download) | Class | 2 |
def generate_unauthed_file_access_url(path_id: str) -> str:
signed_data = TimestampSigner(salt=LOCAL_FILE_ACCESS_TOKEN_SALT).sign(path_id)
token = base64.b16encode(signed_data.encode()).decode()
filename = path_id.split("/")[-1]
return reverse("local_file_unauthed", args=[token, filename]) | Class | 2 |