query
stringlengths
9
3.4k
document
stringlengths
9
87.4k
metadata
dict
negatives
listlengths
4
101
negative_scores
listlengths
4
101
document_score
stringlengths
3
10
document_rank
stringclasses
102 values
Get full version information of webservice as a string.
def _get_webservice_versionstring(self, service): version = self.get_webservice_version(service) return ".".join(map(str, version))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_version(self):\n return self.http_call(\"get\", url=f\"{self.base_url}/version\").json()", "def GetVersion(self):\n return self._SendRequest(HTTP_GET, \"/version\", None, None)", "def version():\n version_info = pbr.version.VersionInfo('ardana-service')\n return version_info.version_str...
[ "0.75000274", "0.7449257", "0.73129904", "0.7277117", "0.7266704", "0.7241125", "0.72253454", "0.7220058", "0.7202378", "0.7146718", "0.7130265", "0.711573", "0.71138686", "0.71088123", "0.7105085", "0.7094832", "0.7075875", "0.69911677", "0.69908917", "0.6990632", "0.6978907...
0.7781521
0
Attaches the actually used dataselet URL to each Trace.
def _attach_dataselect_url_to_stream(self, st): url = self._build_url("dataselect", "query") for tr in st: tr.stats._fdsnws_dataselect_url = url
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def __traces_url(self):\n path = AGENT_TRACES_PATH % self.from_.pid\n return \"http://%s:%s/%s\" % (self.host, self.port, path)", "def getDataUrls(self):\n sub1 = self.id[0:3]\n sub2 = self.id[3:6]\n sub3 = self.id[6:9]\n self.xml = \"%s/static/model/%s/%s/%s/%s.xml\" % ...
[ "0.58800995", "0.557478", "0.5461198", "0.5454662", "0.5379516", "0.5288198", "0.51027226", "0.50803655", "0.5057965", "0.50400174", "0.500458", "0.4987462", "0.49711695", "0.49326184", "0.49326184", "0.49260262", "0.49156776", "0.4909598", "0.4874118", "0.48735803", "0.48605...
0.58549017
1
Takes any value and converts it to a string compliant with the FDSN webservices. Will raise a ValueError if the value could not be converted. >>> print(convert_to_string("abcd")) abcd >>> print(convert_to_string(1)) 1 >>> print(convert_to_string(1.2)) 1.2 >>> print(convert_to_string( \ UTCDateTime(2012, 1, 2, 3, 4, 5, 666666)))
def convert_to_string(value): if isinstance(value, str): return value # Boolean test must come before integer check! elif isinstance(value, bool): return str(value).lower() elif isinstance(value, int): return str(value) elif isinstance(value, float): return str(value) elif isinstance(value, UTCDateTime): return str(value).replace("Z", "") else: raise TypeError("Unexpected type %s" % repr(value))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def convert_to_string(value: Any) -> str:\n if isinstance(value, str):\n return value\n\n if isinstance(value, bytes):\n return value.decode(\"utf-8\")\n\n return str(value)", "def convert_to_str(input_string):\n\n if sys.version < '3':\n\n if isinstance(input_string, str) \\\n ...
[ "0.6892246", "0.6521637", "0.64140946", "0.6409152", "0.6387546", "0.630994", "0.6246186", "0.62287396", "0.6162186", "0.61320966", "0.61231935", "0.6070894", "0.6000176", "0.5963317", "0.59516305", "0.5947143", "0.5946218", "0.589599", "0.58766305", "0.5855638", "0.5848334",...
0.7913156
0
URL builder for the FDSN webservices. Built as a separate function to enhance testability.
def build_url(base_url, service, major_version, resource_type, parameters=None, service_mappings=None, subpath='fdsnws'): # Avoid mutable kwargs. if parameters is None: parameters = {} if service_mappings is None: service_mappings = {} # Only allow certain resource types. if service not in ["dataselect", "station"]: msg = "Resource type '%s' not allowed. Allowed types: \n%s" % \ (service, ",".join(("dataselect", "station"))) raise ValueError(msg) # Special location handling. if "location" in parameters: loc = parameters["location"].replace(" ", "") # Empty location. if not loc: loc = "--" # Empty location at start of list. if loc.startswith(','): loc = "--" + loc # Empty location at end of list. if loc.endswith(','): loc += "--" # Empty location in middle of list. loc = loc.replace(",,", ",--,") parameters["location"] = loc # Apply per-service mappings if any. if service in service_mappings: url = "/".join((service_mappings[service], resource_type)) else: if subpath is None: parts = (base_url, service, str(major_version), resource_type) else: parts = (base_url, subpath.lstrip('/'), service, str(major_version), resource_type) url = "/".join(parts) if parameters: # Strip parameters. for key, value in parameters.items(): try: parameters[key] = value.strip() except Exception: pass url = "?".join((url, urlencode(parameters, safe=':,*'))) return url
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _uri(helper):\n return '/'.join((\n helper.context_meta['server_uri'],\n 'servicesNS',\n 'nobody',\n 'Splunk_TA_paloalto',\n 'storage',\n 'collections',\n 'data',\n 'minemeldfeeds'))", "def new_url(module):\n # To create the URL, we need to take t...
[ "0.66425973", "0.65290934", "0.64784485", "0.6391138", "0.6334783", "0.6291143", "0.6275944", "0.62580895", "0.6257961", "0.6254215", "0.6222721", "0.6197548", "0.61817586", "0.61761576", "0.61630815", "0.6116728", "0.6090364", "0.6049933", "0.6026856", "0.60260284", "0.60106...
0.6375497
4
Raise an error for non200 HTTP response codes
def raise_on_error(code, data): # get detailed server response message if code != 200: try: server_info = data.read() except Exception: server_info = None else: server_info = server_info.decode('ASCII', errors='ignore') if server_info: server_info = "\n".join( line for line in server_info.splitlines() if line) # No data. if code == 204: raise FDSNNoDataException("No data available for request.", server_info) elif code == 400: msg = ("Bad request. If you think your request was valid " "please contact the developers.") raise FDSNBadRequestException(msg, server_info) elif code == 401: raise FDSNUnauthorizedException("Unauthorized, authentication " "required.", server_info) elif code == 403: raise FDSNForbiddenException("Authentication failed.", server_info) elif code == 413: raise FDSNRequestTooLargeException("Request would result in too much " "data. Denied by the datacenter. " "Split the request in smaller " "parts", server_info) # Request URI too large. elif code == 414: msg = ("The request URI is too large. Please contact the ObsPy " "developers.", server_info) raise NotImplementedError(msg) elif code == 429: msg = ("Sent too many requests in a given amount of time ('rate " "limiting'). Wait before making a new request.", server_info) raise FDSNTooManyRequestsException(msg, server_info) elif code == 500: raise FDSNInternalServerException("Service responds: Internal server " "error", server_info) elif code == 503: raise FDSNServiceUnavailableException("Service temporarily " "unavailable", server_info) elif code is None: if "timeout" in str(data).lower() or "timed out" in str(data).lower(): raise FDSNTimeoutException("Timed Out") else: raise FDSNException("Unknown Error (%s): %s" % ( (str(data.__class__.__name__), str(data)))) # Catch any non 200 codes. elif code != 200: raise FDSNException("Unknown HTTP code: %i" % code, server_info)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _raise_if_error(response):\n if response.status_code != 200:\n raise SimpleHTTPException(response)", "def httperror( status_code=500, message=b'' ):", "def _error_response(self):\r\n response_dict = {'success': False, 'version': 1}\r\n self.send_response(\r\n 400, content...
[ "0.79923856", "0.78246117", "0.75727075", "0.7558555", "0.7428551", "0.7423152", "0.7419725", "0.7410135", "0.7408078", "0.7386633", "0.7350406", "0.73329675", "0.7314682", "0.7308306", "0.7260337", "0.72574466", "0.72411", "0.7198853", "0.7189374", "0.71558493", "0.714456", ...
0.0
-1
Returns a pair of tuples. The first one is the returned HTTP code and the second the data as string. Will return a tuple of Nones if the service could not be found. All encountered exceptions will get raised unless `debug=True` is specified. Performs a http GET if data=None, otherwise a http POST.
def download_url(url, opener, timeout=10, headers={}, debug=False, return_string=True, data=None, use_gzip=True, use_jwt=None): if debug is True: print("Downloading %s %s requesting gzip compression" % ( url, "with" if use_gzip else "without")) if data: print("Sending along the following payload:") print("-" * 70) print(data.decode()) print("-" * 70) try: request = urllib_request.Request(url=url, headers=headers) # Request gzip encoding if desired. if use_gzip: request.add_header("Accept-encoding", "gzip") if use_jwt: request.add_header("accept", "application/json") request.add_header("Authorization", f'JWT {use_jwt}') url_obj = opener.open(request, timeout=timeout, data=data) # Catch HTTP errors. except urllib_request.HTTPError as e: if debug is True: msg = "HTTP error %i, reason %s, while downloading '%s': %s" % \ (e.code, str(e.reason), url, e.read()) print(msg) return e.code, e except Exception as e: if debug is True: print("Error while downloading: %s" % url) return None, e code = url_obj.getcode() # Unpack gzip if necessary. if url_obj.info().get("Content-Encoding") == "gzip": if debug is True: print("Uncompressing gzipped response for %s" % url) # Cannot directly stream to gzip from urllib! # http://www.enricozini.org/2011/cazzeggio/python-gzip/ buf = io.BytesIO(url_obj.read()) buf.seek(0, 0) f = gzip.GzipFile(fileobj=buf) else: f = url_obj if return_string is False: data = io.BytesIO(f.read()) else: data = f.read() if debug is True: print("Downloaded %s with HTTP code: %i" % (url, code)) return code, data
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def request(self, service, data):\n _res = self._request(service, data)\n res = _res.json()[0][0]\n if res[\"success\"] == True:\n return res[\"result\"]\n else:\n err_msg = res[\"errmsg\"]\n raise Exception(\"Request not successful: '{0}'\".format(err_m...
[ "0.6488388", "0.6167007", "0.6129654", "0.59609777", "0.5832977", "0.5625253", "0.561716", "0.55700266", "0.5563676", "0.5545068", "0.5503565", "0.5501214", "0.5465591", "0.5437202", "0.5437178", "0.5410425", "0.5378788", "0.5372288", "0.53615326", "0.53574157", "0.5351421", ...
0.0
-1
Test `construct_compose_dict` returns expected compose dict.
def test_construct_compose_dict(self): expected_examplescraper_compose_dict = { "version": "3", "services": { "scp1": { "container_name": "scp1", "environment": [ "TOR_PORT=9051", "TOR_PASSWORD=I-solemnly-swear-I-am-up-to-no-good", "PRIVOXY_PORT=8118", "PRIVOXY_HOST=127.0.0.1", "IPSTORE_PORT=5000", "IPSTORE_HOST=scp1", "URLBROKER_PORT=6000", "URLBROKER_HOST=scp1", "DATASTORE_PORT=7000", "DATASTORE_HOST=scp1", "HEALTHCHECK_PORT=8000", "HEALTHCHECK_HOST=scp1", "SCRAPER_PACKAGE=examplescraper", "DOCKER_HOST_IP=fake_docker_host_ip", "SCRAPER_CONFIG=tests.integration.fake_config", ], "hostname": "scp1", "image": "scp:latest", "volumes": ["/fake_curent_dir:/scp"], "build": { "context": "/fake_curent_dir", "dockerfile": "/fake_curent_dir/Dockerfile", }, "entrypoint": "/scp/scrapemeagain/dockerized/entrypoints/entrypoint.scp1.sh", }, "scp2": { "container_name": "scp2", "environment": [ "TOR_PORT=9051", "TOR_PASSWORD=I-solemnly-swear-I-am-up-to-no-good", "PRIVOXY_PORT=8118", "PRIVOXY_HOST=127.0.0.1", "IPSTORE_PORT=5000", "IPSTORE_HOST=scp1", "URLBROKER_PORT=6000", "URLBROKER_HOST=scp1", "DATASTORE_PORT=7000", "DATASTORE_HOST=scp1", "HEALTHCHECK_PORT=8000", "HEALTHCHECK_HOST=scp1", "SCRAPER_PACKAGE=examplescraper", "DOCKER_HOST_IP=fake_docker_host_ip", "SCRAPER_CONFIG=tests.integration.fake_config", ], "hostname": "scp2", "image": "scp:latest", "volumes": ["/fake_curent_dir:/scp"], "depends_on": ["scp1"], "entrypoint": "/scp/scrapemeagain/dockerized/entrypoints/entrypoint.scpx.sh", }, }, } self.assertEqual( expected_examplescraper_compose_dict, docker_compose.construct_compose_dict( "examplescraper", "tests.integration.fake_config" ), )
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_construct_compose_dict_nonexisting_scraper(self):\n with self.assertRaises(ModuleNotFoundError):\n docker_compose.construct_compose_dict(\"nonexisting\")", "def test_build_exchange_dictionary(self):\n expected = {\n \"USD\": {\"USD\": Decimal(1.0), \"AUD\": Decimal(2....
[ "0.611908", "0.59375006", "0.56059444", "0.5426353", "0.54253703", "0.54189974", "0.5415257", "0.5411503", "0.5383426", "0.537418", "0.52742", "0.52094966", "0.51827496", "0.5154304", "0.51462847", "0.51409686", "0.5090625", "0.5080768", "0.50691545", "0.5063073", "0.50582147...
0.70209265
0
Test `construct_compose_dict` raises `ModuleNotFoundError` for a nonexisting scraper.
def test_construct_compose_dict_nonexisting_scraper(self): with self.assertRaises(ModuleNotFoundError): docker_compose.construct_compose_dict("nonexisting")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_construct_compose_dict(self):\n expected_examplescraper_compose_dict = {\n \"version\": \"3\",\n \"services\": {\n \"scp1\": {\n \"container_name\": \"scp1\",\n \"environment\": [\n \"TOR_PORT=9051\",\...
[ "0.6682241", "0.5706916", "0.5090552", "0.5024181", "0.4941659", "0.48633698", "0.48114425", "0.4799155", "0.47261176", "0.4679527", "0.46407944", "0.46372876", "0.46235257", "0.46136138", "0.45923778", "0.45748225", "0.45726362", "0.45706356", "0.4561969", "0.4499325", "0.44...
0.862838
0
The right side of the deque contains the most recent experiences
def __init__(self, buffer_size, random_seed=0): self.buffer_size = buffer_size self.count = 0 self.buffer = deque() random.seed(random_seed)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def latest(scores):\n return scores[-1]", "def latest(scores):\n return scores[-1]", "def pop(self):\r\n tep = []\r\n res = -1\r\n cur = self.num\r\n cache = 0\r\n while self.queue and cur>1:\r\n cache = self.queue.pop(0)\r\n tep.append(cache)\r\n ...
[ "0.6058396", "0.6058396", "0.57243985", "0.56735945", "0.5633339", "0.5598546", "0.55795336", "0.55455464", "0.5523354", "0.5502296", "0.5471962", "0.54705167", "0.54230124", "0.54040325", "0.5395359", "0.5392009", "0.53752583", "0.53745955", "0.53745955", "0.53739977", "0.53...
0.0
-1
Helper function for creating comments
def create_comment(host, ratings): comment2 = models.Comment(text='test', host=host) comment2.save() types = models.RatingType.objects.all() items = [] for value, type in zip(ratings, types): tmp_obj = models.Rating(comment=comment2, type=type, value=value) tmp_obj.save() items.append(tmp_obj) return items
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def comment():", "def make_comment(self, offset, comment):\n #self.ret = idc.MakeComm(offset, comment)\n return self.ret", "def _generate_pr_comment_markdown(self, data):\n pass", "def make_comment(body, who, when):\n out = et.Element(\"comment\")\n et.SubElement(out, \"author\").t...
[ "0.8127862", "0.7648583", "0.73642707", "0.7284331", "0.71694934", "0.7156341", "0.7098395", "0.7096016", "0.7049658", "0.7032663", "0.70154613", "0.6999756", "0.6958425", "0.694555", "0.6930876", "0.68902373", "0.6805101", "0.6797719", "0.6752024", "0.674669", "0.6606376", ...
0.0
-1
Test the rating limit
def _testRatingLimit(self): comment = models.Comment.objects.all()[0] type = models.RatingType.objects.all()[0] try: val = type.limit + 10 rating = models.Rating(comment=comment, type=type, value=val) rating.save() assert rating.value == type.limit finally: rating.delete()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_unsuccessful_rating_with_rate_value_more_than_five(self):\n response = self.client.post(\n reverse('articles:rate', kwargs={'slug': self.slug}),\n {'rating': 6},\n format=\"json\",\n **self.headers)\n self.assertEqual(response.status_code, status.H...
[ "0.70283306", "0.696573", "0.68655944", "0.670783", "0.649507", "0.6459995", "0.6455193", "0.64220494", "0.63810766", "0.63534164", "0.63452977", "0.6326962", "0.6323487", "0.6303592", "0.6256071", "0.6230649", "0.62118495", "0.6192096", "0.61686665", "0.6168111", "0.6138926"...
0.7624382
0
Test individual comment rating
def _testCommentRating(self): try: host = models.Host.objects.all()[0] comment = models.Comment(text='test', host=host) comment.save() types = models.RatingType.objects.all() items = [] for value, type in zip([3, 4, 5], types): tmp_obj = models.Rating(comment=comment, type=type, value=value) tmp_obj.save() items.append(tmp_obj) assert comment.rating() - 4.0 < .0001, comment.rating() for tmp_obj in items: tmp_obj.delete() items = [] for value, type in zip([3, 3], types): tmp_obj = models.Rating(comment=comment, type=type, value=value) tmp_obj.save() items.append(tmp_obj) assert comment.rating() == 3.0, comment.rating() finally: for tmp_obj in items: tmp_obj.delete() comment.delete()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_upvote_modifies_comment_score(self):\n comment = Comment.objects.get(body=\"987XYZ\")\n self.assertEqual(comment.score, DEFAULT_SCORE)\n vote = Vote.create(comment=comment, value=1, voter=self.user)\n comment = Comment.objects.get(body=\"987XYZ\")\n self.assertEqual(comm...
[ "0.70396376", "0.68224984", "0.6756108", "0.65874946", "0.6557403", "0.65549046", "0.64839643", "0.6482616", "0.64394224", "0.6431737", "0.63834304", "0.6381632", "0.63514405", "0.6258549", "0.62077975", "0.6185496", "0.613789", "0.61266005", "0.6110438", "0.61005175", "0.609...
0.7833201
0
Test individual host rating
def _testHostRating(self): try: user = auth.User.objects.all()[0] category = models.Category.objects.all()[0] host = models.Host(user=user, category=category, url='http://blah.com') host.save() comment = models.Comment(text='test', host=host) comment.save() types = models.RatingType.objects.all() items = [] for value, type in zip([3, 4, 5], types): tmp_obj = models.Rating(comment=comment, type=type, value=value) tmp_obj.save() items.append(tmp_obj) assert comment.rating() - 4 < .0001, comment.rating() comment2 = models.Comment(text='test', host=host) comment2.save() for value, type in zip([3, 3, 3], types): tmp_obj = models.Rating(comment=comment2, type=type, value=value) tmp_obj.save() items.append(tmp_obj) assert comment2.rating() - 3.0 < .0001, comment2.rating() assert host.rating() == 3.5, host.rating() assert host.rating(100) == 70, host.rating(100) finally: try: for tmp_obj in items: tmp_obj.delete() comment.delete() comment2.delete() host.delete() except: pass
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_connection_score(self, current_connection):\n orange_score = int(Setup.orange_score)\n red_score = int(Setup.red_score)\n if Setup.system_status == 'orange':\n if current_connection['score'] <= int(orange_score):\n self.block_ip_address(current_connection['ip...
[ "0.6140125", "0.60586387", "0.5988735", "0.5964071", "0.5828688", "0.58104444", "0.5805761", "0.5743598", "0.55654544", "0.55423003", "0.54669577", "0.5444215", "0.5438256", "0.5420251", "0.54023296", "0.5397459", "0.5385342", "0.5385126", "0.5348706", "0.5313948", "0.530791"...
0.72438645
0
Test the different rating categories
def _testRatingCategories(self): try: user = auth.User.objects.all()[0] category = models.Category.objects.all()[0] host = models.Host(user=user, category=category, url='http://blah.com') host.save() comment = models.Comment(text='test', host=host) comment.save() types = models.RatingType.objects.all() items = [] for value, type in zip([3, 4, 5], types): tmp_obj = models.Rating(comment=comment, type=type, value=value) tmp_obj.save() items.append(tmp_obj) assert comment.rating() - 4.0 < .0001, comment.rating() comment2 = models.Comment(text='test', host=host) comment2.save() for value, type in zip([3, 3, 3], types): tmp_obj = models.Rating(comment=comment2, type=type, value=value) tmp_obj.save() items.append(tmp_obj) assert comment2.rating() - 3.0 < .0001, comment2.rating() assert host.rating() == 3.5, host.rating() ratings = host.ratings() assert ratings['Support'] == 3.5, ratings assert ratings['Features'] == 3.0 assert ratings['Uptime'] == 4.0 finally: try: for tmp_obj in items: tmp_obj.delete() comment.delete() comment2.delete() host.delete() except: pass
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_get_cat_score(self):\n classes = ['blue skin', 'pointy ears']\n negated_classes = []\n categories = ['ear feature', 'skin feature']\n\n categorical_score = self.annot_scorer._get_categorical_score(\n classes, negated_classes, categories,\n self.negation_we...
[ "0.67347944", "0.6602649", "0.6267001", "0.62622076", "0.588277", "0.5818572", "0.57982117", "0.5775886", "0.5757851", "0.57468426", "0.5741155", "0.57397753", "0.5724523", "0.57127404", "0.5693196", "0.5673478", "0.56529504", "0.56025386", "0.5571396", "0.5566037", "0.556075...
0.7431953
0
Test the host leaderboard. This assigns a rank to every host in the system using their overall rating
def testHostLeaderboard(self): try: hosts = models.Host.objects.leaderboard() host1, host2 = hosts[0], hosts[1] comments = [] for x in xrange(2): comments.extend(create_comment(host2, [5, 5, 5])) # Verify host2 is now in the #1 position hosts = models.Host.objects.leaderboard() assert host2 == hosts[0], (host2, hosts[0]) assert host2.rank() == 1, host2.rank() finally: try: for comment in comments: comment.delete() except UnboundLocalError: pass
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def do_score(self, hosts, vm, args):\n try:\n hostScores = []\n # use hosts IDs and VM ID to call the Rest API and make a decision\n for hostID in hosts:\n # Do work\n hostScores.append((hostID, 50))\n print(hostScores)\n excep...
[ "0.6449874", "0.6170163", "0.60771066", "0.59071666", "0.5904425", "0.59005624", "0.58133054", "0.578773", "0.5751117", "0.5686264", "0.5666117", "0.56557053", "0.56431925", "0.5618589", "0.5613032", "0.5603022", "0.55905026", "0.55885845", "0.55877984", "0.55605876", "0.5533...
0.6035998
3
Hit a BJcard and append it. Then, find all possible sums and the current hand. The current hand is defined as max. of possible sums The current hand should be 1 if burst
def hit(self, card): self.append(card) values=[] values.append(card.value()) if values[0] < 2: values.append(values[0]+ 10) new_sums =set([v+s for v in values for s in self.possible_sums if v+s <=21]) new_sums =sorted(new_sums) if len(new_sums) ==0: self.hand=-1 else: self.hand = new_sums[-1] self.possible_sums = new_sums
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def sum_hand(self, cards):\n self.totalValue = 0\n for card in cards:\n self.totalValue += DeckOfCards.value(self, card)\n\n for card in cards:\n if self.totalValue > 21 and 'A' in card:\n self.totalValue -= 10\n \n if self.totalValue ...
[ "0.688753", "0.6112849", "0.6100922", "0.6085325", "0.5997567", "0.5966301", "0.5873897", "0.5858032", "0.58062357", "0.5799236", "0.5777938", "0.57645684", "0.5712703", "0.5693552", "0.5690028", "0.5632079", "0.5579677", "0.557833", "0.55535036", "0.5553048", "0.5537502", ...
0.70891213
0
Is current cards the Blackjack?
def is_blackjack(self): if self.hand == 21 and len(list(self)) ==2: print '%s = Blackjack'%self return True
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def hasBlackjack(self):\n return len(self.cards) == 2 and self.getPoints() == 21", "def is_blackjack(self) -> bool:\n if self.score == 21 and len(self.cards) == 2:\n return True\n else:\n return False", "def check_for_blackjack(self):\n if (self.dealer.hand.val...
[ "0.8288965", "0.81736314", "0.7500575", "0.72958475", "0.7241689", "0.7119909", "0.7114075", "0.69564337", "0.68808556", "0.6869995", "0.67837375", "0.6718618", "0.66275245", "0.6555914", "0.6503698", "0.6475533", "0.6465826", "0.64576113", "0.64328235", "0.642792", "0.642430...
0.81807315
1
Restart another round. Check the remaining budget and leave the game if budget <= 0. Create new BJCards
def restart(self): self.state ='active' if self.budget <= 0: return self.leave() self.cards =BJCards() self.bet_amount =0
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def restart(self):\r\n\r\n self.pot = 0\r\n self.actions = 0\r\n self.previous_bet = self.small_blind\r\n self.initiate_blind(self.small_blind + self.big_blind)\r\n\r\n for player in self.players:\r\n player.credits = self.starting_credits\r\n\r\n # Let the firs...
[ "0.61270714", "0.6029881", "0.5949783", "0.5931026", "0.58528876", "0.5843963", "0.5828268", "0.5805273", "0.5793747", "0.5761536", "0.5664966", "0.5653092", "0.5644708", "0.5637475", "0.5613593", "0.5606919", "0.5594004", "0.5582903", "0.55661905", "0.5557825", "0.5552996", ...
0.77978367
0
join the Blackjack game
def join(self, game): self.game = game self.game.join(self) return self.game
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "async def blackjack(self, ctx, arg: int): \n db = sqlite3.connect('main.sqlite')\n cursor = db.cursor()\n cursor.execute(f'SELECT user_id, jacks FROM main WHERE user_id = {ctx.author.id}')\n result = cursor.fetchone()\n embed = discord.Embed(color=0x228b22, title=\"Blackjack\")\n...
[ "0.7030959", "0.6990644", "0.6882294", "0.67322326", "0.6602182", "0.6552863", "0.6547645", "0.64960307", "0.6491208", "0.6406294", "0.63787186", "0.6371268", "0.6298644", "0.6256555", "0.6254558", "0.62455046", "0.6233357", "0.6187183", "0.6178893", "0.61660373", "0.6058592"...
0.6059402
20
Leave the Blackjack game
def leave(self): self.game.leave(self) return self.game
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def endgame(winner):", "def endGame(self):\n pass", "def endGame(self):\n #self.active = False\n self.inGame = False\n self.hand = []\n self.position = None", "def endGame(self, message):\n print(self.board)\n print(\"Game over! \" + message)\n self.gam...
[ "0.74645424", "0.7270123", "0.7228892", "0.7090672", "0.70323604", "0.7011221", "0.6981158", "0.69738483", "0.69464296", "0.69343877", "0.68973416", "0.6895848", "0.68888074", "0.68749535", "0.68722606", "0.68307114", "0.68303573", "0.6770282", "0.6745754", "0.6744514", "0.67...
0.67128646
22
Bet the amount of money. Cannot exceed player's budget
def bet(self, amount): if amount >self.budget: print 'you cannot bet because of little money' else: self.bet_amount = amount print 'you bet %s' % (amount)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def bet(self, amount):\r\n\r\n if self.players[self.active_player].credits < self.big_blind:\r\n message = \"Player {} won! Not enough money remaining.\".format(self.players[(self.active_player + 1) %\r\n len(se...
[ "0.7924277", "0.77066535", "0.7635262", "0.7549707", "0.73498285", "0.7271914", "0.7244077", "0.7236459", "0.7130203", "0.70558107", "0.70258206", "0.69620943", "0.6960594", "0.6937988", "0.6884604", "0.68403745", "0.67884886", "0.6780848", "0.6771703", "0.67711383", "0.67351...
0.8240085
0
Hit a card and check if bust
def hit(self, card): self.cards.hit(card) if self.cards.hand ==-1: self.state ='burst'
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def hit(player):\n deal_random_card(player)", "def action_hit(self) -> None:\n print(self.deal_card(self.user))", "def hit(self, player):\n\n hit_card = self.deck.draw()\n hit_card.flip()\n player.take_card(hit_card)\n\n if self.verbose:\n print(player, 'receive...
[ "0.74350667", "0.71425444", "0.70626", "0.7058777", "0.70395243", "0.69725394", "0.691524", "0.68729144", "0.6838547", "0.68238586", "0.6760089", "0.6660787", "0.6624675", "0.6563094", "0.6508721", "0.6479972", "0.6469464", "0.642803", "0.63956666", "0.63560945", "0.63471985"...
0.76179534
0
Get a card from the deck
def get_card(self): return self.deck.pop()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_card(self, suit, face):\n for card in self.deck:\n if card.suit == suit and card.value == face:\n return card", "def get_card (self, card):\n\t\treturn self._card", "def getCard(self,id):\n if not self.cardExists(id):\n return None\n return self...
[ "0.82108986", "0.7907498", "0.75914544", "0.7576521", "0.7507439", "0.7427973", "0.7373642", "0.7182664", "0.71470886", "0.714387", "0.7045964", "0.7023739", "0.70161986", "0.69853085", "0.6985152", "0.69819593", "0.6978282", "0.697606", "0.6964646", "0.6952393", "0.69453347"...
0.7492779
5
join a Blackjack game
def join(self, game): self.game = game self.game.dealer_join(self) return self.game
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "async def blackjack(self, ctx, arg: int): \n db = sqlite3.connect('main.sqlite')\n cursor = db.cursor()\n cursor.execute(f'SELECT user_id, jacks FROM main WHERE user_id = {ctx.author.id}')\n result = cursor.fetchone()\n embed = discord.Embed(color=0x228b22, title=\"Blackjack\")\n...
[ "0.71508825", "0.70144653", "0.6957493", "0.6783386", "0.6752723", "0.66632354", "0.65668845", "0.6473319", "0.6463963", "0.64573056", "0.6366622", "0.63504195", "0.63378847", "0.6292621", "0.62136537", "0.61802137", "0.6176531", "0.61553186", "0.6077009", "0.60752976", "0.60...
0.60142314
23
Leave the Blackjack game
def leave(self): self.game.dealer_leave(self) return self.game
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def endgame(winner):", "def endGame(self):\n pass", "def endGame(self):\n #self.active = False\n self.inGame = False\n self.hand = []\n self.position = None", "def endGame(self, message):\n print(self.board)\n print(\"Game over! \" + message)\n self.gam...
[ "0.74646693", "0.7270419", "0.72290385", "0.7090806", "0.7031714", "0.7010953", "0.69811714", "0.6973975", "0.694579", "0.6934921", "0.6897715", "0.68954486", "0.6889108", "0.68746746", "0.6872009", "0.68316245", "0.6830617", "0.6770507", "0.6744722", "0.6717705", "0.67132074...
0.6745219
18
Face up dealer's hidden card and balance with players in the game
def showdown(self): print "%s: %s" %(self.name, repr(self.cards)) # open dealer's cards for player in self.game.players: win = self.balance(player) if win > 0: print player.name, 'wins', win elif win == 0: print player.name, 'draws' elif win <0: print player.name, 'loses', -(win) self.budget -= win player.budget += win print 'budget of %s : %s'%(player.name,player.budget) print 'budget of %s : %s'%(self.name,self.budget)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def balance(self, player):\n print 'hand of %s: %s'%(player.name,player.cards.hand)\n print 'hand of %s: %s'%(self.name,self.cards.hand)\n if player.cards.hand == self.cards.hand:\n return 0\n elif player.cards.hand > self.cards.hand:\n return player.bet_amount*2\n...
[ "0.75316435", "0.6755466", "0.6731837", "0.66685313", "0.6465247", "0.6453599", "0.644847", "0.63940364", "0.63748866", "0.6362192", "0.63309693", "0.6303815", "0.6255874", "0.6199245", "0.61905754", "0.61843073", "0.6167011", "0.61617035", "0.6152823", "0.61407375", "0.61354...
0.71721745
1
Who wins? Caculate payback according to player's betting amount.
def balance(self, player): print 'hand of %s: %s'%(player.name,player.cards.hand) print 'hand of %s: %s'%(self.name,self.cards.hand) if player.cards.hand == self.cards.hand: return 0 elif player.cards.hand > self.cards.hand: return player.bet_amount*2 else: return -player.bet_amount
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def event_player_blackjack(self) -> None:\n win_amount = self.user.bet + 1.5\n print(\"Congratulations, you win:\", win_amount)\n self.user.win_balance(win_amount)", "def event_player_wins(self) -> None:\n win_amount = self.user.bet\n print(\"Congratulations, you win:\", win_am...
[ "0.78889173", "0.7863441", "0.74159753", "0.7214008", "0.7061695", "0.6963988", "0.69313955", "0.6821517", "0.6811732", "0.6775343", "0.6758019", "0.675369", "0.6688329", "0.66653264", "0.6658345", "0.6645285", "0.6638779", "0.6634569", "0.6626352", "0.6548291", "0.65355986",...
0.71189225
4
Player can choose hit or stand
def deal_player(self, player): answer = self.__ask_hit_or_stand(player) if answer in ('hit'): player.hit(self.get_card()) elif answer in('stand'): player.stand()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_user_input(self, game, hand, message, allowed_actions):\n if random.random() < 0.5:\n return 'hit'\n else:\n return 'stand'", "def get_player_action(self) -> None:\n print(f\"\\nYou have: {self.user.hand.cards} totalling to {self.user.hand.value}\")\n whi...
[ "0.69956726", "0.6720179", "0.66457313", "0.65549785", "0.6437031", "0.6429562", "0.6400316", "0.6282642", "0.6279392", "0.627747", "0.62070674", "0.61437625", "0.6102284", "0.6101464", "0.6083852", "0.6042383", "0.602248", "0.602248", "0.60220647", "0.5976155", "0.5967995", ...
0.5991139
19
Dealer have no choice. Stand if hand >= 17, otherwise hit
def deal_self(self): self.cards.hit(self.get_card()) if self.cards.hand < 17 and self.cards.hand>=0: self.state = 'active' elif self.cards.hand >= 17 and self.cards.hand <= 21: self.state = 'stand' elif self.cards.hand==-1: self.state = 'burst'
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def stand(hand=bj.player1.hand):\r\n phv = bj.player1.hand_value_check(hand) # check player hand value\r\n phv = [x for x in phv if x <= 21]\r\n if hand == bj.player1.hand:\r\n if len(phv) > 0:\r\n bj.player1.final_hand_val = max(phv)\r\n ...
[ "0.7471603", "0.74018157", "0.72898954", "0.7104887", "0.7001504", "0.6908795", "0.6888352", "0.68266976", "0.6750696", "0.6739227", "0.6719116", "0.66749984", "0.6645196", "0.66190434", "0.65957236", "0.6576811", "0.6549812", "0.65478945", "0.65337396", "0.6461222", "0.64541...
0.74406487
1
Finds valid positions for Tile mover in Skilaverkefni 8 Takes in current position of the game
def validpositions(tile): if tile == 11 or tile == 21: valid_pos = "n" elif tile == 12: valid_pos = "nes" elif tile == 13: valid_pos = "es" elif tile == 22 or tile == 33: valid_pos = "sw" elif tile == 23: valid_pos = "ew" elif tile == 32: valid_pos = "ns" possible_directions(valid_pos) return valid_pos
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def checkMoves(self,board):\n possibleMoves = []\n\n for c in xrange(0,8):\n for r in xrange(0,8):\n if board.isValidMove(self.tile,c,r):\n possibleMoves.append(c+r*8)\n\n return possibleMoves", "def winningMove():\r\n\tglobal turn, tile1, tile2, ...
[ "0.6583925", "0.6443366", "0.642417", "0.6352862", "0.62645966", "0.62173724", "0.61744225", "0.6168146", "0.61642736", "0.61634237", "0.61494106", "0.6132909", "0.61250454", "0.6120502", "0.6100356", "0.6093781", "0.6078856", "0.6061042", "0.6057439", "0.6056023", "0.6047237...
0.68247694
0
Prints out valid positions to go to
def possible_directions(valid_positions): if valid_positions == "n": print("You can travel: (N)orth.") elif valid_positions == "nes": print("You can travel: (N)orth or (E)ast or (S)outh.") elif valid_positions == "es": print("You can travel: (E)ast or (S)outh.") elif valid_positions == "sw": print("You can travel: (S)outh or (W)est.") elif valid_positions == "ew": print("You can travel: (E)ast or (W)est.") elif valid_positions == "ns": print("You can travel: (N)orth or (S)outh.")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def my_print(self):\n if self.size == 0:\n print(\"\")\n return\n for j in range(self.__position[1]):\n print(\"\")\n for i in range(self.size):\n if self.__position[0] > 0:\n print(\" \" * self.__position[0], end=\"\")\n pr...
[ "0.658064", "0.6546358", "0.63674", "0.6359783", "0.62899244", "0.62558407", "0.6252899", "0.6234874", "0.62266046", "0.62041545", "0.613042", "0.6112777", "0.6104966", "0.60872805", "0.6075873", "0.60727435", "0.60556525", "0.60367554", "0.60364425", "0.6023976", "0.60142267...
0.60669726
16
Changes the tile according to what letter was put in a string Takes 2 arguments one for which direction was chosen and one for which tile it is currently located at Returns new tile
def tile_change(direction, tile): lower_direction = direction.lower() if lower_direction == "n": tile += 1 elif lower_direction == "s": tile -= 1 elif lower_direction == "e": tile += 10 else: tile -= 10 return tile
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def changeTile (self, posY, posX, tile=\"t\"):\r\n self.grid[posY][posX] = tile", "def move_character(self, old_y, old_x, y_pos, x_pos):\n self.map[old_y][old_x] = ' '\n self.map[y_pos][x_pos] = 'G'", "def get_tile(self, char):\n if char == \"#\":\n return self.tiles[0:32...
[ "0.65772283", "0.63926816", "0.63005394", "0.6223129", "0.6160206", "0.5935179", "0.59308314", "0.5923742", "0.58938473", "0.5884255", "0.58656144", "0.5764994", "0.5759222", "0.57555896", "0.57501453", "0.5739916", "0.5730088", "0.5726009", "0.5652192", "0.559732", "0.555341...
0.7330949
0
Translate points. This method is mainly used together with image transforms, such as padding and cropping, which translates the top left point of the image
def translate_point(point, y_offset=0, x_offset=0): out_point = point.copy() out_point[:, 0] += y_offset out_point[:, 1] += x_offset return out_point
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def translate(self, tr):\n c = self.c -self.a*tr[0] -self.b*tr[1]\n self.c =c\n self.pointN = self.pointN+tr\n self.point1 = self.point1+tr\n self.points +=tr", "def translate(self, tr):\n self.points = self.points + tr", "def transform_point(self, pt):\r\n\r\n ...
[ "0.6916326", "0.68535155", "0.6655771", "0.6619164", "0.65891886", "0.6571816", "0.6486129", "0.6390449", "0.6384428", "0.6354297", "0.6337685", "0.63048804", "0.62629306", "0.6257778", "0.6250915", "0.6223482", "0.62071604", "0.61853385", "0.61835444", "0.617482", "0.6174301...
0.6074518
29
Schedules a capture for the given host with its assigned image
def run(self, host=None): host = self.getFogHost(host) num = str(self.getHostNumber(host)) url = self.baseURL+'host/'+num+'/task' try: requests.post(url, headers=self.header, json={"taskTypeID": 2}) except Exception: sys.exit(1)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def run(self, host=None):\n host = self.getFogHost(host)\n num = str(self.getHostNumber(host))\n url = self.baseURL+'host/'+num+'/task'\n try:\n req = requests.post(\n url,\n headers=self.header,\n json={\"taskTypeID\":...
[ "0.6332346", "0.61388904", "0.601226", "0.5848971", "0.5846362", "0.58231384", "0.58039826", "0.5740137", "0.55395263", "0.55391586", "0.5533151", "0.5513474", "0.54072434", "0.54072434", "0.53996444", "0.5392534", "0.5384022", "0.53729296", "0.5365006", "0.5362949", "0.53269...
0.0
-1
Initialize appointment's creation workflow; Pass to date definition
def create_appointment(): msg = render_template('date') return question(msg)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def init_workflow():\n pass", "def test_cron_workflow_service_create_cron_workflow(self):\n pass", "def _create_schedules(self):\n\n ''''''", "def adc_api_workflow_create():\n workflow_json = request.get_json(force=True)\n\n return jsonify(adc.workflow_create(workflow_json=workflow_jso...
[ "0.6407841", "0.5967415", "0.58864933", "0.57573587", "0.5667341", "0.5589588", "0.5531992", "0.55230147", "0.5515364", "0.54900604", "0.5475908", "0.5444646", "0.544376", "0.54394084", "0.54327065", "0.541028", "0.5404114", "0.5402616", "0.5401141", "0.5400178", "0.5390388",...
0.65402335
0
Set appointment's begin date; Pass to appointment's begin time
def appointment_date(begin_date): session.attributes['begin_date'] = str(begin_date) qs = render_template('time') return question(qs)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def set_begin_date(self, begin_date):\n self.set_value_into_input_field(self.begin_date_inputbox_locator, begin_date)", "def begin_date(self, value):\n\n if not isinstance(value, datetime):\n raise TypeError(_pretty_message(\n '''\n begin_date must be an ins...
[ "0.7065128", "0.7005564", "0.697397", "0.6942992", "0.65339845", "0.64918524", "0.6489678", "0.647476", "0.6436995", "0.6375545", "0.62956077", "0.61982006", "0.61860764", "0.61311483", "0.6118447", "0.6097708", "0.6097708", "0.6097708", "0.6097708", "0.6097708", "0.6097708",...
0.7202706
0
Set appointment's begin_time; Pass to apppointment's end date
def appointment_time(begin_time): session.attributes['begin_time'] = str(begin_time) msg = render_template('end_date') return question(msg)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def appointment_date(begin_date):\n\n session.attributes['begin_date'] = str(begin_date)\n qs = render_template('time')\n return question(qs)", "def begin_time(self, begin_time):\n if begin_time is None:\n raise ValueError(\"Invalid value for `begin_time`, must not be `None`\") # noqa...
[ "0.6985581", "0.65532726", "0.64899975", "0.6089194", "0.60122746", "0.60122746", "0.60034096", "0.59809184", "0.59809184", "0.59809184", "0.59809184", "0.59809184", "0.59553194", "0.5918142", "0.5905272", "0.5746406", "0.5744839", "0.57072085", "0.56862944", "0.5659291", "0....
0.77024466
0
Set appointment's end date; Pass to appointment's end time
def appointment_end_date(end_date): session.attributes['end_date'] = str(end_date) msg = render_template('end_time') return question(msg)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def set_end_date(self, date):\n pass", "def end_date_time(self, end_date_time):\n\n self._end_date_time = end_date_time", "def end_date(self, end_date):\n self._end_date = end_date", "def change_end_date(self, new_end_date):\n self.end_date = new_end_date", "def appointment_end_...
[ "0.76225775", "0.7512484", "0.7296414", "0.72335875", "0.72125554", "0.71424943", "0.71424943", "0.71424943", "0.71424943", "0.71424943", "0.71424943", "0.71424943", "0.71424943", "0.7120749", "0.7087847", "0.6900266", "0.6880423", "0.6880423", "0.6826085", "0.6826085", "0.68...
0.78313226
0
Set appointment's end time; Create new appointment and rendere result
def appointment_end_time(end_time): session.attributes['end_time'] = str(end_time) form = AppointmentForm(session.attributes) form.submit() return render_result(form)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def appointment_time(begin_time):\n\n session.attributes['begin_time'] = str(begin_time)\n msg = render_template('end_date')\n return question(msg)", "def appointment_end_date(end_date):\n\n session.attributes['end_date'] = str(end_date)\n msg = render_template('end_time')\n return question(msg...
[ "0.7324631", "0.707775", "0.64345574", "0.61964524", "0.6116282", "0.6012545", "0.59737736", "0.5958537", "0.58682275", "0.5739396", "0.56900465", "0.5664058", "0.5664058", "0.5635635", "0.5621515", "0.5621515", "0.5621515", "0.56020176", "0.5596119", "0.5588781", "0.5569192"...
0.75025403
0
Significant duration model by Abrahamson and Silva (1996) Empirical ground motion models, report prepared for Brookhaven National Laboratory. Input
def abrahamson_silva_ds_1999(magnitude=7.0,distance=10.0,soil=True,duration_type='DS575H'): # map the duration_type to integer key dur_map = {'DS575H': 0, 'DS575V': 1, 'DS595H': 2, 'DS595V': 3} dur_tag = dur_map.get(duration_type.upper(),None) if dur_tag is None: print("SignificantDurationModel.abrahamson_silva_ds_1999: duration_type='DS575H','DS575V','DS595H','DS595V'?") return None, None # modeling coefficients beta = [3.2, 3.2, 3.2, 3.2] b1 = [5.204, 4.610, 5.204, 4.610] b2 = [0.851, 1.536, 0.851, 1.536] m_star = [6, 6, 6, 6] c1 = [0.805, 1.076, 0.805, 1.076] c2 = [0.063, 0.107, 0.063, 0.107] rc = [10, 10, 10, 10] Drat = [0.000, 0.000, 0.845, 0.646] sigma = [0.55, 0.46, 0.49, 0.45] # median if distance > rc[dur_tag]: ds_median = np.exp(np.log((np.exp(b1[dur_tag]+b2[dur_tag]* \ (magnitude-m_star[dur_tag]))/(10**(1.5*magnitude+ \ 16.05)))**(-1/3)/(4.9e6*beta[dur_tag])+soil* \ c1[dur_tag]+c2[dur_tag]*(distance-rc[dur_tag]))+Drat[dur_tag]) else: ds_median = np.exp(np.log((np.exp(b1[dur_tag]+b2[dur_tag]* \ (magnitude-m_star[dur_tag]))/(10**(1.5*magnitude+ \ 16.05)))**(-1/3)/(4.9e6*beta[dur_tag])+soil* \ c1[dur_tag])+Drat[dur_tag]) # sigma ds_sigma = sigma[dur_tag] # return return np.log(ds_median), ds_sigma
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def bommer_stafford_alarcon_ds_2009(magnitude=7.0, distance=10.0, vs30=760.0, ztor=0.0, duration_type='DS575H'):\n\n # duration type map\n dur_map = {'DS575H':0, 'DS595H': 1}\n dur_tag = dur_map.get(duration_type.upper(), None)\n if dur_tag is None:\n print(\"SignificantDurationModel.bommer_staf...
[ "0.5894989", "0.5636878", "0.5595273", "0.5590827", "0.5589143", "0.55404943", "0.5539545", "0.55124915", "0.5504166", "0.5484351", "0.5466944", "0.5432563", "0.5431083", "0.5426489", "0.54122007", "0.5407446", "0.54009956", "0.53980666", "0.5388078", "0.53808546", "0.5372438...
0.6149179
0
Singificant duration model by Bommer, Stafford, Alarcon (2009) Empirical Equations for the Prediction of the Significant, Bracketed, and Uniform Duration of Earthquake Ground Motion Input
def bommer_stafford_alarcon_ds_2009(magnitude=7.0, distance=10.0, vs30=760.0, ztor=0.0, duration_type='DS575H'): # duration type map dur_map = {'DS575H':0, 'DS595H': 1} dur_tag = dur_map.get(duration_type.upper(), None) if dur_tag is None: print("SignificantDurationModel.bommer_stafford_alarcon_ds_2009: duration_type='DS575H','DS595H'?") return None, None, None, None # modeling coefficients c0 = [-5.6298, -2.2393] m1 = [1.2619, 0.9368] r1 = [2.0063, 1.5686] r2 = [-0.2520, -0.1953] h1 = [-2.3316, 2.5000] v1 = [-0.2900, -0.3478] z1 = [-0.0522, -0.0365] tauCoeff = [0.3527, 0.3252] phiCoeff = [0.4304, 0.3460] sigma_c = [0.1729, 0.1114] sigma_Tgm = [0.5289, 0.4616] # median ds_median = np.exp(c0[dur_tag]+m1[dur_tag]*magnitude+(r1[dur_tag]+ \ r2[dur_tag]*magnitude)*np.log(np.sqrt(distance**2+h1[dur_tag]**2))+ \ v1[dur_tag]*np.log(vs30)+z1[dur_tag]*ztor) # standard deviations ds_sigma = sigma_Tgm[dur_tag] ds_tau = tauCoeff[dur_tag] ds_phi = phiCoeff[dur_tag] # return return np.log(ds_median), ds_sigma, ds_tau, ds_phi
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def abrahamson_silva_ds_1999(magnitude=7.0,distance=10.0,soil=True,duration_type='DS575H'):\n\n\n # map the duration_type to integer key\n dur_map = {'DS575H': 0,\n 'DS575V': 1,\n 'DS595H': 2,\n 'DS595V': 3}\n dur_tag = dur_map.get(duration_type.upper(),None)\n ...
[ "0.63171774", "0.59800464", "0.5841969", "0.5801413", "0.5742783", "0.5734388", "0.56822574", "0.5675069", "0.5562555", "0.55490863", "0.5539586", "0.55152154", "0.5498051", "0.5493372", "0.5490544", "0.5488294", "0.5482953", "0.5469412", "0.5449055", "0.5438603", "0.5429409"...
0.5884634
2
Significant duration model by Afshari and Stewart (2016) hysically Parameterized Prediction Equations for Significant Duration in Active Crustal Regions Input
def afshari_stewart_ds_2016(magnitude=7.0, distance=10.0, vs30=760.0, mechanism='unknown', z1=None, region='california', duration_type='DS575H'): # mechanism map mech_map = {'unknown':0, 'normal': 1, 'reverse': 2, 'strike-slip': 3} mech_tag = mech_map.get(mechanism.lower(), None) if mech_tag is None: print("SignificantDurationModel.afshari_stewart_ds_2016: mechanism='unknown','normal','reverse','strike-slip'?") return None, None, None, None # region map reg_map = {'california':0, 'japan': 1, 'other': 2} reg_tag = reg_map.get(region.lower(), None) if reg_tag is None: print("SignificantDurationModel.afshari_stewart_ds_2016: region='california', 'japan', 'other'?") return None, None, None, None # duration type map dur_map = {'DS575H':0, 'DS595H': 1, 'DS2080H': 2} dur_tag = dur_map.get(duration_type.upper(), None) if dur_tag is None: print("SignificantDurationModel.afshari_stewart_ds_2016: duration_type='DS575H','DS595H','DS2080H'?") return None, None, None, None # source coefficients M1 = [5.35, 5.20, 5.20] M2 = [7.15, 7.40, 7.40] b0 = [[1.2800, 2.1820, 0.8822], [1.5550, 2.5410, 1.4090], [0.7806, 1.6120, 0.7729], [1.2790, 2.3020, 0.8804]] b1 = [[5.576, 3.628, 6.182], [4.992, 3.170, 4.778], [7.061, 4.536, 6.579], [5.578, 3.467, 6.188]] b2 = [0.9011, 0.9443, 0.7414] b3 = [-1.684, -3.911, -3.164] Mstar = [6, 6, 6] # path coefficients c1 = [0.1159, 0.3165, 0.0646] RR1 = [10, 10, 10] RR2 = [50, 50, 50] c2 = [0.1065, 0.2539, 0.0865] c3 = [0.0682, 0.0932, 0.0373] # site coefficients c4 = [-0.2246, -0.3183, -0.4237] Vref = [368.2, 369.9, 369.6] V1 = [600, 600, 600] c5 = [0.0006, 0.0006, 0.0005] dz1ref = [200, 200, 200] # standard deviation coefficients phi1 = [0.54, 0.43, 0.56] phi2 = [0.41, 0.35, 0.45] tau1 = [0.28, 0.25, 0.30] tau2 = [0.25, 0.19, 0.19] # basin depth if reg_tag == 0: mu_z1 = np.exp(-7.15/4*np.log((vs30**4+570.94**4)/(1360**4+570.94**4))) else: mu_z1 = np.exp(-5.23/4*np.log((vs30**4+412.39**4)/(1360**4+412.39**4))) # differential basin depth if z1 is None or 
z1 < 0 or reg_tag == 2: dz1 = 0 else: dz1 = z1-mu_z1 # source term if magnitude < M1[dur_tag]: F_E = b0[mech_tag][dur_tag] else: if magnitude < M2[dur_tag]: deltaSigma = np.exp(b1[mech_tag][dur_tag]+b2[dur_tag]*(magnitude-Mstar[dur_tag])) else: deltaSigma = np.exp(b1[mech_tag][dur_tag]+b2[dur_tag]*(M2[dur_tag]-Mstar[dur_tag])+ \ b3[dur_tag]*(magnitude-M2[dur_tag])) M_0 = 10**(1.5*magnitude+16.05) f_0 = 4.9e6*3.2*(deltaSigma/M_0)**(1/3) F_E = 1/f_0 # path term if distance < RR1[dur_tag]: F_P = c1[dur_tag]*distance elif distance < RR2[dur_tag]: F_P = c1[dur_tag]*RR1[dur_tag]+c2[dur_tag]*(distance-RR1[dur_tag]) else: F_P = c1[dur_tag]*RR1[dur_tag]+c2[dur_tag]*(RR2[dur_tag]-RR1[dur_tag])+c3[dur_tag]*(distance-RR2[dur_tag]) # F_deltaz term if dz1 <= dz1ref[dur_tag]: F_deltaz = c5[dur_tag]*dz1 else: F_deltaz = c5[dur_tag]*dz1ref[dur_tag] # site term if vs30 < V1[dur_tag]: F_S = c4[dur_tag]*np.log(vs30/Vref[dur_tag])+F_deltaz else: F_S = c4[dur_tag]*np.log(V1[dur_tag]/Vref[dur_tag])+F_deltaz # median ds_median = np.exp(np.log(F_E+F_P)+F_S) # standard deviations # between event if magnitude < 5.5: ds_phi = phi1[dur_tag] elif magnitude < 5.75: ds_phi = phi1[dur_tag]+(phi2[dur_tag]-phi1[dur_tag])*(magnitude-5.5)/(5.75-5.5) else: ds_phi = phi2[dur_tag] # within event if magnitude < 6.5: ds_tau = tau1[dur_tag] elif magnitude < 7: ds_tau = tau1[dur_tag]+(tau2[dur_tag]-tau1[dur_tag])*(magnitude-6.5)/(7-6.5) else: ds_tau = tau2[dur_tag] # total ds_sigma = np.sqrt(ds_phi**2+ds_tau**2) # return return np.log(ds_median), ds_sigma, ds_tau, ds_phi
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def rtest_predictoutcome():\n\n #define cohort size\n npatients = 2\n\n #init healthy patients\n simulator = AbbcEnvironment(patients=npatients)\n\n #simulate healthy patients for long term in short term increments\n nstep = int(long_term/short_term)\n\n #define action taken : -1 means patient...
[ "0.59970397", "0.59766865", "0.58818656", "0.5731749", "0.5704135", "0.5681434", "0.56570554", "0.55817807", "0.55588585", "0.5520618", "0.54998547", "0.54751325", "0.5461763", "0.5432599", "0.5418803", "0.5413159", "0.5413122", "0.541066", "0.5398527", "0.53958756", "0.53857...
0.5816224
3
Returns a FASTA string given a list of sequence objects.
def fasta_from_sequences(seqs, make_seqlabel=None, line_wrap=None): warnings.warn( "`fasta_from_sequences` is deprecated and will be removed in " "scikit-bio 0.3.0. Please update your code to use `skbio.io.write`.", DeprecationWarning) fasta_list = [] for i, seq in enumerate(seqs): # Check if it has a label, or one is to be created label = str(i) if make_seqlabel is not None: label = make_seqlabel(seq) elif hasattr(seq, 'id') and seq.id: label = seq.id elif hasattr(seq, 'Label') and seq.Label: label = seq.Label elif hasattr(seq, 'Name') and seq.Name: label = seq.Name # wrap sequence lines seq_str = str(seq) if line_wrap is not None: numlines, remainder = divmod(len(seq_str), line_wrap) if remainder: numlines += 1 body = [seq_str[j * line_wrap:(j + 1) * line_wrap] for j in range(numlines)] else: body = [seq_str] fasta_list.append('>' + label) fasta_list += body return '\n'.join(fasta_list)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def to_string(fasta):\n\n # remove header\n fasta_nh = fasta.readlines()[1:]\n\n # make into single string\n fasta_str = ''.join(fasta_nh)\n\n # remove newline characters\n seq = fasta_str.replace(\"\\n\", \"\")\n\n return seq", "def __str__(self):\n st=\"\"\n for g in self:\n ...
[ "0.6669229", "0.6225485", "0.6078894", "0.6065464", "0.600114", "0.598271", "0.5942299", "0.58825916", "0.5855838", "0.5846919", "0.5789", "0.57847273", "0.5782889", "0.57827187", "0.5757814", "0.57269883", "0.5702477", "0.5654774", "0.56518805", "0.56378794", "0.56164175", ...
0.6035097
4
Returns a FASTA string given an alignment object
def fasta_from_alignment(aln, make_seqlabel=None, line_wrap=None, sort=True): warnings.warn( "`fasta_from_alignment` is deprecated and will be removed in " "scikit-bio 0.3.0. Please update your code to use `skbio.io.write` " "or `skbio.Alignment.write`.", DeprecationWarning) # check if it's an Alignment object or a dictionary if isinstance(aln, Alignment): order = aln.ids() else: order = aln.keys() if sort: order = sorted(order) ordered_seqs = [] for label in order: seq = aln[label] if isinstance(seq, str): seq = BiologicalSequence(seq, label) ordered_seqs.append(seq) return fasta_from_sequences(ordered_seqs, make_seqlabel=make_seqlabel, line_wrap=line_wrap)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def toString(self, format_='fasta', structureSuffix=':structure'):\n if format_ == 'fasta':\n return '>%s\\n%s\\n>%s%s\\n%s\\n' % (\n self.id, self.sequence, self.id, structureSuffix,\n self.structure)\n else:\n raise ValueError(\"Format must be 'fa...
[ "0.67460656", "0.67185", "0.6525832", "0.6444035", "0.63622904", "0.6283782", "0.6256947", "0.61297464", "0.6100429", "0.6099387", "0.6058078", "0.59980303", "0.59817237", "0.596827", "0.59507924", "0.59468234", "0.5945336", "0.5923687", "0.5910547", "0.58886063", "0.585322",...
0.0
-1
Make n bars dataframe seeing past n bars. The row size of `df` must be greater than or equal to `n_bars`, or raise ValueError.
def make_nbars_past(df: pd.DataFrame, n_bars: int, cols: List[str] = ['Close'], datetime_col: Union[str, None] = 'Date') -> pd.DataFrame: if df.shape[0] < n_bars + 1: raise ValueError( f'row size of the df (={df.shape[0]}) must be greater than or equal to n_bars + 1 (={n_bars + 1})') df = df.rename(columns={col: f'{col}{n_bars}' for col in cols}) for i in reversed(range(n_bars)): inc = n_bars - i for col in cols: df[f'{col}{i}'] = df[f'{col}{n_bars}'][inc:].append( pd.Series([np.nan]*inc)).reset_index(drop=True) # correct bar date (or datetime) if datetime_col is not None: df[datetime_col] = df[datetime_col][n_bars:].append( pd.Series([np.nan]*n_bars)).reset_index(drop=True) df = df.dropna() return df
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def make_nbars_future(df: pd.DataFrame, n_bars: int, cols: List[str] = ['Close'], datetime_col: Union[str, None] = 'Date') -> pd.DataFrame:\n if df.shape[0] < n_bars + 1:\n raise ValueError(\n f'row size of the df (={df.shape[0]}) must be greater than or equal to n_bars + 1 (={n_bars + 1})')\n...
[ "0.5731061", "0.5586702", "0.5446808", "0.53735137", "0.53575814", "0.53006095", "0.5197947", "0.5093131", "0.5072677", "0.50318915", "0.49762776", "0.49578872", "0.49176425", "0.49153638", "0.48178166", "0.4813536", "0.47499412", "0.47253093", "0.46960294", "0.46573168", "0....
0.65518504
0
Make n bars dataframe seeing future n bars. The row size of `df` must be greater than or equal to `n_bars`, or raise ValueError.
def make_nbars_future(df: pd.DataFrame, n_bars: int, cols: List[str] = ['Close'], datetime_col: Union[str, None] = 'Date') -> pd.DataFrame: if df.shape[0] < n_bars + 1: raise ValueError( f'row size of the df (={df.shape[0]}) must be greater than or equal to n_bars + 1 (={n_bars + 1})') df = df.rename(columns={col: f'{col}0' for col in cols}) for i in range(1, n_bars+1): for col in cols: df[f'{col}{i}'] = df[f'{col}0'][i:].append( pd.Series([np.nan]*i)).reset_index(drop=True) df = df.dropna() return df
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def make_nbars_past(df: pd.DataFrame, n_bars: int, cols: List[str] = ['Close'], datetime_col: Union[str, None] = 'Date') -> pd.DataFrame:\n if df.shape[0] < n_bars + 1:\n raise ValueError(\n f'row size of the df (={df.shape[0]}) must be greater than or equal to n_bars + 1 (={n_bars + 1})')\n ...
[ "0.6623224", "0.5880733", "0.56581855", "0.5601714", "0.5332026", "0.52718735", "0.52266806", "0.5171003", "0.51566476", "0.5070246", "0.5067792", "0.4987947", "0.4968835", "0.49510282", "0.49025717", "0.48347977", "0.47774142", "0.4759826", "0.46863613", "0.4677156", "0.4670...
0.61674595
1
split data into two
def split_data(df: pd.DataFrame, ratio: float, purging: bool = True, n_bars: int = 10) -> Tuple[pd.DataFrame, pd.DataFrame]: split_idx = int(df.shape[0] * ratio) df1 = df[:split_idx] df2 = df[split_idx:] if purging: purge_idx = round((n_bars-1) * ratio) df1 = df1[:-purge_idx] df2 = df2[(n_bars - 1 - purge_idx):] return df1, df2
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def split(data):\n return data[:len(data) // 2], data[len(data) // 2:]", "def split_data_into_input_and_output(data):\n data_in, data_out = list(zip(*[((x[\"synopsis\"]), x[\"gross\"]) for x in data]))\n return np.array(data_in), np.array(data_out)", "def split_X_y(data):\n return data.drop(columns...
[ "0.7664711", "0.6904235", "0.6826597", "0.678124", "0.67385954", "0.6733232", "0.6674183", "0.66669315", "0.66662866", "0.6537706", "0.6520929", "0.6488977", "0.6449249", "0.6385212", "0.6323068", "0.63213813", "0.63213813", "0.6284112", "0.627194", "0.6256541", "0.6204233", ...
0.0
-1
Initialise and validate arguments.
def __init__( self, did: Optional[str], transfer_tx_id: str, service_id: Union[str, int], userdata: Optional[Dict] = None, ) -> None: assert ( did and transfer_tx_id and service_id is not None ), f"bad argument values: did={did}, transfer_ts_id={transfer_tx_id}, service_id={service_id}" if userdata: assert isinstance(userdata, dict), "Userdata must be a dictionary." self.did = did self.transfer_tx_id = transfer_tx_id self.service_id = service_id self.userdata = userdata
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def initialize(self, args):\n\t\tpass", "def initialise(self, args, environ):", "def _setup_arguments(self):\n\n self._parser.add_argument(\"-a\", \"--area-interest\",\n help=\"Area of interest to process, \"\n \"shapefile path\", re...
[ "0.7312641", "0.6968615", "0.6909323", "0.68516374", "0.68135613", "0.6787214", "0.6784292", "0.67627263", "0.6755122", "0.6752054", "0.67215335", "0.6718858", "0.6716634", "0.6714336", "0.66909677", "0.66909677", "0.66830397", "0.6636629", "0.66306543", "0.66300434", "0.6623...
0.0
-1
This operation applies only to sharded cluster instances. For more information, see [Apply for an endpoint for a shard or Configserver node](~~134037~~). > The requested endpoint can only be accessed over the internal network. If you want to access the endpoint over the Internet, call the [AllocatePublicNetworkAddress](~~67602~~) operation to apply for a public endpoint.
def allocate_node_private_network_address_with_options( self, request: dds_20151201_models.AllocateNodePrivateNetworkAddressRequest, runtime: util_models.RuntimeOptions, ) -> dds_20151201_models.AllocateNodePrivateNetworkAddressResponse: UtilClient.validate_model(request) query = {} if not UtilClient.is_unset(request.account_name): query['AccountName'] = request.account_name if not UtilClient.is_unset(request.account_password): query['AccountPassword'] = request.account_password if not UtilClient.is_unset(request.dbinstance_id): query['DBInstanceId'] = request.dbinstance_id if not UtilClient.is_unset(request.node_id): query['NodeId'] = request.node_id if not UtilClient.is_unset(request.owner_account): query['OwnerAccount'] = request.owner_account if not UtilClient.is_unset(request.owner_id): query['OwnerId'] = request.owner_id if not UtilClient.is_unset(request.resource_owner_account): query['ResourceOwnerAccount'] = request.resource_owner_account if not UtilClient.is_unset(request.resource_owner_id): query['ResourceOwnerId'] = request.resource_owner_id if not UtilClient.is_unset(request.security_token): query['SecurityToken'] = request.security_token if not UtilClient.is_unset(request.zone_id): query['ZoneId'] = request.zone_id req = open_api_models.OpenApiRequest( query=OpenApiUtilClient.query(query) ) params = open_api_models.Params( action='AllocateNodePrivateNetworkAddress', version='2015-12-01', protocol='HTTPS', pathname='/', method='POST', auth_type='AK', style='RPC', req_body_type='formData', body_type='json' ) return TeaCore.from_map( dds_20151201_models.AllocateNodePrivateNetworkAddressResponse(), self.call_api(params, req, runtime) )
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def describe_sharding_network_address_with_options(\n self,\n request: dds_20151201_models.DescribeShardingNetworkAddressRequest,\n runtime: util_models.RuntimeOptions,\n ) -> dds_20151201_models.DescribeShardingNetworkAddressResponse:\n UtilClient.validate_model(request)\n qu...
[ "0.5975963", "0.55856633", "0.5472134", "0.5470607", "0.5463846", "0.5260941", "0.52568924", "0.51533407", "0.50733143", "0.5009992", "0.500198", "0.4997431", "0.49441746", "0.49345952", "0.49260488", "0.49037597", "0.48985666", "0.48839447", "0.48782828", "0.4850948", "0.482...
0.0
-1
This operation applies only to sharded cluster instances. For more information, see [Apply for an endpoint for a shard or Configserver node](~~134037~~). > The requested endpoint can only be accessed over the internal network. If you want to access the endpoint over the Internet, call the [AllocatePublicNetworkAddress](~~67602~~) operation to apply for a public endpoint.
async def allocate_node_private_network_address_with_options_async(
    self,
    request: dds_20151201_models.AllocateNodePrivateNetworkAddressRequest,
    runtime: util_models.RuntimeOptions,
) -> dds_20151201_models.AllocateNodePrivateNetworkAddressResponse:
    """
    Async variant: apply for an internal endpoint for a shard or
    Configserver node (sharded cluster instances only).

    :param request: the populated AllocateNodePrivateNetworkAddress request model
    :param runtime: per-call runtime options (timeouts, retries, ...)
    :return: the deserialized AllocateNodePrivateNetworkAddress response
    """
    UtilClient.validate_model(request)
    candidates = (
        ('AccountName', request.account_name),
        ('AccountPassword', request.account_password),
        ('DBInstanceId', request.dbinstance_id),
        ('NodeId', request.node_id),
        ('OwnerAccount', request.owner_account),
        ('OwnerId', request.owner_id),
        ('ResourceOwnerAccount', request.resource_owner_account),
        ('ResourceOwnerId', request.resource_owner_id),
        ('SecurityToken', request.security_token),
        ('ZoneId', request.zone_id),
    )
    # Only fields that were explicitly set on the request are sent.
    query = {key: value for key, value in candidates if not UtilClient.is_unset(value)}
    req = open_api_models.OpenApiRequest(
        query=OpenApiUtilClient.query(query)
    )
    params = open_api_models.Params(
        action='AllocateNodePrivateNetworkAddress',
        version='2015-12-01',
        protocol='HTTPS',
        pathname='/',
        method='POST',
        auth_type='AK',
        style='RPC',
        req_body_type='formData',
        body_type='json'
    )
    return TeaCore.from_map(
        dds_20151201_models.AllocateNodePrivateNetworkAddressResponse(),
        await self.call_api_async(params, req, runtime)
    )
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def describe_sharding_network_address_with_options(\n self,\n request: dds_20151201_models.DescribeShardingNetworkAddressRequest,\n runtime: util_models.RuntimeOptions,\n ) -> dds_20151201_models.DescribeShardingNetworkAddressResponse:\n UtilClient.validate_model(request)\n qu...
[ "0.5976804", "0.5585746", "0.54729366", "0.5471635", "0.54649836", "0.52590406", "0.52580786", "0.51528704", "0.50743604", "0.50106984", "0.50028336", "0.49977505", "0.4944949", "0.49348018", "0.49272078", "0.4904413", "0.48976403", "0.48846352", "0.48782158", "0.48512864", "...
0.0
-1
This operation applies only to sharded cluster instances. For more information, see [Apply for an endpoint for a shard or Configserver node](~~134037~~). > The requested endpoint can only be accessed over the internal network. If you want to access the endpoint over the Internet, call the [AllocatePublicNetworkAddress](~~67602~~) operation to apply for a public endpoint.
def allocate_node_private_network_address(
    self,
    request: dds_20151201_models.AllocateNodePrivateNetworkAddressRequest,
) -> dds_20151201_models.AllocateNodePrivateNetworkAddressResponse:
    """
    Apply for an internal endpoint for a shard or Configserver node.

    Convenience wrapper that delegates to the ``*_with_options`` variant
    using default runtime options.
    """
    return self.allocate_node_private_network_address_with_options(
        request, util_models.RuntimeOptions()
    )
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def describe_sharding_network_address_with_options(\n self,\n request: dds_20151201_models.DescribeShardingNetworkAddressRequest,\n runtime: util_models.RuntimeOptions,\n ) -> dds_20151201_models.DescribeShardingNetworkAddressResponse:\n UtilClient.validate_model(request)\n qu...
[ "0.5975963", "0.55856633", "0.5472134", "0.5470607", "0.5463846", "0.5260941", "0.52568924", "0.51533407", "0.50733143", "0.5009992", "0.500198", "0.4997431", "0.49441746", "0.49345952", "0.49260488", "0.49037597", "0.48985666", "0.48839447", "0.48782828", "0.4850948", "0.482...
0.0
-1
This operation applies only to sharded cluster instances. For more information, see [Apply for an endpoint for a shard or Configserver node](~~134037~~). > The requested endpoint can only be accessed over the internal network. If you want to access the endpoint over the Internet, call the [AllocatePublicNetworkAddress](~~67602~~) operation to apply for a public endpoint.
async def allocate_node_private_network_address_async(
    self,
    request: dds_20151201_models.AllocateNodePrivateNetworkAddressRequest,
) -> dds_20151201_models.AllocateNodePrivateNetworkAddressResponse:
    """
    Async variant: apply for an internal endpoint for a shard or
    Configserver node, using default runtime options.
    """
    return await self.allocate_node_private_network_address_with_options_async(
        request, util_models.RuntimeOptions()
    )
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def describe_sharding_network_address_with_options(\n self,\n request: dds_20151201_models.DescribeShardingNetworkAddressRequest,\n runtime: util_models.RuntimeOptions,\n ) -> dds_20151201_models.DescribeShardingNetworkAddressResponse:\n UtilClient.validate_model(request)\n qu...
[ "0.59758043", "0.55844337", "0.547219", "0.5469977", "0.5463206", "0.5260636", "0.52561885", "0.5152298", "0.50711286", "0.5009262", "0.50011724", "0.4996319", "0.49427938", "0.49342892", "0.4926076", "0.4902237", "0.48966217", "0.48821318", "0.4876169", "0.48473135", "0.4828...
0.0
-1
Before you enable Transparent Data Encryption (TDE) by calling the [ModifyDBInstanceTDE](~~131267~~) operation, you can call this operation to check whether KMS keys are authorized to ApsaraDB for MongoDB instances.
def check_cloud_resource_authorized_with_options(
    self,
    request: dds_20151201_models.CheckCloudResourceAuthorizedRequest,
    runtime: util_models.RuntimeOptions,
) -> dds_20151201_models.CheckCloudResourceAuthorizedResponse:
    """
    Check whether KMS keys are authorized to the instance — a
    precondition for enabling Transparent Data Encryption (TDE).

    :param request: the populated CheckCloudResourceAuthorized request model
    :param runtime: per-call runtime options (timeouts, retries, ...)
    :return: the deserialized CheckCloudResourceAuthorized response
    """
    UtilClient.validate_model(request)
    candidates = (
        ('DBInstanceId', request.dbinstance_id),
        ('OwnerAccount', request.owner_account),
        ('OwnerId', request.owner_id),
        ('ResourceOwnerAccount', request.resource_owner_account),
        ('ResourceOwnerId', request.resource_owner_id),
        ('SecurityToken', request.security_token),
        ('TargetRegionId', request.target_region_id),
    )
    # Only fields that were explicitly set on the request are sent.
    query = {key: value for key, value in candidates if not UtilClient.is_unset(value)}
    req = open_api_models.OpenApiRequest(
        query=OpenApiUtilClient.query(query)
    )
    params = open_api_models.Params(
        action='CheckCloudResourceAuthorized',
        version='2015-12-01',
        protocol='HTTPS',
        pathname='/',
        method='POST',
        auth_type='AK',
        style='RPC',
        req_body_type='formData',
        body_type='json'
    )
    return TeaCore.from_map(
        dds_20151201_models.CheckCloudResourceAuthorizedResponse(),
        self.call_api(params, req, runtime)
    )
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def modify_dbinstance_tdewith_options(\n self,\n request: dds_20151201_models.ModifyDBInstanceTDERequest,\n runtime: util_models.RuntimeOptions,\n ) -> dds_20151201_models.ModifyDBInstanceTDEResponse:\n UtilClient.validate_model(request)\n query = {}\n if not UtilClient...
[ "0.57940775", "0.54554147", "0.5252807", "0.5249601", "0.52472556", "0.52276635", "0.5210599", "0.5180775", "0.51518637", "0.51469225", "0.5068418", "0.50625265", "0.5005263", "0.49592647", "0.49395782", "0.49289456", "0.49255323", "0.48954076", "0.48648885", "0.48615065", "0...
0.0
-1
Before you enable Transparent Data Encryption (TDE) by calling the [ModifyDBInstanceTDE](~~131267~~) operation, you can call this operation to check whether KMS keys are authorized to ApsaraDB for MongoDB instances.
async def check_cloud_resource_authorized_with_options_async(
    self,
    request: dds_20151201_models.CheckCloudResourceAuthorizedRequest,
    runtime: util_models.RuntimeOptions,
) -> dds_20151201_models.CheckCloudResourceAuthorizedResponse:
    """
    Async variant: check whether KMS keys are authorized to the instance
    (precondition for enabling TDE).

    :param request: the populated CheckCloudResourceAuthorized request model
    :param runtime: per-call runtime options (timeouts, retries, ...)
    :return: the deserialized CheckCloudResourceAuthorized response
    """
    UtilClient.validate_model(request)
    candidates = (
        ('DBInstanceId', request.dbinstance_id),
        ('OwnerAccount', request.owner_account),
        ('OwnerId', request.owner_id),
        ('ResourceOwnerAccount', request.resource_owner_account),
        ('ResourceOwnerId', request.resource_owner_id),
        ('SecurityToken', request.security_token),
        ('TargetRegionId', request.target_region_id),
    )
    # Only fields that were explicitly set on the request are sent.
    query = {key: value for key, value in candidates if not UtilClient.is_unset(value)}
    req = open_api_models.OpenApiRequest(
        query=OpenApiUtilClient.query(query)
    )
    params = open_api_models.Params(
        action='CheckCloudResourceAuthorized',
        version='2015-12-01',
        protocol='HTTPS',
        pathname='/',
        method='POST',
        auth_type='AK',
        style='RPC',
        req_body_type='formData',
        body_type='json'
    )
    return TeaCore.from_map(
        dds_20151201_models.CheckCloudResourceAuthorizedResponse(),
        await self.call_api_async(params, req, runtime)
    )
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def modify_dbinstance_tdewith_options(\n self,\n request: dds_20151201_models.ModifyDBInstanceTDERequest,\n runtime: util_models.RuntimeOptions,\n ) -> dds_20151201_models.ModifyDBInstanceTDEResponse:\n UtilClient.validate_model(request)\n query = {}\n if not UtilClient...
[ "0.57957464", "0.54568666", "0.5251657", "0.5248405", "0.52455413", "0.5225166", "0.5209286", "0.5178954", "0.51536596", "0.5146501", "0.50677025", "0.50611264", "0.5004506", "0.49577722", "0.49385908", "0.49272832", "0.49267793", "0.48935202", "0.48636174", "0.48611277", "0....
0.0
-1
Before you enable Transparent Data Encryption (TDE) by calling the [ModifyDBInstanceTDE](~~131267~~) operation, you can call this operation to check whether KMS keys are authorized to ApsaraDB for MongoDB instances.
def check_cloud_resource_authorized(
    self,
    request: dds_20151201_models.CheckCloudResourceAuthorizedRequest,
) -> dds_20151201_models.CheckCloudResourceAuthorizedResponse:
    """
    Check whether KMS keys are authorized to the instance (precondition
    for enabling TDE), using default runtime options.
    """
    return self.check_cloud_resource_authorized_with_options(
        request, util_models.RuntimeOptions()
    )
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def modify_dbinstance_tdewith_options(\n self,\n request: dds_20151201_models.ModifyDBInstanceTDERequest,\n runtime: util_models.RuntimeOptions,\n ) -> dds_20151201_models.ModifyDBInstanceTDEResponse:\n UtilClient.validate_model(request)\n query = {}\n if not UtilClient...
[ "0.57940775", "0.54554147", "0.5252807", "0.5249601", "0.52472556", "0.52276635", "0.5210599", "0.5180775", "0.51518637", "0.51469225", "0.5068418", "0.50625265", "0.5005263", "0.49592647", "0.49395782", "0.49289456", "0.49255323", "0.48954076", "0.48648885", "0.48615065", "0...
0.0
-1
Before you enable Transparent Data Encryption (TDE) by calling the [ModifyDBInstanceTDE](~~131267~~) operation, you can call this operation to check whether KMS keys are authorized to ApsaraDB for MongoDB instances.
async def check_cloud_resource_authorized_async(
    self,
    request: dds_20151201_models.CheckCloudResourceAuthorizedRequest,
) -> dds_20151201_models.CheckCloudResourceAuthorizedResponse:
    """
    Async variant: check whether KMS keys are authorized to the instance
    (precondition for enabling TDE), using default runtime options.
    """
    return await self.check_cloud_resource_authorized_with_options_async(
        request, util_models.RuntimeOptions()
    )
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def modify_dbinstance_tdewith_options(\n self,\n request: dds_20151201_models.ModifyDBInstanceTDERequest,\n runtime: util_models.RuntimeOptions,\n ) -> dds_20151201_models.ModifyDBInstanceTDEResponse:\n UtilClient.validate_model(request)\n query = {}\n if not UtilClient...
[ "0.57958263", "0.54571706", "0.5251616", "0.5248914", "0.5245301", "0.5226322", "0.5209222", "0.5178404", "0.51535696", "0.5146297", "0.5068407", "0.5060694", "0.50029343", "0.49580246", "0.49379238", "0.49273783", "0.49271196", "0.48940754", "0.486325", "0.4862366", "0.48581...
0.0
-1
You can call this operation to check whether an ApsaraDB for MongoDB instance meets the data recovery conditions.
def check_recovery_condition_with_options(
    self,
    request: dds_20151201_models.CheckRecoveryConditionRequest,
    runtime: util_models.RuntimeOptions,
) -> dds_20151201_models.CheckRecoveryConditionResponse:
    """
    Check whether the instance meets the data recovery conditions.

    :param request: the populated CheckRecoveryCondition request model
    :param runtime: per-call runtime options (timeouts, retries, ...)
    :return: the deserialized CheckRecoveryCondition response
    """
    UtilClient.validate_model(request)
    candidates = (
        ('BackupId', request.backup_id),
        ('DatabaseNames', request.database_names),
        ('OwnerAccount', request.owner_account),
        ('OwnerId', request.owner_id),
        ('ResourceGroupId', request.resource_group_id),
        ('ResourceOwnerAccount', request.resource_owner_account),
        ('ResourceOwnerId', request.resource_owner_id),
        ('RestoreTime', request.restore_time),
        ('SecurityToken', request.security_token),
        ('SourceDBInstance', request.source_dbinstance),
    )
    # Only fields that were explicitly set on the request are sent.
    query = {key: value for key, value in candidates if not UtilClient.is_unset(value)}
    req = open_api_models.OpenApiRequest(
        query=OpenApiUtilClient.query(query)
    )
    params = open_api_models.Params(
        action='CheckRecoveryCondition',
        version='2015-12-01',
        protocol='HTTPS',
        pathname='/',
        method='POST',
        auth_type='AK',
        style='RPC',
        req_body_type='formData',
        body_type='json'
    )
    return TeaCore.from_map(
        dds_20151201_models.CheckRecoveryConditionResponse(),
        self.call_api(params, req, runtime)
    )
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def db_healthcheck() -> bool:\n\n try:\n result = query_db(\"Select 1\")\n app.logfile.info(\"Select 1\")\n return True\n except ConnectionError as err:\n app.logger.error(err)\n return False", "def check_database_connection():\n try:\n engine = create_engine(DB...
[ "0.6475063", "0.59561867", "0.5909198", "0.590827", "0.58934665", "0.5884424", "0.5813175", "0.57891566", "0.5722669", "0.57047", "0.57047", "0.57047", "0.5700496", "0.56553364", "0.56532806", "0.56195915", "0.5593647", "0.5560636", "0.55524915", "0.55173624", "0.55170524", ...
0.0
-1
You can call this operation to check whether an ApsaraDB for MongoDB instance meets the data recovery conditions.
async def check_recovery_condition_with_options_async(
    self,
    request: dds_20151201_models.CheckRecoveryConditionRequest,
    runtime: util_models.RuntimeOptions,
) -> dds_20151201_models.CheckRecoveryConditionResponse:
    """
    Async variant: check whether the instance meets the data recovery
    conditions.

    :param request: the populated CheckRecoveryCondition request model
    :param runtime: per-call runtime options (timeouts, retries, ...)
    :return: the deserialized CheckRecoveryCondition response
    """
    UtilClient.validate_model(request)
    candidates = (
        ('BackupId', request.backup_id),
        ('DatabaseNames', request.database_names),
        ('OwnerAccount', request.owner_account),
        ('OwnerId', request.owner_id),
        ('ResourceGroupId', request.resource_group_id),
        ('ResourceOwnerAccount', request.resource_owner_account),
        ('ResourceOwnerId', request.resource_owner_id),
        ('RestoreTime', request.restore_time),
        ('SecurityToken', request.security_token),
        ('SourceDBInstance', request.source_dbinstance),
    )
    # Only fields that were explicitly set on the request are sent.
    query = {key: value for key, value in candidates if not UtilClient.is_unset(value)}
    req = open_api_models.OpenApiRequest(
        query=OpenApiUtilClient.query(query)
    )
    params = open_api_models.Params(
        action='CheckRecoveryCondition',
        version='2015-12-01',
        protocol='HTTPS',
        pathname='/',
        method='POST',
        auth_type='AK',
        style='RPC',
        req_body_type='formData',
        body_type='json'
    )
    return TeaCore.from_map(
        dds_20151201_models.CheckRecoveryConditionResponse(),
        await self.call_api_async(params, req, runtime)
    )
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def db_healthcheck() -> bool:\n\n try:\n result = query_db(\"Select 1\")\n app.logfile.info(\"Select 1\")\n return True\n except ConnectionError as err:\n app.logger.error(err)\n return False", "def check_database_connection():\n try:\n engine = create_engine(DB...
[ "0.6474419", "0.59555435", "0.5908085", "0.59068346", "0.58937764", "0.5885362", "0.5810954", "0.5789529", "0.57233125", "0.5703921", "0.5703921", "0.5703921", "0.5701056", "0.56554455", "0.5653818", "0.5617752", "0.559518", "0.55608857", "0.55521846", "0.5516432", "0.5516353...
0.0
-1
You can call this operation to check whether an ApsaraDB for MongoDB instance meets the data recovery conditions.
def check_recovery_condition(
    self,
    request: dds_20151201_models.CheckRecoveryConditionRequest,
) -> dds_20151201_models.CheckRecoveryConditionResponse:
    """
    Check whether the instance meets the data recovery conditions,
    using default runtime options.
    """
    return self.check_recovery_condition_with_options(
        request, util_models.RuntimeOptions()
    )
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def db_healthcheck() -> bool:\n\n try:\n result = query_db(\"Select 1\")\n app.logfile.info(\"Select 1\")\n return True\n except ConnectionError as err:\n app.logger.error(err)\n return False", "def check_database_connection():\n try:\n engine = create_engine(DB...
[ "0.6475063", "0.59561867", "0.5909198", "0.590827", "0.58934665", "0.5884424", "0.5813175", "0.57891566", "0.5722669", "0.57047", "0.57047", "0.57047", "0.5700496", "0.56553364", "0.56532806", "0.56195915", "0.5593647", "0.5560636", "0.55524915", "0.55173624", "0.55170524", ...
0.0
-1
You can call this operation to check whether an ApsaraDB for MongoDB instance meets the data recovery conditions.
async def check_recovery_condition_async(
    self,
    request: dds_20151201_models.CheckRecoveryConditionRequest,
) -> dds_20151201_models.CheckRecoveryConditionResponse:
    """
    Async variant: check whether the instance meets the data recovery
    conditions, using default runtime options.
    """
    return await self.check_recovery_condition_with_options_async(
        request, util_models.RuntimeOptions()
    )
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def db_healthcheck() -> bool:\n\n try:\n result = query_db(\"Select 1\")\n app.logfile.info(\"Select 1\")\n return True\n except ConnectionError as err:\n app.logger.error(err)\n return False", "def check_database_connection():\n try:\n engine = create_engine(DB...
[ "0.6471628", "0.59526837", "0.5906501", "0.59047586", "0.58901316", "0.5882065", "0.5808634", "0.5789524", "0.57223743", "0.57008713", "0.57008713", "0.57008713", "0.57006454", "0.56540054", "0.5652248", "0.5619137", "0.55929124", "0.55583525", "0.5549184", "0.5514339", "0.55...
0.0
-1
Usage: When you call this operation, the instance must be in the Running state.
def create_backup_with_options(
    self,
    request: dds_20151201_models.CreateBackupRequest,
    runtime: util_models.RuntimeOptions,
) -> dds_20151201_models.CreateBackupResponse:
    """
    Create a backup of the instance. The instance must be in the
    Running state when this operation is called.

    :param request: the populated CreateBackup request model
    :param runtime: per-call runtime options (timeouts, retries, ...)
    :return: the deserialized CreateBackup response
    """
    UtilClient.validate_model(request)
    candidates = (
        ('BackupMethod', request.backup_method),
        ('DBInstanceId', request.dbinstance_id),
        ('OwnerAccount', request.owner_account),
        ('OwnerId', request.owner_id),
        ('ResourceOwnerAccount', request.resource_owner_account),
        ('ResourceOwnerId', request.resource_owner_id),
        ('SecurityToken', request.security_token),
    )
    # Only fields that were explicitly set on the request are sent.
    query = {key: value for key, value in candidates if not UtilClient.is_unset(value)}
    req = open_api_models.OpenApiRequest(
        query=OpenApiUtilClient.query(query)
    )
    params = open_api_models.Params(
        action='CreateBackup',
        version='2015-12-01',
        protocol='HTTPS',
        pathname='/',
        method='POST',
        auth_type='AK',
        style='RPC',
        req_body_type='formData',
        body_type='json'
    )
    return TeaCore.from_map(
        dds_20151201_models.CreateBackupResponse(),
        self.call_api(params, req, runtime)
    )
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def running(self):\n pass", "def mark_running(self):\r\n self.status = RUNNING", "def execute(self) -> None:\n self.state()", "def run(self):\n \n pass", "def run(self): \r\n return", "def running(self) -> bool:", "def run(self):\n\t\t\n\t\tpass", "def run...
[ "0.7484519", "0.67636037", "0.65005904", "0.64012855", "0.63773376", "0.63619155", "0.63597435", "0.6358119", "0.6358119", "0.6358119", "0.6358119", "0.6358119", "0.6358119", "0.6358119", "0.6358119", "0.6358119", "0.6358119", "0.6358119", "0.6358119", "0.6358119", "0.6358119...
0.0
-1
Usage: When you call this operation, the instance must be in the Running state.
async def create_backup_with_options_async(
    self,
    request: dds_20151201_models.CreateBackupRequest,
    runtime: util_models.RuntimeOptions,
) -> dds_20151201_models.CreateBackupResponse:
    """
    Async variant: create a backup of the instance. The instance must be
    in the Running state when this operation is called.

    :param request: the populated CreateBackup request model
    :param runtime: per-call runtime options (timeouts, retries, ...)
    :return: the deserialized CreateBackup response
    """
    UtilClient.validate_model(request)
    candidates = (
        ('BackupMethod', request.backup_method),
        ('DBInstanceId', request.dbinstance_id),
        ('OwnerAccount', request.owner_account),
        ('OwnerId', request.owner_id),
        ('ResourceOwnerAccount', request.resource_owner_account),
        ('ResourceOwnerId', request.resource_owner_id),
        ('SecurityToken', request.security_token),
    )
    # Only fields that were explicitly set on the request are sent.
    query = {key: value for key, value in candidates if not UtilClient.is_unset(value)}
    req = open_api_models.OpenApiRequest(
        query=OpenApiUtilClient.query(query)
    )
    params = open_api_models.Params(
        action='CreateBackup',
        version='2015-12-01',
        protocol='HTTPS',
        pathname='/',
        method='POST',
        auth_type='AK',
        style='RPC',
        req_body_type='formData',
        body_type='json'
    )
    return TeaCore.from_map(
        dds_20151201_models.CreateBackupResponse(),
        await self.call_api_async(params, req, runtime)
    )
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def running(self):\n pass", "def mark_running(self):\r\n self.status = RUNNING", "def execute(self) -> None:\n self.state()", "def run(self):\n \n pass", "def run(self): \r\n return", "def running(self) -> bool:", "def run(self):\n\t\t\n\t\tpass", "def run...
[ "0.7482264", "0.67624557", "0.64995867", "0.64005274", "0.6376072", "0.63606864", "0.63579", "0.63561314", "0.63561314", "0.63561314", "0.63561314", "0.63561314", "0.63561314", "0.63561314", "0.63561314", "0.63561314", "0.63561314", "0.63561314", "0.63561314", "0.63561314", "...
0.0
-1
Usage: When you call this operation, the instance must be in the Running state.
def create_backup(
    self,
    request: dds_20151201_models.CreateBackupRequest,
) -> dds_20151201_models.CreateBackupResponse:
    """
    Create a backup of the instance (must be in the Running state),
    using default runtime options.
    """
    return self.create_backup_with_options(request, util_models.RuntimeOptions())
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def running(self):\n pass", "def mark_running(self):\r\n self.status = RUNNING", "def execute(self) -> None:\n self.state()", "def run(self):\n \n pass", "def run(self): \r\n return", "def running(self) -> bool:", "def run(self):\n\t\t\n\t\tpass", "def run...
[ "0.7484519", "0.67636037", "0.65005904", "0.64012855", "0.63773376", "0.63619155", "0.63597435", "0.6358119", "0.6358119", "0.6358119", "0.6358119", "0.6358119", "0.6358119", "0.6358119", "0.6358119", "0.6358119", "0.6358119", "0.6358119", "0.6358119", "0.6358119", "0.6358119...
0.0
-1
Usage: When you call this operation, the instance must be in the Running state.
async def create_backup_async(
    self,
    request: dds_20151201_models.CreateBackupRequest,
) -> dds_20151201_models.CreateBackupResponse:
    """
    Async variant: create a backup of the instance (must be in the
    Running state), using default runtime options.
    """
    return await self.create_backup_with_options_async(
        request, util_models.RuntimeOptions()
    )
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def running(self):\n pass", "def mark_running(self):\r\n self.status = RUNNING", "def execute(self) -> None:\n self.state()", "def run(self):\n \n pass", "def run(self): \r\n return", "def running(self) -> bool:", "def run(self):\n\t\t\n\t\tpass", "def run...
[ "0.74834305", "0.6762313", "0.6500899", "0.640097", "0.6377246", "0.6361036", "0.63591534", "0.63573486", "0.6357224", "0.6357224", "0.6357224", "0.6357224", "0.6357224", "0.6357224", "0.6357224", "0.6357224", "0.6357224", "0.6357224", "0.6357224", "0.6357224", "0.6357224", ...
0.0
-1
Creates or clones an ApsaraDB for MongoDB replica set instance.
def create_dbinstance_with_options(
    self,
    request: dds_20151201_models.CreateDBInstanceRequest,
    runtime: util_models.RuntimeOptions,
) -> dds_20151201_models.CreateDBInstanceResponse:
    """
    Create or clone an ApsaraDB for MongoDB replica set instance.

    :param request: the populated CreateDBInstance request model
    :param runtime: per-call runtime options (timeouts, retries, ...)
    :return: the deserialized CreateDBInstance response
    """
    UtilClient.validate_model(request)
    candidates = (
        ('AccountPassword', request.account_password),
        ('AutoRenew', request.auto_renew),
        ('BackupId', request.backup_id),
        ('BusinessInfo', request.business_info),
        ('ChargeType', request.charge_type),
        ('ClientToken', request.client_token),
        ('ClusterId', request.cluster_id),
        ('CouponNo', request.coupon_no),
        ('DBInstanceClass', request.dbinstance_class),
        ('DBInstanceDescription', request.dbinstance_description),
        ('DBInstanceStorage', request.dbinstance_storage),
        ('DatabaseNames', request.database_names),
        ('Encrypted', request.encrypted),
        ('EncryptionKey', request.encryption_key),
        ('Engine', request.engine),
        ('EngineVersion', request.engine_version),
        ('GlobalSecurityGroupIds', request.global_security_group_ids),
        ('HiddenZoneId', request.hidden_zone_id),
        ('NetworkType', request.network_type),
        ('OwnerAccount', request.owner_account),
        ('OwnerId', request.owner_id),
        ('Period', request.period),
        ('ReadonlyReplicas', request.readonly_replicas),
        ('RegionId', request.region_id),
        ('ReplicationFactor', request.replication_factor),
        ('ResourceGroupId', request.resource_group_id),
        ('ResourceOwnerAccount', request.resource_owner_account),
        ('ResourceOwnerId', request.resource_owner_id),
        ('RestoreTime', request.restore_time),
        ('SecondaryZoneId', request.secondary_zone_id),
        ('SecurityIPList', request.security_iplist),
        ('SecurityToken', request.security_token),
        ('SrcDBInstanceId', request.src_dbinstance_id),
        ('StorageEngine', request.storage_engine),
        ('StorageType', request.storage_type),
        ('Tag', request.tag),
        ('VSwitchId', request.v_switch_id),
        ('VpcId', request.vpc_id),
        ('ZoneId', request.zone_id),
    )
    # Only fields that were explicitly set on the request are sent.
    query = {key: value for key, value in candidates if not UtilClient.is_unset(value)}
    req = open_api_models.OpenApiRequest(
        query=OpenApiUtilClient.query(query)
    )
    params = open_api_models.Params(
        action='CreateDBInstance',
        version='2015-12-01',
        protocol='HTTPS',
        pathname='/',
        method='POST',
        auth_type='AK',
        style='RPC',
        req_body_type='formData',
        body_type='json'
    )
    return TeaCore.from_map(
        dds_20151201_models.CreateDBInstanceResponse(),
        self.call_api(params, req, runtime)
    )
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def create_mongodb(config):\n\n \n mongo_url = \"mongodb://\"\n mongo_url += \",\".join(map(lambda srv: srv['host'] + \":\" + str(srv['port']), config['data']['mongoServers']))\n \n if 'replica' in config['data']:\n mongo_url += \"/?replicaSet={0}\".format(config['data']['replica'])\n\n ...
[ "0.6223406", "0.60013074", "0.5782864", "0.5747709", "0.56893855", "0.56779116", "0.56532794", "0.56076694", "0.5558675", "0.5521635", "0.55150175", "0.5483562", "0.54343545", "0.5430869", "0.5419964", "0.54073054", "0.54006696", "0.5372172", "0.537116", "0.5366805", "0.53625...
0.0
-1
Creates or clones an ApsaraDB for MongoDB replica set instance.
async def create_dbinstance_with_options_async(
    self,
    request: dds_20151201_models.CreateDBInstanceRequest,
    runtime: util_models.RuntimeOptions,
) -> dds_20151201_models.CreateDBInstanceResponse:
    """
    Create or clone an ApsaraDB for MongoDB replica set instance (async variant).

    Validates *request*, forwards only the request fields that were
    explicitly set as query parameters of the CreateDBInstance RPC action,
    and deserializes the raw response into a CreateDBInstanceResponse.
    """
    UtilClient.validate_model(request)
    # (snake_case request attribute, API query-parameter name) pairs,
    # listed in the order the parameters are serialized.
    field_map = (
        ('account_password', 'AccountPassword'),
        ('auto_renew', 'AutoRenew'),
        ('backup_id', 'BackupId'),
        ('business_info', 'BusinessInfo'),
        ('charge_type', 'ChargeType'),
        ('client_token', 'ClientToken'),
        ('cluster_id', 'ClusterId'),
        ('coupon_no', 'CouponNo'),
        ('dbinstance_class', 'DBInstanceClass'),
        ('dbinstance_description', 'DBInstanceDescription'),
        ('dbinstance_storage', 'DBInstanceStorage'),
        ('database_names', 'DatabaseNames'),
        ('encrypted', 'Encrypted'),
        ('encryption_key', 'EncryptionKey'),
        ('engine', 'Engine'),
        ('engine_version', 'EngineVersion'),
        ('global_security_group_ids', 'GlobalSecurityGroupIds'),
        ('hidden_zone_id', 'HiddenZoneId'),
        ('network_type', 'NetworkType'),
        ('owner_account', 'OwnerAccount'),
        ('owner_id', 'OwnerId'),
        ('period', 'Period'),
        ('readonly_replicas', 'ReadonlyReplicas'),
        ('region_id', 'RegionId'),
        ('replication_factor', 'ReplicationFactor'),
        ('resource_group_id', 'ResourceGroupId'),
        ('resource_owner_account', 'ResourceOwnerAccount'),
        ('resource_owner_id', 'ResourceOwnerId'),
        ('restore_time', 'RestoreTime'),
        ('secondary_zone_id', 'SecondaryZoneId'),
        ('security_iplist', 'SecurityIPList'),
        ('security_token', 'SecurityToken'),
        ('src_dbinstance_id', 'SrcDBInstanceId'),
        ('storage_engine', 'StorageEngine'),
        ('storage_type', 'StorageType'),
        ('tag', 'Tag'),
        ('v_switch_id', 'VSwitchId'),
        ('vpc_id', 'VpcId'),
        ('zone_id', 'ZoneId'),
    )
    query = {}
    for attr_name, param_name in field_map:
        value = getattr(request, attr_name)
        if not UtilClient.is_unset(value):
            query[param_name] = value
    api_request = open_api_models.OpenApiRequest(
        query=OpenApiUtilClient.query(query)
    )
    api_params = open_api_models.Params(
        action='CreateDBInstance',
        version='2015-12-01',
        protocol='HTTPS',
        pathname='/',
        method='POST',
        auth_type='AK',
        style='RPC',
        req_body_type='formData',
        body_type='json'
    )
    return TeaCore.from_map(
        dds_20151201_models.CreateDBInstanceResponse(),
        await self.call_api_async(api_params, api_request, runtime)
    )
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def create_mongodb(config):\n\n \n mongo_url = \"mongodb://\"\n mongo_url += \",\".join(map(lambda srv: srv['host'] + \":\" + str(srv['port']), config['data']['mongoServers']))\n \n if 'replica' in config['data']:\n mongo_url += \"/?replicaSet={0}\".format(config['data']['replica'])\n\n ...
[ "0.622102", "0.5999783", "0.57820976", "0.57459533", "0.56896466", "0.56764424", "0.5653182", "0.5604997", "0.5556561", "0.55223054", "0.55130494", "0.5482603", "0.54321265", "0.54321194", "0.541827", "0.540541", "0.5400194", "0.53717893", "0.5368951", "0.5364506", "0.5361408...
0.0
-1
Creates or clones an ApsaraDB for MongoDB replica set instance.
def create_dbinstance(
    self,
    request: dds_20151201_models.CreateDBInstanceRequest,
) -> dds_20151201_models.CreateDBInstanceResponse:
    """
    Create or clone an ApsaraDB for MongoDB replica set instance.

    Convenience wrapper that invokes create_dbinstance_with_options with
    freshly constructed default runtime options.
    """
    return self.create_dbinstance_with_options(request, util_models.RuntimeOptions())
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def create_mongodb(config):\n\n \n mongo_url = \"mongodb://\"\n mongo_url += \",\".join(map(lambda srv: srv['host'] + \":\" + str(srv['port']), config['data']['mongoServers']))\n \n if 'replica' in config['data']:\n mongo_url += \"/?replicaSet={0}\".format(config['data']['replica'])\n\n ...
[ "0.6223406", "0.60013074", "0.5782864", "0.5747709", "0.56893855", "0.56779116", "0.56532794", "0.56076694", "0.5558675", "0.5521635", "0.55150175", "0.5483562", "0.54343545", "0.5430869", "0.5419964", "0.54073054", "0.54006696", "0.5372172", "0.537116", "0.5366805", "0.53625...
0.0
-1
Creates or clones an ApsaraDB for MongoDB replica set instance.
async def create_dbinstance_async(
    self,
    request: dds_20151201_models.CreateDBInstanceRequest,
) -> dds_20151201_models.CreateDBInstanceResponse:
    """
    Create or clone an ApsaraDB for MongoDB replica set instance (async variant).

    Convenience wrapper that awaits create_dbinstance_with_options_async
    with freshly constructed default runtime options.
    """
    return await self.create_dbinstance_with_options_async(request, util_models.RuntimeOptions())
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def create_mongodb(config):\n\n \n mongo_url = \"mongodb://\"\n mongo_url += \",\".join(map(lambda srv: srv['host'] + \":\" + str(srv['port']), config['data']['mongoServers']))\n \n if 'replica' in config['data']:\n mongo_url += \"/?replicaSet={0}\".format(config['data']['replica'])\n\n ...
[ "0.6222429", "0.60006994", "0.5783417", "0.5747048", "0.5689551", "0.5677842", "0.5654198", "0.5607225", "0.5557893", "0.5522509", "0.551385", "0.54838175", "0.5433453", "0.5430938", "0.5419658", "0.5405892", "0.5401281", "0.5372678", "0.53699636", "0.53670853", "0.5360969", ...
0.0
-1
The ID of the request.
def create_node_batch_with_options(
    self,
    request: dds_20151201_models.CreateNodeBatchRequest,
    runtime: util_models.RuntimeOptions,
) -> dds_20151201_models.CreateNodeBatchResponse:
    """
    Batch-create nodes via the CreateNodeBatch RPC action.

    Validates *request*, forwards only the request fields that were
    explicitly set as query parameters, and deserializes the raw response
    into a CreateNodeBatchResponse.
    """
    UtilClient.validate_model(request)
    # (snake_case request attribute, API query-parameter name) pairs.
    field_map = (
        ('account_name', 'AccountName'),
        ('account_password', 'AccountPassword'),
        ('auto_pay', 'AutoPay'),
        ('business_info', 'BusinessInfo'),
        ('client_token', 'ClientToken'),
        ('coupon_no', 'CouponNo'),
        ('dbinstance_id', 'DBInstanceId'),
        ('from_app', 'FromApp'),
        ('nodes_info', 'NodesInfo'),
        ('owner_account', 'OwnerAccount'),
        ('owner_id', 'OwnerId'),
        ('resource_owner_account', 'ResourceOwnerAccount'),
        ('resource_owner_id', 'ResourceOwnerId'),
        ('security_token', 'SecurityToken'),
        ('shard_direct', 'ShardDirect'),
    )
    query = {}
    for attr_name, param_name in field_map:
        value = getattr(request, attr_name)
        if not UtilClient.is_unset(value):
            query[param_name] = value
    api_request = open_api_models.OpenApiRequest(
        query=OpenApiUtilClient.query(query)
    )
    api_params = open_api_models.Params(
        action='CreateNodeBatch',
        version='2015-12-01',
        protocol='HTTPS',
        pathname='/',
        method='POST',
        auth_type='AK',
        style='RPC',
        req_body_type='formData',
        body_type='json'
    )
    return TeaCore.from_map(
        dds_20151201_models.CreateNodeBatchResponse(),
        self.call_api(api_params, api_request, runtime)
    )
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def request_id(self) -> Optional[str]:\n return self._request_id", "def request_id(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"request_id\")", "def request_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"request_id\")", "def req_id(self) -> str...
[ "0.8368801", "0.8361394", "0.8240447", "0.8162492", "0.7957824", "0.77528495", "0.7722504", "0.76823556", "0.7521728", "0.7500343", "0.748814", "0.748814", "0.7445728", "0.74192345", "0.73776406", "0.73737794", "0.73517877", "0.7346024", "0.726597", "0.7248543", "0.7190313", ...
0.0
-1
The ID of the request.
async def create_node_batch_with_options_async(
    self,
    request: dds_20151201_models.CreateNodeBatchRequest,
    runtime: util_models.RuntimeOptions,
) -> dds_20151201_models.CreateNodeBatchResponse:
    """
    Batch-create nodes via the CreateNodeBatch RPC action (async variant).

    Validates *request*, forwards only the request fields that were
    explicitly set as query parameters, and deserializes the raw response
    into a CreateNodeBatchResponse.
    """
    UtilClient.validate_model(request)
    # (snake_case request attribute, API query-parameter name) pairs.
    field_map = (
        ('account_name', 'AccountName'),
        ('account_password', 'AccountPassword'),
        ('auto_pay', 'AutoPay'),
        ('business_info', 'BusinessInfo'),
        ('client_token', 'ClientToken'),
        ('coupon_no', 'CouponNo'),
        ('dbinstance_id', 'DBInstanceId'),
        ('from_app', 'FromApp'),
        ('nodes_info', 'NodesInfo'),
        ('owner_account', 'OwnerAccount'),
        ('owner_id', 'OwnerId'),
        ('resource_owner_account', 'ResourceOwnerAccount'),
        ('resource_owner_id', 'ResourceOwnerId'),
        ('security_token', 'SecurityToken'),
        ('shard_direct', 'ShardDirect'),
    )
    query = {}
    for attr_name, param_name in field_map:
        value = getattr(request, attr_name)
        if not UtilClient.is_unset(value):
            query[param_name] = value
    api_request = open_api_models.OpenApiRequest(
        query=OpenApiUtilClient.query(query)
    )
    api_params = open_api_models.Params(
        action='CreateNodeBatch',
        version='2015-12-01',
        protocol='HTTPS',
        pathname='/',
        method='POST',
        auth_type='AK',
        style='RPC',
        req_body_type='formData',
        body_type='json'
    )
    return TeaCore.from_map(
        dds_20151201_models.CreateNodeBatchResponse(),
        await self.call_api_async(api_params, api_request, runtime)
    )
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def request_id(self) -> Optional[str]:\n return self._request_id", "def request_id(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"request_id\")", "def request_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"request_id\")", "def req_id(self) -> str...
[ "0.8368801", "0.8361394", "0.8240447", "0.8162492", "0.7957824", "0.77528495", "0.7722504", "0.76823556", "0.7521728", "0.7500343", "0.748814", "0.748814", "0.7445728", "0.74192345", "0.73776406", "0.73737794", "0.73517877", "0.7346024", "0.726597", "0.7248543", "0.7190313", ...
0.0
-1
The ID of the request.
def create_node_batch(
    self,
    request: dds_20151201_models.CreateNodeBatchRequest,
) -> dds_20151201_models.CreateNodeBatchResponse:
    """
    Batch-create nodes via the CreateNodeBatch RPC action.

    Convenience wrapper that invokes create_node_batch_with_options with
    freshly constructed default runtime options.
    """
    return self.create_node_batch_with_options(request, util_models.RuntimeOptions())
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def request_id(self) -> Optional[str]:\n return self._request_id", "def request_id(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"request_id\")", "def request_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"request_id\")", "def req_id(self) -> str...
[ "0.8368801", "0.8361394", "0.8240447", "0.8162492", "0.7957824", "0.77528495", "0.7722504", "0.76823556", "0.7521728", "0.7500343", "0.748814", "0.748814", "0.7445728", "0.74192345", "0.73776406", "0.73737794", "0.73517877", "0.7346024", "0.726597", "0.7248543", "0.7190313", ...
0.0
-1
The ID of the request.
async def create_node_batch_async(
    self,
    request: dds_20151201_models.CreateNodeBatchRequest,
) -> dds_20151201_models.CreateNodeBatchResponse:
    """
    Batch-create nodes via the CreateNodeBatch RPC action (async variant).

    Convenience wrapper that awaits create_node_batch_with_options_async
    with freshly constructed default runtime options.
    """
    return await self.create_node_batch_with_options_async(request, util_models.RuntimeOptions())
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def request_id(self) -> Optional[str]:\n return self._request_id", "def request_id(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"request_id\")", "def request_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"request_id\")", "def req_id(self) -> str...
[ "0.836924", "0.83613664", "0.8240659", "0.81644905", "0.7959101", "0.7753083", "0.7722351", "0.76823366", "0.75218236", "0.7501476", "0.74901116", "0.74901116", "0.7446364", "0.7418162", "0.7379212", "0.7373775", "0.73522216", "0.7346336", "0.72655106", "0.72484", "0.7189714"...
0.0
-1
> This operation can query only the information of the root account.
def describe_accounts_with_options(
    self,
    request: dds_20151201_models.DescribeAccountsRequest,
    runtime: util_models.RuntimeOptions,
) -> dds_20151201_models.DescribeAccountsResponse:
    """
    Query account information via the DescribeAccounts RPC action.

    Validates *request*, forwards only the request fields that were
    explicitly set as query parameters, and deserializes the raw response
    into a DescribeAccountsResponse.
    """
    UtilClient.validate_model(request)
    # (snake_case request attribute, API query-parameter name) pairs.
    field_map = (
        ('account_name', 'AccountName'),
        ('dbinstance_id', 'DBInstanceId'),
        ('owner_account', 'OwnerAccount'),
        ('owner_id', 'OwnerId'),
        ('resource_owner_account', 'ResourceOwnerAccount'),
        ('resource_owner_id', 'ResourceOwnerId'),
        ('security_token', 'SecurityToken'),
    )
    query = {}
    for attr_name, param_name in field_map:
        value = getattr(request, attr_name)
        if not UtilClient.is_unset(value):
            query[param_name] = value
    api_request = open_api_models.OpenApiRequest(
        query=OpenApiUtilClient.query(query)
    )
    api_params = open_api_models.Params(
        action='DescribeAccounts',
        version='2015-12-01',
        protocol='HTTPS',
        pathname='/',
        method='POST',
        auth_type='AK',
        style='RPC',
        req_body_type='formData',
        body_type='json'
    )
    return TeaCore.from_map(
        dds_20151201_models.DescribeAccountsResponse(),
        self.call_api(api_params, api_request, runtime)
    )
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_all_accounts_information(self):\n\t\treturn self._send_command_to_entity_server(us.SERVER_COMMAND_ENTITY_OWNER_SUDO_OPERATION, us.SERVER_COMMAND_GET_ALL_ACCOUNTS_INFORMATION)", "def GetAccount(host):\n return FetchUrlJson(host, 'accounts/self')", "def account(self):\n return self.request('/acco...
[ "0.60618746", "0.60531956", "0.5949562", "0.5929087", "0.590232", "0.5809233", "0.5754317", "0.56826806", "0.5678622", "0.5647712", "0.5637695", "0.5624888", "0.5613417", "0.55882704", "0.55844957", "0.55466443", "0.55272794", "0.55219424", "0.5499662", "0.54993296", "0.54942...
0.0
-1
> This operation can query only the information of the root account.
async def describe_accounts_with_options_async(
    self,
    request: dds_20151201_models.DescribeAccountsRequest,
    runtime: util_models.RuntimeOptions,
) -> dds_20151201_models.DescribeAccountsResponse:
    """
    Query account information via the DescribeAccounts RPC action (async variant).

    Validates *request*, forwards only the request fields that were
    explicitly set as query parameters, and deserializes the raw response
    into a DescribeAccountsResponse.
    """
    UtilClient.validate_model(request)
    # (snake_case request attribute, API query-parameter name) pairs.
    field_map = (
        ('account_name', 'AccountName'),
        ('dbinstance_id', 'DBInstanceId'),
        ('owner_account', 'OwnerAccount'),
        ('owner_id', 'OwnerId'),
        ('resource_owner_account', 'ResourceOwnerAccount'),
        ('resource_owner_id', 'ResourceOwnerId'),
        ('security_token', 'SecurityToken'),
    )
    query = {}
    for attr_name, param_name in field_map:
        value = getattr(request, attr_name)
        if not UtilClient.is_unset(value):
            query[param_name] = value
    api_request = open_api_models.OpenApiRequest(
        query=OpenApiUtilClient.query(query)
    )
    api_params = open_api_models.Params(
        action='DescribeAccounts',
        version='2015-12-01',
        protocol='HTTPS',
        pathname='/',
        method='POST',
        auth_type='AK',
        style='RPC',
        req_body_type='formData',
        body_type='json'
    )
    return TeaCore.from_map(
        dds_20151201_models.DescribeAccountsResponse(),
        await self.call_api_async(api_params, api_request, runtime)
    )
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_all_accounts_information(self):\n\t\treturn self._send_command_to_entity_server(us.SERVER_COMMAND_ENTITY_OWNER_SUDO_OPERATION, us.SERVER_COMMAND_GET_ALL_ACCOUNTS_INFORMATION)", "def GetAccount(host):\n return FetchUrlJson(host, 'accounts/self')", "def account(self):\n return self.request('/acco...
[ "0.60600203", "0.60517555", "0.5946838", "0.59270346", "0.5900741", "0.58067447", "0.5751905", "0.56770086", "0.5676639", "0.5646173", "0.56338227", "0.56216747", "0.56110007", "0.55853283", "0.5581051", "0.55435324", "0.5527605", "0.551848", "0.55033326", "0.5494173", "0.548...
0.0
-1
> This operation can query only the information of the root account.
def describe_accounts(
    self,
    request: dds_20151201_models.DescribeAccountsRequest,
) -> dds_20151201_models.DescribeAccountsResponse:
    """
    Query account information via the DescribeAccounts RPC action.

    Convenience wrapper that invokes describe_accounts_with_options with
    freshly constructed default runtime options.
    """
    return self.describe_accounts_with_options(request, util_models.RuntimeOptions())
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_all_accounts_information(self):\n\t\treturn self._send_command_to_entity_server(us.SERVER_COMMAND_ENTITY_OWNER_SUDO_OPERATION, us.SERVER_COMMAND_GET_ALL_ACCOUNTS_INFORMATION)", "def GetAccount(host):\n return FetchUrlJson(host, 'accounts/self')", "def account(self):\n return self.request('/acco...
[ "0.60618746", "0.60531956", "0.5949562", "0.5929087", "0.590232", "0.5809233", "0.5754317", "0.56826806", "0.5678622", "0.5647712", "0.5637695", "0.5624888", "0.5613417", "0.55882704", "0.55844957", "0.55466443", "0.55272794", "0.55219424", "0.5499662", "0.54993296", "0.54942...
0.0
-1
> This operation can query only the information of the root account.
async def describe_accounts_async(
    self,
    request: dds_20151201_models.DescribeAccountsRequest,
) -> dds_20151201_models.DescribeAccountsResponse:
    """
    Query account information via the DescribeAccounts RPC action (async variant).

    Convenience wrapper that awaits describe_accounts_with_options_async
    with freshly constructed default runtime options.
    """
    return await self.describe_accounts_with_options_async(request, util_models.RuntimeOptions())
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_all_accounts_information(self):\n\t\treturn self._send_command_to_entity_server(us.SERVER_COMMAND_ENTITY_OWNER_SUDO_OPERATION, us.SERVER_COMMAND_GET_ALL_ACCOUNTS_INFORMATION)", "def GetAccount(host):\n return FetchUrlJson(host, 'accounts/self')", "def account(self):\n return self.request('/acco...
[ "0.6062021", "0.60535145", "0.5948453", "0.5929802", "0.5903632", "0.5808768", "0.57539886", "0.56822604", "0.56786215", "0.5648916", "0.5636117", "0.56245905", "0.56137556", "0.5588924", "0.55840164", "0.5546056", "0.5526658", "0.5520463", "0.5500864", "0.54988945", "0.54941...
0.0
-1
The instance must be in the running state when you call this operation. This operation is applicable only to general-purpose local-disk and dedicated local-disk instances. You can call this operation up to 30 times per minute. To call this operation at a higher frequency, use a Logstore. For more information, see [Manage a Logstore](~~48990~~).
def describe_audit_policy_with_options(
    self,
    request: dds_20151201_models.DescribeAuditPolicyRequest,
    runtime: util_models.RuntimeOptions,
) -> dds_20151201_models.DescribeAuditPolicyResponse:
    """
    Query the audit-log policy via the DescribeAuditPolicy RPC action.

    Validates *request*, forwards only the request fields that were
    explicitly set as query parameters, and deserializes the raw response
    into a DescribeAuditPolicyResponse.
    """
    UtilClient.validate_model(request)
    # (snake_case request attribute, API query-parameter name) pairs.
    field_map = (
        ('dbinstance_id', 'DBInstanceId'),
        ('owner_account', 'OwnerAccount'),
        ('owner_id', 'OwnerId'),
        ('resource_owner_account', 'ResourceOwnerAccount'),
        ('resource_owner_id', 'ResourceOwnerId'),
        ('security_token', 'SecurityToken'),
    )
    query = {}
    for attr_name, param_name in field_map:
        value = getattr(request, attr_name)
        if not UtilClient.is_unset(value):
            query[param_name] = value
    api_request = open_api_models.OpenApiRequest(
        query=OpenApiUtilClient.query(query)
    )
    api_params = open_api_models.Params(
        action='DescribeAuditPolicy',
        version='2015-12-01',
        protocol='HTTPS',
        pathname='/',
        method='POST',
        auth_type='AK',
        style='RPC',
        req_body_type='formData',
        body_type='json'
    )
    return TeaCore.from_map(
        dds_20151201_models.DescribeAuditPolicyResponse(),
        self.call_api(api_params, api_request, runtime)
    )
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def monitorStore():\n # commented to use psutil system info system_info = systeminfo.get_all_info()\n\n system_info = nodeinfo.node_all()\n system_info ['monitored_timestamp'] = config.get_current_system_timestamp()\n\n # Attach sliver info to system info\n system_info.update(sliverinfo.collectAllDat...
[ "0.58156765", "0.55455977", "0.5419773", "0.5353307", "0.5311246", "0.52902937", "0.52496463", "0.5209651", "0.51289505", "0.50947905", "0.50946975", "0.50858665", "0.50722444", "0.50575197", "0.5044451", "0.504037", "0.5036916", "0.50341827", "0.50292313", "0.5020206", "0.50...
0.0
-1
The instance must be in the running state when you call this operation. This operation is applicable only to general-purpose local-disk and dedicated local-disk instances. You can call this operation up to 30 times per minute. To call this operation at a higher frequency, use a Logstore. For more information, see [Manage a Logstore](~~48990~~).
async def describe_audit_policy_with_options_async(
    self,
    request: dds_20151201_models.DescribeAuditPolicyRequest,
    runtime: util_models.RuntimeOptions,
) -> dds_20151201_models.DescribeAuditPolicyResponse:
    """
    Query the audit-log policy via the DescribeAuditPolicy RPC action (async variant).

    Validates *request*, forwards only the request fields that were
    explicitly set as query parameters, and deserializes the raw response
    into a DescribeAuditPolicyResponse.
    """
    UtilClient.validate_model(request)
    # (snake_case request attribute, API query-parameter name) pairs.
    field_map = (
        ('dbinstance_id', 'DBInstanceId'),
        ('owner_account', 'OwnerAccount'),
        ('owner_id', 'OwnerId'),
        ('resource_owner_account', 'ResourceOwnerAccount'),
        ('resource_owner_id', 'ResourceOwnerId'),
        ('security_token', 'SecurityToken'),
    )
    query = {}
    for attr_name, param_name in field_map:
        value = getattr(request, attr_name)
        if not UtilClient.is_unset(value):
            query[param_name] = value
    api_request = open_api_models.OpenApiRequest(
        query=OpenApiUtilClient.query(query)
    )
    api_params = open_api_models.Params(
        action='DescribeAuditPolicy',
        version='2015-12-01',
        protocol='HTTPS',
        pathname='/',
        method='POST',
        auth_type='AK',
        style='RPC',
        req_body_type='formData',
        body_type='json'
    )
    return TeaCore.from_map(
        dds_20151201_models.DescribeAuditPolicyResponse(),
        await self.call_api_async(api_params, api_request, runtime)
    )
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def monitorStore():\n # commented to use psutil system info system_info = systeminfo.get_all_info()\n\n system_info = nodeinfo.node_all()\n system_info ['monitored_timestamp'] = config.get_current_system_timestamp()\n\n # Attach sliver info to system info\n system_info.update(sliverinfo.collectAllDat...
[ "0.5816251", "0.55439824", "0.54192", "0.5350156", "0.53101283", "0.5288235", "0.5250576", "0.5207726", "0.5130923", "0.5095212", "0.50946444", "0.50885844", "0.5069727", "0.5060447", "0.5045108", "0.50422746", "0.5036676", "0.503287", "0.5031638", "0.50175893", "0.50110024",...
0.0
-1
The instance must be in the running state when you call this operation. This operation is applicable only to general-purpose local-disk and dedicated local-disk instances. You can call this operation up to 30 times per minute. To call this operation at a higher frequency, use a Logstore. For more information, see [Manage a Logstore](~~48990~~).
def describe_audit_policy(
    self,
    request: dds_20151201_models.DescribeAuditPolicyRequest,
) -> dds_20151201_models.DescribeAuditPolicyResponse:
    """
    Query the audit-log policy via the DescribeAuditPolicy RPC action.

    Convenience wrapper that invokes describe_audit_policy_with_options
    with freshly constructed default runtime options.
    """
    return self.describe_audit_policy_with_options(request, util_models.RuntimeOptions())
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def monitorStore():\n # commented to use psutil system info system_info = systeminfo.get_all_info()\n\n system_info = nodeinfo.node_all()\n system_info ['monitored_timestamp'] = config.get_current_system_timestamp()\n\n # Attach sliver info to system info\n system_info.update(sliverinfo.collectAllDat...
[ "0.58156765", "0.55455977", "0.5419773", "0.5353307", "0.5311246", "0.52902937", "0.52496463", "0.5209651", "0.51289505", "0.50947905", "0.50946975", "0.50858665", "0.50722444", "0.50575197", "0.5044451", "0.504037", "0.5036916", "0.50341827", "0.50292313", "0.5020206", "0.50...
0.0
-1
The instance must be in the running state when you call this operation. This operation is applicable only to general-purpose local-disk and dedicated local-disk instances. You can call this operation up to 30 times per minute. To call this operation at a higher frequency, use a Logstore. For more information, see [Manage a Logstore](~~48990~~).
async def describe_audit_policy_async(
    self,
    request: dds_20151201_models.DescribeAuditPolicyRequest,
) -> dds_20151201_models.DescribeAuditPolicyResponse:
    """
    Query the audit-log policy via the DescribeAuditPolicy RPC action (async variant).

    Convenience wrapper that awaits describe_audit_policy_with_options_async
    with freshly constructed default runtime options.
    """
    return await self.describe_audit_policy_with_options_async(request, util_models.RuntimeOptions())
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def monitorStore():\n # commented to use psutil system info system_info = systeminfo.get_all_info()\n\n system_info = nodeinfo.node_all()\n system_info ['monitored_timestamp'] = config.get_current_system_timestamp()\n\n # Attach sliver info to system info\n system_info.update(sliverinfo.collectAllDat...
[ "0.58150995", "0.5542857", "0.54209995", "0.5350285", "0.53098565", "0.52885276", "0.5250991", "0.5207924", "0.5130098", "0.5095916", "0.5095136", "0.5088931", "0.50700444", "0.50613415", "0.5044827", "0.5042709", "0.50386816", "0.50331056", "0.503233", "0.5017697", "0.500915...
0.0
-1
When you call this operation, ensure that the audit log feature of the instance is enabled. Otherwise, the operation returns an empty audit log. This operation is applicable only to general-purpose local-disk and dedicated local-disk instances. You can call this operation up to 30 times per minute. To call this operation at a higher frequency, use a Logstore. For more information, see [Manage a Logstore](~~48990~~).
def describe_audit_records_with_options(
    self,
    request: dds_20151201_models.DescribeAuditRecordsRequest,
    runtime: util_models.RuntimeOptions,
) -> dds_20151201_models.DescribeAuditRecordsResponse:
    """
    Query audit-log records via the DescribeAuditRecords RPC action.

    Validates *request*, forwards only the request fields that were
    explicitly set as query parameters, and deserializes the raw response
    into a DescribeAuditRecordsResponse.
    """
    UtilClient.validate_model(request)
    # (snake_case request attribute, API query-parameter name) pairs.
    field_map = (
        ('dbinstance_id', 'DBInstanceId'),
        ('database', 'Database'),
        ('end_time', 'EndTime'),
        ('form', 'Form'),
        ('node_id', 'NodeId'),
        ('order_type', 'OrderType'),
        ('owner_account', 'OwnerAccount'),
        ('owner_id', 'OwnerId'),
        ('page_number', 'PageNumber'),
        ('page_size', 'PageSize'),
        ('query_keywords', 'QueryKeywords'),
        ('resource_owner_account', 'ResourceOwnerAccount'),
        ('resource_owner_id', 'ResourceOwnerId'),
        ('security_token', 'SecurityToken'),
        ('start_time', 'StartTime'),
        ('user', 'User'),
    )
    query = {}
    for attr_name, param_name in field_map:
        value = getattr(request, attr_name)
        if not UtilClient.is_unset(value):
            query[param_name] = value
    api_request = open_api_models.OpenApiRequest(
        query=OpenApiUtilClient.query(query)
    )
    api_params = open_api_models.Params(
        action='DescribeAuditRecords',
        version='2015-12-01',
        protocol='HTTPS',
        pathname='/',
        method='POST',
        auth_type='AK',
        style='RPC',
        req_body_type='formData',
        body_type='json'
    )
    return TeaCore.from_map(
        dds_20151201_models.DescribeAuditRecordsResponse(),
        self.call_api(api_params, api_request, runtime)
    )
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_getAuditLogsWithNoParams(self):\r\n logs = self.client.getAuditLogs()\r\n return logs", "def enable_audit_monitoring():\n __enable_data_access_logging()\n __enable_log_streaming()\n __create_audit_alerts()\n __get_incidents_history()", "def __enable_data_access_logging():\n ...
[ "0.64446086", "0.62588435", "0.5892038", "0.57147324", "0.56653476", "0.5664631", "0.56301963", "0.55668545", "0.54649603", "0.54649603", "0.54543316", "0.5371827", "0.5329789", "0.5322089", "0.5311146", "0.5310638", "0.5301255", "0.52584666", "0.52535313", "0.52063483", "0.5...
0.0
-1
When you call this operation, ensure that the audit log feature of the instance is enabled. Otherwise, the operation returns an empty audit log. This operation is applicable only to general-purpose local-disk and dedicated local-disk instances. You can call this operation up to 30 times per minute. To call this operation at a higher frequency, use a Logstore. For more information, see [Manage a Logstore](~~48990~~).
async def describe_audit_records_with_options_async(
    self,
    request: dds_20151201_models.DescribeAuditRecordsRequest,
    runtime: util_models.RuntimeOptions,
) -> dds_20151201_models.DescribeAuditRecordsResponse:
    """Async variant of the DescribeAuditRecords RPC call.

    The instance's audit-log feature must be enabled, or the service
    returns an empty result set.

    :param request: populated DescribeAuditRecords request model
    :param runtime: per-call runtime options (timeouts, retries, ...)
    :return: the deserialized DescribeAuditRecords response model
    """
    UtilClient.validate_model(request)
    # Wire-level parameter name -> request attribute, in protocol order.
    # Unset attributes are dropped below so they are never transmitted.
    candidates = {
        'DBInstanceId': request.dbinstance_id,
        'Database': request.database,
        'EndTime': request.end_time,
        'Form': request.form,
        'NodeId': request.node_id,
        'OrderType': request.order_type,
        'OwnerAccount': request.owner_account,
        'OwnerId': request.owner_id,
        'PageNumber': request.page_number,
        'PageSize': request.page_size,
        'QueryKeywords': request.query_keywords,
        'ResourceOwnerAccount': request.resource_owner_account,
        'ResourceOwnerId': request.resource_owner_id,
        'SecurityToken': request.security_token,
        'StartTime': request.start_time,
        'User': request.user,
    }
    query = {name: value for name, value in candidates.items()
             if not UtilClient.is_unset(value)}
    req = open_api_models.OpenApiRequest(
        query=OpenApiUtilClient.query(query)
    )
    params = open_api_models.Params(
        action='DescribeAuditRecords',
        version='2015-12-01',
        protocol='HTTPS',
        pathname='/',
        method='POST',
        auth_type='AK',
        style='RPC',
        req_body_type='formData',
        body_type='json'
    )
    raw = await self.call_api_async(params, req, runtime)
    return TeaCore.from_map(
        dds_20151201_models.DescribeAuditRecordsResponse(), raw
    )
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_getAuditLogsWithNoParams(self):\r\n logs = self.client.getAuditLogs()\r\n return logs", "def enable_audit_monitoring():\n __enable_data_access_logging()\n __enable_log_streaming()\n __create_audit_alerts()\n __get_incidents_history()", "def __enable_data_access_logging():\n ...
[ "0.6445617", "0.6258791", "0.5892536", "0.57143676", "0.56648254", "0.5663997", "0.56277156", "0.5568767", "0.5466888", "0.5466888", "0.5455449", "0.5371057", "0.53292465", "0.5321605", "0.53134197", "0.5309756", "0.530172", "0.52579325", "0.52530557", "0.5208595", "0.5206626...
0.0
-1
When you call this operation, ensure that the audit log feature of the instance is enabled. Otherwise, the operation returns an empty audit log. This operation is applicable only to general-purpose local-disk and dedicated local-disk instances. You can call this operation up to 30 times per minute. To call this operation at a higher frequency, use a Logstore. For more information, see [Manage a Logstore](~~48990~~).
def describe_audit_records( self, request: dds_20151201_models.DescribeAuditRecordsRequest, ) -> dds_20151201_models.DescribeAuditRecordsResponse: runtime = util_models.RuntimeOptions() return self.describe_audit_records_with_options(request, runtime)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_getAuditLogsWithNoParams(self):\r\n logs = self.client.getAuditLogs()\r\n return logs", "def enable_audit_monitoring():\n __enable_data_access_logging()\n __enable_log_streaming()\n __create_audit_alerts()\n __get_incidents_history()", "def __enable_data_access_logging():\n ...
[ "0.64446086", "0.62588435", "0.5892038", "0.57147324", "0.56653476", "0.5664631", "0.56301963", "0.55668545", "0.54649603", "0.54649603", "0.54543316", "0.5371827", "0.5329789", "0.5322089", "0.5311146", "0.5310638", "0.5301255", "0.52584666", "0.52535313", "0.52063483", "0.5...
0.0
-1
When you call this operation, ensure that the audit log feature of the instance is enabled. Otherwise, the operation returns an empty audit log. This operation is applicable only to general-purpose local-disk and dedicated local-disk instances. You can call this operation up to 30 times per minute. To call this operation at a higher frequency, use a Logstore. For more information, see [Manage a Logstore](~~48990~~).
async def describe_audit_records_async( self, request: dds_20151201_models.DescribeAuditRecordsRequest, ) -> dds_20151201_models.DescribeAuditRecordsResponse: runtime = util_models.RuntimeOptions() return await self.describe_audit_records_with_options_async(request, runtime)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_getAuditLogsWithNoParams(self):\r\n logs = self.client.getAuditLogs()\r\n return logs", "def enable_audit_monitoring():\n __enable_data_access_logging()\n __enable_log_streaming()\n __create_audit_alerts()\n __get_incidents_history()", "def __enable_data_access_logging():\n ...
[ "0.6444057", "0.62613404", "0.58935", "0.5716179", "0.566535", "0.5664581", "0.5631282", "0.55675215", "0.546644", "0.546644", "0.5454982", "0.53715456", "0.5330038", "0.532172", "0.53130406", "0.53122586", "0.5303453", "0.52579725", "0.5254205", "0.52071667", "0.5206211", ...
0.0
-1
You can call this operation to query zones in which you can create an ApsaraDB for MongoDB instance.
def describe_availability_zones_with_options(
    self,
    request: dds_20151201_models.DescribeAvailabilityZonesRequest,
    runtime: util_models.RuntimeOptions,
) -> dds_20151201_models.DescribeAvailabilityZonesResponse:
    """Call the DescribeAvailabilityZones RPC and return the parsed response.

    Lists zones in which an ApsaraDB for MongoDB instance can be created.

    :param request: populated DescribeAvailabilityZones request model
    :param runtime: per-call runtime options (timeouts, retries, ...)
    :return: the deserialized DescribeAvailabilityZones response model
    """
    UtilClient.validate_model(request)
    # Wire-level parameter name -> request attribute, in protocol order.
    # Unset attributes are dropped below so they are never transmitted.
    candidates = {
        'AcceptLanguage': request.accept_language,
        'DbType': request.db_type,
        'ExcludeSecondaryZoneId': request.exclude_secondary_zone_id,
        'ExcludeZoneId': request.exclude_zone_id,
        'InstanceChargeType': request.instance_charge_type,
        'MongoType': request.mongo_type,
        'OwnerAccount': request.owner_account,
        'OwnerId': request.owner_id,
        'RegionId': request.region_id,
        'ResourceGroupId': request.resource_group_id,
        'ResourceOwnerAccount': request.resource_owner_account,
        'ResourceOwnerId': request.resource_owner_id,
        'SecurityToken': request.security_token,
        'StorageSupport': request.storage_support,
        'StorageType': request.storage_type,
        'ZoneId': request.zone_id,
    }
    query = {name: value for name, value in candidates.items()
             if not UtilClient.is_unset(value)}
    req = open_api_models.OpenApiRequest(
        query=OpenApiUtilClient.query(query)
    )
    params = open_api_models.Params(
        action='DescribeAvailabilityZones',
        version='2015-12-01',
        protocol='HTTPS',
        pathname='/',
        method='POST',
        auth_type='AK',
        style='RPC',
        req_body_type='formData',
        body_type='json'
    )
    raw = self.call_api(params, req, runtime)
    return TeaCore.from_map(
        dds_20151201_models.DescribeAvailabilityZonesResponse(), raw
    )
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_all_db_zone(self, context):\n zone_objs = self.dns_manager.get_all_db_zone(context)\n return zone_objs", "def get_db_zones(self, context, zones):\n zone_objs = self.dns_manager.get_db_zones(context, zones)\n return zone_objs", "def get_all_db_region(self, context):\n ...
[ "0.6203123", "0.6167161", "0.6126965", "0.5621415", "0.56168133", "0.5590212", "0.55828345", "0.5578767", "0.5524153", "0.55237156", "0.5507523", "0.5407913", "0.53904384", "0.5384361", "0.5374826", "0.5361223", "0.5351975", "0.5332274", "0.5330482", "0.53106487", "0.5309314"...
0.46437985
97
You can call this operation to query zones in which you can create an ApsaraDB for MongoDB instance.
async def describe_availability_zones_with_options_async(
    self,
    request: dds_20151201_models.DescribeAvailabilityZonesRequest,
    runtime: util_models.RuntimeOptions,
) -> dds_20151201_models.DescribeAvailabilityZonesResponse:
    """Async variant of the DescribeAvailabilityZones RPC call.

    Lists zones in which an ApsaraDB for MongoDB instance can be created.

    :param request: populated DescribeAvailabilityZones request model
    :param runtime: per-call runtime options (timeouts, retries, ...)
    :return: the deserialized DescribeAvailabilityZones response model
    """
    UtilClient.validate_model(request)
    # Wire-level parameter name -> request attribute, in protocol order.
    # Unset attributes are dropped below so they are never transmitted.
    candidates = {
        'AcceptLanguage': request.accept_language,
        'DbType': request.db_type,
        'ExcludeSecondaryZoneId': request.exclude_secondary_zone_id,
        'ExcludeZoneId': request.exclude_zone_id,
        'InstanceChargeType': request.instance_charge_type,
        'MongoType': request.mongo_type,
        'OwnerAccount': request.owner_account,
        'OwnerId': request.owner_id,
        'RegionId': request.region_id,
        'ResourceGroupId': request.resource_group_id,
        'ResourceOwnerAccount': request.resource_owner_account,
        'ResourceOwnerId': request.resource_owner_id,
        'SecurityToken': request.security_token,
        'StorageSupport': request.storage_support,
        'StorageType': request.storage_type,
        'ZoneId': request.zone_id,
    }
    query = {name: value for name, value in candidates.items()
             if not UtilClient.is_unset(value)}
    req = open_api_models.OpenApiRequest(
        query=OpenApiUtilClient.query(query)
    )
    params = open_api_models.Params(
        action='DescribeAvailabilityZones',
        version='2015-12-01',
        protocol='HTTPS',
        pathname='/',
        method='POST',
        auth_type='AK',
        style='RPC',
        req_body_type='formData',
        body_type='json'
    )
    raw = await self.call_api_async(params, req, runtime)
    return TeaCore.from_map(
        dds_20151201_models.DescribeAvailabilityZonesResponse(), raw
    )
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_all_db_zone(self, context):\n zone_objs = self.dns_manager.get_all_db_zone(context)\n return zone_objs", "def get_db_zones(self, context, zones):\n zone_objs = self.dns_manager.get_db_zones(context, zones)\n return zone_objs", "def get_all_db_region(self, context):\n ...
[ "0.62009054", "0.616521", "0.61248976", "0.56200445", "0.56139994", "0.55891585", "0.55817425", "0.5577883", "0.55221754", "0.5521274", "0.550603", "0.5408523", "0.539022", "0.53846765", "0.5373089", "0.5360322", "0.5353252", "0.5330746", "0.5328601", "0.5308971", "0.53078073...
0.0
-1
You can call this operation to query zones in which you can create an ApsaraDB for MongoDB instance.
def describe_availability_zones( self, request: dds_20151201_models.DescribeAvailabilityZonesRequest, ) -> dds_20151201_models.DescribeAvailabilityZonesResponse: runtime = util_models.RuntimeOptions() return self.describe_availability_zones_with_options(request, runtime)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_all_db_zone(self, context):\n zone_objs = self.dns_manager.get_all_db_zone(context)\n return zone_objs", "def get_db_zones(self, context, zones):\n zone_objs = self.dns_manager.get_db_zones(context, zones)\n return zone_objs", "def get_all_db_region(self, context):\n ...
[ "0.6203123", "0.6167161", "0.6126965", "0.5621415", "0.56168133", "0.5590212", "0.55828345", "0.5578767", "0.5524153", "0.55237156", "0.5507523", "0.5407913", "0.53904384", "0.5384361", "0.5374826", "0.5361223", "0.5351975", "0.5332274", "0.5330482", "0.53106487", "0.5309314"...
0.46618956
92
You can call this operation to query zones in which you can create an ApsaraDB for MongoDB instance.
async def describe_availability_zones_async( self, request: dds_20151201_models.DescribeAvailabilityZonesRequest, ) -> dds_20151201_models.DescribeAvailabilityZonesResponse: runtime = util_models.RuntimeOptions() return await self.describe_availability_zones_with_options_async(request, runtime)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_all_db_zone(self, context):\n zone_objs = self.dns_manager.get_all_db_zone(context)\n return zone_objs", "def get_db_zones(self, context, zones):\n zone_objs = self.dns_manager.get_db_zones(context, zones)\n return zone_objs", "def get_all_db_region(self, context):\n ...
[ "0.6200916", "0.6165996", "0.6125184", "0.5621011", "0.56150806", "0.55883425", "0.5581309", "0.55779856", "0.55233276", "0.552181", "0.5506578", "0.54070085", "0.5390536", "0.5383279", "0.5373094", "0.5361262", "0.53543204", "0.5331395", "0.5328404", "0.53102696", "0.5309388...
0.0
-1
Precautions You can call the [CreateDBInstance](~~61763~~) operation to restore a database for an ApsaraDB for MongoDB instance. For more information, see [Restore one or more databases of an ApsaraDB for MongoDB instance](~~112274~~).
def describe_backup_dbs_with_options( self, request: dds_20151201_models.DescribeBackupDBsRequest, runtime: util_models.RuntimeOptions, ) -> dds_20151201_models.DescribeBackupDBsResponse: UtilClient.validate_model(request) query = {} if not UtilClient.is_unset(request.backup_id): query['BackupId'] = request.backup_id if not UtilClient.is_unset(request.owner_account): query['OwnerAccount'] = request.owner_account if not UtilClient.is_unset(request.owner_id): query['OwnerId'] = request.owner_id if not UtilClient.is_unset(request.page_number): query['PageNumber'] = request.page_number if not UtilClient.is_unset(request.page_size): query['PageSize'] = request.page_size if not UtilClient.is_unset(request.resource_group_id): query['ResourceGroupId'] = request.resource_group_id if not UtilClient.is_unset(request.resource_owner_account): query['ResourceOwnerAccount'] = request.resource_owner_account if not UtilClient.is_unset(request.resource_owner_id): query['ResourceOwnerId'] = request.resource_owner_id if not UtilClient.is_unset(request.restore_time): query['RestoreTime'] = request.restore_time if not UtilClient.is_unset(request.security_token): query['SecurityToken'] = request.security_token if not UtilClient.is_unset(request.source_dbinstance): query['SourceDBInstance'] = request.source_dbinstance req = open_api_models.OpenApiRequest( query=OpenApiUtilClient.query(query) ) params = open_api_models.Params( action='DescribeBackupDBs', version='2015-12-01', protocol='HTTPS', pathname='/', method='POST', auth_type='AK', style='RPC', req_body_type='formData', body_type='json' ) return TeaCore.from_map( dds_20151201_models.DescribeBackupDBsResponse(), self.call_api(params, req, runtime) )
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def mongoRestore( self, db, infile ):\n\t\tsys_command = \"mongorestore --db \" + db + \" --host \" + self.host + \" --port \" + str( self.port ) + \" \" + infile \n\t\tos.system(sys_command)", "def newDb(options, dbName, adminPswd, userPswd, viewerPswd):\n if not re.match(\"^[A-Za-z][A-Za-z0-9_]*$\", dbName)...
[ "0.5919263", "0.58436376", "0.58316594", "0.5817874", "0.57264364", "0.5723861", "0.5637776", "0.56310433", "0.56009275", "0.56000507", "0.55804306", "0.5555646", "0.5523822", "0.5507321", "0.550587", "0.5490671", "0.54519933", "0.54179496", "0.54134303", "0.53781307", "0.531...
0.0
-1
Precautions You can call the [CreateDBInstance](~~61763~~) operation to restore a database for an ApsaraDB for MongoDB instance. For more information, see [Restore one or more databases of an ApsaraDB for MongoDB instance](~~112274~~).
async def describe_backup_dbs_with_options_async( self, request: dds_20151201_models.DescribeBackupDBsRequest, runtime: util_models.RuntimeOptions, ) -> dds_20151201_models.DescribeBackupDBsResponse: UtilClient.validate_model(request) query = {} if not UtilClient.is_unset(request.backup_id): query['BackupId'] = request.backup_id if not UtilClient.is_unset(request.owner_account): query['OwnerAccount'] = request.owner_account if not UtilClient.is_unset(request.owner_id): query['OwnerId'] = request.owner_id if not UtilClient.is_unset(request.page_number): query['PageNumber'] = request.page_number if not UtilClient.is_unset(request.page_size): query['PageSize'] = request.page_size if not UtilClient.is_unset(request.resource_group_id): query['ResourceGroupId'] = request.resource_group_id if not UtilClient.is_unset(request.resource_owner_account): query['ResourceOwnerAccount'] = request.resource_owner_account if not UtilClient.is_unset(request.resource_owner_id): query['ResourceOwnerId'] = request.resource_owner_id if not UtilClient.is_unset(request.restore_time): query['RestoreTime'] = request.restore_time if not UtilClient.is_unset(request.security_token): query['SecurityToken'] = request.security_token if not UtilClient.is_unset(request.source_dbinstance): query['SourceDBInstance'] = request.source_dbinstance req = open_api_models.OpenApiRequest( query=OpenApiUtilClient.query(query) ) params = open_api_models.Params( action='DescribeBackupDBs', version='2015-12-01', protocol='HTTPS', pathname='/', method='POST', auth_type='AK', style='RPC', req_body_type='formData', body_type='json' ) return TeaCore.from_map( dds_20151201_models.DescribeBackupDBsResponse(), await self.call_api_async(params, req, runtime) )
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def mongoRestore( self, db, infile ):\n\t\tsys_command = \"mongorestore --db \" + db + \" --host \" + self.host + \" --port \" + str( self.port ) + \" \" + infile \n\t\tos.system(sys_command)", "def newDb(options, dbName, adminPswd, userPswd, viewerPswd):\n if not re.match(\"^[A-Za-z][A-Za-z0-9_]*$\", dbName)...
[ "0.59217155", "0.5840126", "0.5833199", "0.58179116", "0.57253283", "0.5724098", "0.5639648", "0.562868", "0.5600819", "0.55996966", "0.55794525", "0.5553458", "0.5525459", "0.55086654", "0.5505341", "0.5490055", "0.5451436", "0.5417792", "0.54116344", "0.5381589", "0.5310766...
0.0
-1
Precautions You can call the [CreateDBInstance](~~61763~~) operation to restore a database for an ApsaraDB for MongoDB instance. For more information, see [Restore one or more databases of an ApsaraDB for MongoDB instance](~~112274~~).
def describe_backup_dbs( self, request: dds_20151201_models.DescribeBackupDBsRequest, ) -> dds_20151201_models.DescribeBackupDBsResponse: runtime = util_models.RuntimeOptions() return self.describe_backup_dbs_with_options(request, runtime)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def mongoRestore( self, db, infile ):\n\t\tsys_command = \"mongorestore --db \" + db + \" --host \" + self.host + \" --port \" + str( self.port ) + \" \" + infile \n\t\tos.system(sys_command)", "def newDb(options, dbName, adminPswd, userPswd, viewerPswd):\n if not re.match(\"^[A-Za-z][A-Za-z0-9_]*$\", dbName)...
[ "0.5919263", "0.58436376", "0.58316594", "0.5817874", "0.57264364", "0.5723861", "0.5637776", "0.56310433", "0.56009275", "0.56000507", "0.55804306", "0.5555646", "0.5523822", "0.5507321", "0.550587", "0.5490671", "0.54519933", "0.54179496", "0.54134303", "0.53781307", "0.531...
0.0
-1
Precautions You can call the [CreateDBInstance](~~61763~~) operation to restore a database for an ApsaraDB for MongoDB instance. For more information, see [Restore one or more databases of an ApsaraDB for MongoDB instance](~~112274~~).
async def describe_backup_dbs_async( self, request: dds_20151201_models.DescribeBackupDBsRequest, ) -> dds_20151201_models.DescribeBackupDBsResponse: runtime = util_models.RuntimeOptions() return await self.describe_backup_dbs_with_options_async(request, runtime)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def mongoRestore( self, db, infile ):\n\t\tsys_command = \"mongorestore --db \" + db + \" --host \" + self.host + \" --port \" + str( self.port ) + \" \" + infile \n\t\tos.system(sys_command)", "def newDb(options, dbName, adminPswd, userPswd, viewerPswd):\n if not re.match(\"^[A-Za-z][A-Za-z0-9_]*$\", dbName)...
[ "0.5919891", "0.584343", "0.5832707", "0.58185637", "0.5727084", "0.5724684", "0.56387365", "0.5631168", "0.56005603", "0.55993086", "0.558136", "0.5554274", "0.55219203", "0.5506672", "0.5506641", "0.54907274", "0.54535127", "0.54183096", "0.54140645", "0.5377761", "0.531104...
0.0
-1
Usage When you call the DescribeDBInstanceEncryptionKey operation, the instance must have transparent data encryption (TDE) enabled in BYOK mode. You can call the [ModifyDBInstanceTDE](~~131267~~) operation to enable TDE.
def describe_dbinstance_encryption_key_with_options( self, request: dds_20151201_models.DescribeDBInstanceEncryptionKeyRequest, runtime: util_models.RuntimeOptions, ) -> dds_20151201_models.DescribeDBInstanceEncryptionKeyResponse: UtilClient.validate_model(request) query = {} if not UtilClient.is_unset(request.dbinstance_id): query['DBInstanceId'] = request.dbinstance_id if not UtilClient.is_unset(request.encryption_key): query['EncryptionKey'] = request.encryption_key if not UtilClient.is_unset(request.owner_account): query['OwnerAccount'] = request.owner_account if not UtilClient.is_unset(request.owner_id): query['OwnerId'] = request.owner_id if not UtilClient.is_unset(request.resource_owner_account): query['ResourceOwnerAccount'] = request.resource_owner_account if not UtilClient.is_unset(request.resource_owner_id): query['ResourceOwnerId'] = request.resource_owner_id if not UtilClient.is_unset(request.security_token): query['SecurityToken'] = request.security_token req = open_api_models.OpenApiRequest( query=OpenApiUtilClient.query(query) ) params = open_api_models.Params( action='DescribeDBInstanceEncryptionKey', version='2015-12-01', protocol='HTTPS', pathname='/', method='POST', auth_type='AK', style='RPC', req_body_type='formData', body_type='json' ) return TeaCore.from_map( dds_20151201_models.DescribeDBInstanceEncryptionKeyResponse(), self.call_api(params, req, runtime) )
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def describe_dbinstance_encryption_key(\n self,\n request: dds_20151201_models.DescribeDBInstanceEncryptionKeyRequest,\n ) -> dds_20151201_models.DescribeDBInstanceEncryptionKeyResponse:\n runtime = util_models.RuntimeOptions()\n return self.describe_dbinstance_encryption_key_with_op...
[ "0.673887", "0.6716472", "0.65085775", "0.6401395", "0.6124461", "0.6019866", "0.5743563", "0.5593969", "0.5453083", "0.53697926", "0.50822043", "0.5020621", "0.50204563", "0.50042874", "0.4984543", "0.49620974", "0.49530983", "0.48969638", "0.4866419", "0.4756271", "0.472960...
0.6771073
0
Usage When you call the DescribeDBInstanceEncryptionKey operation, the instance must have transparent data encryption (TDE) enabled in BYOK mode. You can call the [ModifyDBInstanceTDE](~~131267~~) operation to enable TDE.
async def describe_dbinstance_encryption_key_with_options_async( self, request: dds_20151201_models.DescribeDBInstanceEncryptionKeyRequest, runtime: util_models.RuntimeOptions, ) -> dds_20151201_models.DescribeDBInstanceEncryptionKeyResponse: UtilClient.validate_model(request) query = {} if not UtilClient.is_unset(request.dbinstance_id): query['DBInstanceId'] = request.dbinstance_id if not UtilClient.is_unset(request.encryption_key): query['EncryptionKey'] = request.encryption_key if not UtilClient.is_unset(request.owner_account): query['OwnerAccount'] = request.owner_account if not UtilClient.is_unset(request.owner_id): query['OwnerId'] = request.owner_id if not UtilClient.is_unset(request.resource_owner_account): query['ResourceOwnerAccount'] = request.resource_owner_account if not UtilClient.is_unset(request.resource_owner_id): query['ResourceOwnerId'] = request.resource_owner_id if not UtilClient.is_unset(request.security_token): query['SecurityToken'] = request.security_token req = open_api_models.OpenApiRequest( query=OpenApiUtilClient.query(query) ) params = open_api_models.Params( action='DescribeDBInstanceEncryptionKey', version='2015-12-01', protocol='HTTPS', pathname='/', method='POST', auth_type='AK', style='RPC', req_body_type='formData', body_type='json' ) return TeaCore.from_map( dds_20151201_models.DescribeDBInstanceEncryptionKeyResponse(), await self.call_api_async(params, req, runtime) )
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def describe_dbinstance_encryption_key_with_options(\n self,\n request: dds_20151201_models.DescribeDBInstanceEncryptionKeyRequest,\n runtime: util_models.RuntimeOptions,\n ) -> dds_20151201_models.DescribeDBInstanceEncryptionKeyResponse:\n UtilClient.validate_model(request)\n ...
[ "0.67736393", "0.6740733", "0.6716724", "0.64031655", "0.61244464", "0.60200423", "0.5743947", "0.55937266", "0.54525197", "0.5369969", "0.5083985", "0.5020417", "0.50197154", "0.5004098", "0.49828136", "0.4963517", "0.4952876", "0.4897045", "0.4868045", "0.4755675", "0.47292...
0.6510804
3
Usage When you call the DescribeDBInstanceEncryptionKey operation, the instance must have transparent data encryption (TDE) enabled in BYOK mode. You can call the [ModifyDBInstanceTDE](~~131267~~) operation to enable TDE.
def describe_dbinstance_encryption_key( self, request: dds_20151201_models.DescribeDBInstanceEncryptionKeyRequest, ) -> dds_20151201_models.DescribeDBInstanceEncryptionKeyResponse: runtime = util_models.RuntimeOptions() return self.describe_dbinstance_encryption_key_with_options(request, runtime)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def describe_dbinstance_encryption_key_with_options(\n self,\n request: dds_20151201_models.DescribeDBInstanceEncryptionKeyRequest,\n runtime: util_models.RuntimeOptions,\n ) -> dds_20151201_models.DescribeDBInstanceEncryptionKeyResponse:\n UtilClient.validate_model(request)\n ...
[ "0.6771073", "0.6716472", "0.65085775", "0.6401395", "0.6124461", "0.6019866", "0.5743563", "0.5593969", "0.5453083", "0.53697926", "0.50822043", "0.5020621", "0.50204563", "0.50042874", "0.4984543", "0.49620974", "0.49530983", "0.48969638", "0.4866419", "0.4756271", "0.47296...
0.673887
1
Usage When you call the DescribeDBInstanceEncryptionKey operation, the instance must have transparent data encryption (TDE) enabled in BYOK mode. You can call the [ModifyDBInstanceTDE](~~131267~~) operation to enable TDE.
async def describe_dbinstance_encryption_key_async( self, request: dds_20151201_models.DescribeDBInstanceEncryptionKeyRequest, ) -> dds_20151201_models.DescribeDBInstanceEncryptionKeyResponse: runtime = util_models.RuntimeOptions() return await self.describe_dbinstance_encryption_key_with_options_async(request, runtime)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def describe_dbinstance_encryption_key_with_options(\n self,\n request: dds_20151201_models.DescribeDBInstanceEncryptionKeyRequest,\n runtime: util_models.RuntimeOptions,\n ) -> dds_20151201_models.DescribeDBInstanceEncryptionKeyResponse:\n UtilClient.validate_model(request)\n ...
[ "0.6772166", "0.6739101", "0.671771", "0.6509689", "0.6125098", "0.60211706", "0.5744258", "0.5594484", "0.5452331", "0.5370367", "0.50826246", "0.5019888", "0.5019626", "0.500507", "0.49832922", "0.49626267", "0.49528944", "0.48964608", "0.48669487", "0.47548133", "0.4728359...
0.6401872
4
You can call this operation to query whether TDE is enabled for an ApsaraDB for MongoDB instance.
def describe_dbinstance_tdeinfo_with_options( self, request: dds_20151201_models.DescribeDBInstanceTDEInfoRequest, runtime: util_models.RuntimeOptions, ) -> dds_20151201_models.DescribeDBInstanceTDEInfoResponse: UtilClient.validate_model(request) query = {} if not UtilClient.is_unset(request.dbinstance_id): query['DBInstanceId'] = request.dbinstance_id if not UtilClient.is_unset(request.owner_account): query['OwnerAccount'] = request.owner_account if not UtilClient.is_unset(request.owner_id): query['OwnerId'] = request.owner_id if not UtilClient.is_unset(request.resource_owner_account): query['ResourceOwnerAccount'] = request.resource_owner_account if not UtilClient.is_unset(request.resource_owner_id): query['ResourceOwnerId'] = request.resource_owner_id if not UtilClient.is_unset(request.security_token): query['SecurityToken'] = request.security_token req = open_api_models.OpenApiRequest( query=OpenApiUtilClient.query(query) ) params = open_api_models.Params( action='DescribeDBInstanceTDEInfo', version='2015-12-01', protocol='HTTPS', pathname='/', method='POST', auth_type='AK', style='RPC', req_body_type='formData', body_type='json' ) return TeaCore.from_map( dds_20151201_models.DescribeDBInstanceTDEInfoResponse(), self.call_api(params, req, runtime) )
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def in_smartctl_database(self) -> bool:\n return self._in_smartctl_database", "def electrolytedb():\n if not check_for_mongodb():\n pytest.skip(\"MongoDB is required\")", "def is_mongod_running(self):\r\n \r\n try:\r\n _connect_to_mongo_port(int(self.port))\r\n ...
[ "0.5924647", "0.5714203", "0.53105587", "0.5305546", "0.52914995", "0.52460915", "0.5220277", "0.5192149", "0.51821816", "0.5150939", "0.5114558", "0.50682026", "0.50487125", "0.50102746", "0.49994388", "0.49988487", "0.4979508", "0.49714425", "0.49660787", "0.49513412", "0.4...
0.0
-1
You can call this operation to query whether TDE is enabled for an ApsaraDB for MongoDB instance.
async def describe_dbinstance_tdeinfo_with_options_async(
    self,
    request: dds_20151201_models.DescribeDBInstanceTDEInfoRequest,
    runtime: util_models.RuntimeOptions,
) -> dds_20151201_models.DescribeDBInstanceTDEInfoResponse:
    """Async variant: query whether TDE (Transparent Data Encryption) is
    enabled for an ApsaraDB for MongoDB instance.

    @param request: DescribeDBInstanceTDEInfoRequest carrying the instance
        id and caller identity fields.
    @param runtime: runtime options controlling this single API call.
    @return: DescribeDBInstanceTDEInfoResponse deserialized from the raw
        API reply.
    """
    UtilClient.validate_model(request)
    # Map request attributes onto their wire-level query-parameter names;
    # only attributes that are actually set on the request are sent.
    query = {}
    for attr_name, wire_name in (
        ('dbinstance_id', 'DBInstanceId'),
        ('owner_account', 'OwnerAccount'),
        ('owner_id', 'OwnerId'),
        ('resource_owner_account', 'ResourceOwnerAccount'),
        ('resource_owner_id', 'ResourceOwnerId'),
        ('security_token', 'SecurityToken'),
    ):
        value = getattr(request, attr_name)
        if not UtilClient.is_unset(value):
            query[wire_name] = value
    req = open_api_models.OpenApiRequest(
        query=OpenApiUtilClient.query(query)
    )
    # Fixed RPC descriptor for the DescribeDBInstanceTDEInfo action of the
    # 2015-12-01 API version.
    params = open_api_models.Params(
        action='DescribeDBInstanceTDEInfo',
        version='2015-12-01',
        protocol='HTTPS',
        pathname='/',
        method='POST',
        auth_type='AK',
        style='RPC',
        req_body_type='formData',
        body_type='json'
    )
    raw_reply = await self.call_api_async(params, req, runtime)
    return TeaCore.from_map(
        dds_20151201_models.DescribeDBInstanceTDEInfoResponse(),
        raw_reply
    )
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def in_smartctl_database(self) -> bool:\n return self._in_smartctl_database", "def electrolytedb():\n if not check_for_mongodb():\n pytest.skip(\"MongoDB is required\")", "def is_mongod_running(self):\r\n \r\n try:\r\n _connect_to_mongo_port(int(self.port))\r\n ...
[ "0.59242505", "0.5713555", "0.5309751", "0.5305572", "0.5293364", "0.52466565", "0.52212167", "0.51937973", "0.5182682", "0.5151327", "0.51129353", "0.50678843", "0.5049087", "0.5011104", "0.49999186", "0.49974996", "0.49802878", "0.49719286", "0.49677187", "0.4951272", "0.49...
0.0
-1
You can call this operation to query whether TDE is enabled for an ApsaraDB for MongoDB instance.
def describe_dbinstance_tdeinfo(
    self,
    request: dds_20151201_models.DescribeDBInstanceTDEInfoRequest,
) -> dds_20151201_models.DescribeDBInstanceTDEInfoResponse:
    """Query whether TDE is enabled for an ApsaraDB for MongoDB instance.

    Convenience wrapper that delegates to
    ``describe_dbinstance_tdeinfo_with_options`` with default runtime
    options.

    @param request: DescribeDBInstanceTDEInfoRequest
    @return: DescribeDBInstanceTDEInfoResponse
    """
    return self.describe_dbinstance_tdeinfo_with_options(
        request, util_models.RuntimeOptions()
    )
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def in_smartctl_database(self) -> bool:\n return self._in_smartctl_database", "def electrolytedb():\n if not check_for_mongodb():\n pytest.skip(\"MongoDB is required\")", "def is_mongod_running(self):\r\n \r\n try:\r\n _connect_to_mongo_port(int(self.port))\r\n ...
[ "0.5924647", "0.5714203", "0.53105587", "0.5305546", "0.52914995", "0.52460915", "0.5220277", "0.5192149", "0.51821816", "0.5150939", "0.5114558", "0.50682026", "0.50487125", "0.50102746", "0.49994388", "0.49988487", "0.4979508", "0.49714425", "0.49660787", "0.49513412", "0.4...
0.0
-1
You can call this operation to query whether TDE is enabled for an ApsaraDB for MongoDB instance.
async def describe_dbinstance_tdeinfo_async(
    self,
    request: dds_20151201_models.DescribeDBInstanceTDEInfoRequest,
) -> dds_20151201_models.DescribeDBInstanceTDEInfoResponse:
    """Async variant: query whether TDE is enabled for an ApsaraDB for
    MongoDB instance.

    Convenience wrapper that delegates to
    ``describe_dbinstance_tdeinfo_with_options_async`` with default
    runtime options.

    @param request: DescribeDBInstanceTDEInfoRequest
    @return: DescribeDBInstanceTDEInfoResponse
    """
    return await self.describe_dbinstance_tdeinfo_with_options_async(
        request, util_models.RuntimeOptions()
    )
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def in_smartctl_database(self) -> bool:\n return self._in_smartctl_database", "def electrolytedb():\n if not check_for_mongodb():\n pytest.skip(\"MongoDB is required\")", "def is_mongod_running(self):\r\n \r\n try:\r\n _connect_to_mongo_port(int(self.port))\r\n ...
[ "0.59242594", "0.5712957", "0.53105", "0.53063715", "0.5291589", "0.5246864", "0.52204335", "0.5191934", "0.51834506", "0.51516616", "0.51128775", "0.50659907", "0.5047623", "0.5010192", "0.4998476", "0.49962136", "0.4980182", "0.49710995", "0.4966058", "0.49504077", "0.49502...
0.0
-1