text_prompt
stringlengths
157
13.1k
code_prompt
stringlengths
7
19.8k
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def form_sent(request, slug, template="forms/form_sent.html"): """ Show the response message. """
published = Form.objects.published(for_user=request.user) context = {"form": get_object_or_404(published, slug=slug)} return render_to_response(template, context, RequestContext(request))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_queryset(self, request): """ Annotate the queryset with the entries count for use in the admin list view. """
qs = super(FormAdmin, self).get_queryset(request) return qs.annotate(total_entries=Count("entries"))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def file_view(self, request, field_entry_id): """ Output the file for the requested field entry. """
model = self.fieldentry_model field_entry = get_object_or_404(model, id=field_entry_id) path = join(fs.location, field_entry.value) response = HttpResponse(content_type=guess_type(path)[0]) f = open(path, "r+b") response["Content-Disposition"] = "attachment; filename=%s" % f.name response.write(f.read()) f.close() return response
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_live_scores(self, use_12_hour_format): """Gets the live scores"""
req = requests.get(RequestHandler.LIVE_URL) if req.status_code == requests.codes.ok: scores_data = [] scores = req.json() if len(scores["games"]) == 0: click.secho("No live action currently", fg="red", bold=True) return for score in scores['games']: # match football-data api structure d = {} d['homeTeam'] = {'name': score['homeTeamName']} d['awayTeam'] = {'name': score['awayTeamName']} d['score'] = {'fullTime': {'homeTeam': score['goalsHomeTeam'], 'awayTeam': score['goalsAwayTeam']}} d['league'] = score['league'] d['time'] = score['time'] scores_data.append(d) self.writer.live_scores(scores_data) else: click.secho("There was problem getting live scores", fg="red", bold=True)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_team_scores(self, team, time, show_upcoming, use_12_hour_format): """Queries the API and gets the particular team scores"""
team_id = self.team_names.get(team, None) time_frame = 'n' if show_upcoming else 'p' if team_id: try: req = self._get('teams/{team_id}/matches?timeFrame={time_frame}{time}'.format( team_id=team_id, time_frame=time_frame, time=time)) team_scores = req.json() if len(team_scores["matches"]) == 0: click.secho("No action during past week. Change the time " "parameter to get more fixtures.", fg="red", bold=True) else: self.writer.team_scores(team_scores, time, show_upcoming, use_12_hour_format) except APIErrorException as e: click.secho(e.args[0], fg="red", bold=True) else: click.secho("Team code is not correct.", fg="red", bold=True)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_standings(self, league): """Queries the API and gets the standings for a particular league"""
league_id = self.league_ids[league] try: req = self._get('competitions/{id}/standings'.format( id=league_id)) self.writer.standings(req.json(), league) except APIErrorException: # Click handles incorrect League codes so this will only come up # if that league does not have standings available. ie. Champions League click.secho("No standings availble for {league}.".format(league=league), fg="red", bold=True)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_league_scores(self, league, time, show_upcoming, use_12_hour_format): """ Queries the API and fetches the scores for fixtures based upon the league and time parameter """
time_frame = 'n' if show_upcoming else 'p' if league: try: league_id = self.league_ids[league] req = self._get('competitions/{id}/matches?timeFrame={time_frame}{time}'.format( id=league_id, time_frame=time_frame, time=str(time))) fixtures_results = req.json() # no fixtures in the past week. display a help message and return if len(fixtures_results["matches"]) == 0: click.secho("No {league} matches in the past week.".format(league=league), fg="red", bold=True) return self.writer.league_scores(fixtures_results, time, show_upcoming, use_12_hour_format) except APIErrorException: click.secho("No data for the given league.", fg="red", bold=True) else: # When no league specified. Print all available in time frame. try: req = self._get('matches?timeFrame={time_frame}{time}'.format( time_frame=time_frame, time=str(time))) fixtures_results = req.json() self.writer.league_scores(fixtures_results, time, show_upcoming, use_12_hour_format) except APIErrorException: click.secho("No data available.", fg="red", bold=True)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_team_players(self, team): """ Queries the API and fetches the players for a particular team """
team_id = self.team_names.get(team, None) try: req = self._get('teams/{}/'.format(team_id)) team_players = req.json()['squad'] if not team_players: click.secho("No players found for this team", fg="red", bold=True) else: self.writer.team_players(team_players) except APIErrorException: click.secho("No data for the team. Please check the team code.", fg="red", bold=True)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def load_json(file): """Load JSON file at app start"""
here = os.path.dirname(os.path.abspath(__file__)) with open(os.path.join(here, file)) as jfile: data = json.load(jfile) return data
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_input_key(): """Input API key and validate"""
click.secho("No API key found!", fg="yellow", bold=True) click.secho("Please visit {} and get an API token.".format(RequestHandler.BASE_URL), fg="yellow", bold=True) while True: confkey = click.prompt(click.style("Enter API key", fg="yellow", bold=True)) if len(confkey) == 32: # 32 chars try: int(confkey, 16) # hexadecimal except ValueError: click.secho("Invalid API key", fg="red", bold=True) else: break else: click.secho("Invalid API key", fg="red", bold=True) return confkey
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def load_config_key(): """Load API key from config file, write if needed"""
global api_token try: api_token = os.environ['SOCCER_CLI_API_TOKEN'] except KeyError: home = os.path.expanduser("~") config = os.path.join(home, ".soccer-cli.ini") if not os.path.exists(config): with open(config, "w") as cfile: key = get_input_key() cfile.write(key) else: with open(config, "r") as cfile: key = cfile.read() if key: api_token = key else: os.remove(config) # remove 0-byte file click.secho('No API Token detected. ' 'Please visit {0} and get an API Token, ' 'which will be used by Soccer CLI ' 'to get access to the data.' .format(RequestHandler.BASE_URL), fg="red", bold=True) sys.exit(1) return api_token
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def map_team_id(code): """Take in team ID, read JSON file to map ID to name"""
for team in TEAM_DATA: if team["code"] == code: click.secho(team["name"], fg="green") break else: click.secho("No team found for this code", fg="red", bold=True)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def list_team_codes(): """List team names in alphabetical order of team ID, per league."""
# Sort teams by league, then alphabetical by code cleanlist = sorted(TEAM_DATA, key=lambda k: (k["league"]["name"], k["code"])) # Get league names leaguenames = sorted(list(set([team["league"]["name"] for team in cleanlist]))) for league in leaguenames: teams = [team for team in cleanlist if team["league"]["name"] == league] click.secho(league, fg="green", bold=True) for team in teams: if team["code"] != "null": click.secho(u"{0}: {1}".format(team["code"], team["name"]), fg="yellow") click.secho("")
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def main(league, time, standings, team, live, use12hour, players, output_format, output_file, upcoming, lookup, listcodes, apikey): """ A CLI for live and past football scores from various football leagues. League codes: \b - WC: World Cup - EC: European Championship - CL: Champions League - PL: English Premier League - ELC: English Championship - FL1: French Ligue 1 - BL: German Bundesliga - SA: Serie A - DED: Eredivisie - PPL: Primeira Liga - PD: Primera Division - BSA: Brazil Serie A """
headers = {'X-Auth-Token': apikey} try: if output_format == 'stdout' and output_file: raise IncorrectParametersException('Printing output to stdout and ' 'saving to a file are mutually exclusive') writer = get_writer(output_format, output_file) rh = RequestHandler(headers, LEAGUE_IDS, TEAM_NAMES, writer) if listcodes: list_team_codes() return if live: rh.get_live_scores(use12hour) return if standings: if not league: raise IncorrectParametersException('Please specify a league. ' 'Example --standings --league=PL') if league == 'CL': raise IncorrectParametersException('Standings for CL - ' 'Champions League not supported') rh.get_standings(league) return if team: if lookup: map_team_id(team) return if players: rh.get_team_players(team) return else: rh.get_team_scores(team, time, upcoming, use12hour) return rh.get_league_scores(league, time, upcoming, use12hour) except IncorrectParametersException as e: click.secho(str(e), fg="red", bold=True)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def live_scores(self, live_scores): """Prints the live scores in a pretty format"""
scores = sorted(live_scores, key=lambda x: x["league"]) for league, games in groupby(scores, key=lambda x: x["league"]): self.league_header(league) for game in games: self.scores(self.parse_result(game), add_new_line=False) click.secho(' %s' % Stdout.utc_to_local(game["time"], use_12_hour_format=False), fg=self.colors.TIME) click.echo()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def team_scores(self, team_scores, time, show_datetime, use_12_hour_format): """Prints the teams scores in a pretty format"""
for score in team_scores["matches"]: if score["status"] == "FINISHED": click.secho("%s\t" % score["utcDate"].split('T')[0], fg=self.colors.TIME, nl=False) self.scores(self.parse_result(score)) elif show_datetime: self.scores(self.parse_result(score), add_new_line=False) click.secho(' %s' % Stdout.utc_to_local(score["utcDate"], use_12_hour_format, show_datetime), fg=self.colors.TIME)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def team_players(self, team): """Prints the team players in a pretty format"""
players = sorted(team, key=lambda d: d['shirtNumber']) click.secho("%-4s %-25s %-20s %-20s %-15s" % ("N.", "NAME", "POSITION", "NATIONALITY", "BIRTHDAY"), bold=True, fg=self.colors.MISC) fmt = (u"{shirtNumber:<4} {name:<28} {position:<23} {nationality:<23}" u" {dateOfBirth:<18}") for player in players: click.secho(fmt.format(**player), bold=True)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def standings(self, league_table, league): """ Prints the league standings in a pretty way """
click.secho("%-6s %-30s %-10s %-10s %-10s" % ("POS", "CLUB", "PLAYED", "GOAL DIFF", "POINTS")) for team in league_table["standings"][0]["table"]: if team["goalDifference"] >= 0: team["goalDifference"] = ' ' + str(team["goalDifference"]) # Define the upper and lower bounds for Champions League, # Europa League and Relegation places. # This is so we can highlight them appropriately. cl_upper, cl_lower = LEAGUE_PROPERTIES[league]['cl'] el_upper, el_lower = LEAGUE_PROPERTIES[league]['el'] rl_upper, rl_lower = LEAGUE_PROPERTIES[league]['rl'] team['teamName'] = team['team']['name'] team_str = (u"{position:<7} {teamName:<33} {playedGames:<12}" u" {goalDifference:<14} {points}").format(**team) if cl_upper <= team["position"] <= cl_lower: click.secho(team_str, bold=True, fg=self.colors.CL_POSITION) elif el_upper <= team["position"] <= el_lower: click.secho(team_str, fg=self.colors.EL_POSITION) elif rl_upper <= team["position"] <= rl_lower: click.secho(team_str, fg=self.colors.RL_POSITION) else: click.secho(team_str, fg=self.colors.POSITION)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def league_scores(self, total_data, time, show_datetime, use_12_hour_format): """Prints the data in a pretty format"""
for match in total_data['matches']: self.scores(self.parse_result(match), add_new_line=not show_datetime) if show_datetime: click.secho(' %s' % Stdout.utc_to_local(match["utcDate"], use_12_hour_format, show_datetime), fg=self.colors.TIME) click.echo()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def league_header(self, league): """Prints the league header"""
league_name = " {0} ".format(league) click.secho("{:=^62}".format(league_name), fg=self.colors.MISC) click.echo()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def scores(self, result, add_new_line=True): """Prints out the scores in a pretty format"""
if result.goalsHomeTeam > result.goalsAwayTeam: homeColor, awayColor = (self.colors.WIN, self.colors.LOSE) elif result.goalsHomeTeam < result.goalsAwayTeam: homeColor, awayColor = (self.colors.LOSE, self.colors.WIN) else: homeColor = awayColor = self.colors.TIE click.secho('%-25s %2s' % (result.homeTeam, result.goalsHomeTeam), fg=homeColor, nl=False) click.secho(" vs ", nl=False) click.secho('%2s %s' % (result.goalsAwayTeam, result.awayTeam.rjust(25)), fg=awayColor, nl=add_new_line)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def parse_result(self, data): """Parses the results and returns a Result namedtuple"""
def valid_score(score): return "" if score is None else score return self.Result( data["homeTeam"]["name"], valid_score(data["score"]["fullTime"]["homeTeam"]), data["awayTeam"]["name"], valid_score(data["score"]["fullTime"]["awayTeam"]))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def utc_to_local(time_str, use_12_hour_format, show_datetime=False): """Converts the API UTC time string to the local user time."""
if not (time_str.endswith(" UTC") or time_str.endswith("Z")): return time_str today_utc = datetime.datetime.utcnow() utc_local_diff = today_utc - datetime.datetime.now() if time_str.endswith(" UTC"): time_str, _ = time_str.split(" UTC") utc_time = datetime.datetime.strptime(time_str, '%I:%M %p') utc_datetime = datetime.datetime(today_utc.year, today_utc.month, today_utc.day, utc_time.hour, utc_time.minute) else: utc_datetime = datetime.datetime.strptime(time_str, '%Y-%m-%dT%H:%M:%SZ') local_time = utc_datetime - utc_local_diff if use_12_hour_format: date_format = '%I:%M %p' if not show_datetime else '%a %d, %I:%M %p' else: date_format = '%H:%M' if not show_datetime else '%a %d, %H:%M' return datetime.datetime.strftime(local_time, date_format)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def live_scores(self, live_scores): """Store output of live scores to a CSV file"""
headers = ['League', 'Home Team Name', 'Home Team Goals', 'Away Team Goals', 'Away Team Name'] result = [headers] result.extend([game['league'], game['homeTeamName'], game['goalsHomeTeam'], game['goalsAwayTeam'], game['awayTeamName']] for game in live_scores['games']) self.generate_output(result)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def team_scores(self, team_scores, time): """Store output of team scores to a CSV file"""
headers = ['Date', 'Home Team Name', 'Home Team Goals', 'Away Team Goals', 'Away Team Name'] result = [headers] result.extend([score["utcDate"].split('T')[0], score['homeTeam']['name'], score['score']['fullTime']['homeTeam'], score['score']['fullTime']['awayTeam'], score['awayTeam']['name']] for score in team_scores['matches'] if score['status'] == 'FINISHED') self.generate_output(result)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def team_players(self, team): """Store output of team players to a CSV file"""
headers = ['Jersey Number', 'Name', 'Position', 'Nationality', 'Date of Birth'] result = [headers] result.extend([player['shirtNumber'], player['name'], player['position'], player['nationality'], player['dateOfBirth']] for player in team) self.generate_output(result)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def standings(self, league_table, league): """Store output of league standings to a CSV file"""
headers = ['Position', 'Team Name', 'Games Played', 'Goal For', 'Goals Against', 'Goal Difference', 'Points'] result = [headers] result.extend([team['position'], team['team']['name'], team['playedGames'], team['goalsFor'], team['goalsAgainst'], team['goalDifference'], team['points']] for team in league_table['standings'][0]['table']) self.generate_output(result)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def league_scores(self, total_data, time, show_upcoming, use_12_hour_format): """Store output of fixtures based on league and time to a CSV file"""
headers = ['League', 'Home Team Name', 'Home Team Goals', 'Away Team Goals', 'Away Team Name'] result = [headers] league = total_data['competition']['name'] result.extend([league, score['homeTeam']['name'], score['score']['fullTime']['homeTeam'], score['score']['fullTime']['awayTeam'], score['awayTeam']['name']] for score in total_data['matches']) self.generate_output(result)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def team_scores(self, team_scores, time): """Store output of team scores to a JSON file"""
data = [] for score in team_scores['matches']: if score['status'] == 'FINISHED': item = {'date': score["utcDate"].split('T')[0], 'homeTeamName': score['homeTeam']['name'], 'goalsHomeTeam': score['score']['fullTime']['homeTeam'], 'goalsAwayTeam': score['score']['fullTime']['awayTeam'], 'awayTeamName': score['awayTeam']['name']} data.append(item) self.generate_output({'team_scores': data})
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def standings(self, league_table, league): """Store output of league standings to a JSON file"""
data = [] for team in league_table['standings'][0]['table']: item = {'position': team['position'], 'teamName': team['team'], 'playedGames': team['playedGames'], 'goalsFor': team['goalsFor'], 'goalsAgainst': team['goalsAgainst'], 'goalDifference': team['goalDifference'], 'points': team['points']} data.append(item) self.generate_output({'standings': data})
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def team_players(self, team): """Store output of team players to a JSON file"""
keys = 'shirtNumber name position nationality dateOfBirth'.split() data = [{key: player[key] for key in keys} for player in team] self.generate_output({'players': data})
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def league_scores(self, total_data, time): """Store output of fixtures based on league and time to a JSON file"""
data = [] for league, score in self.supported_leagues(total_data): item = {'league': league, 'homeTeamName': score['homeTeamName'], 'goalsHomeTeam': score['result']['goalsHomeTeam'], 'goalsAwayTeam': score['result']['goalsAwayTeam'], 'awayTeamName': score['awayTeamName']} data.append(item) self.generate_output({'league_scores': data, 'time': time})
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def example_camera(): """ Example with `morphological_chan_vese` with using the default initialization of the level-set. """
logging.info('Running: example_camera (MorphACWE)...') # Load the image. img = imread(PATH_IMG_CAMERA)/255.0 # Callback for visual plotting callback = visual_callback_2d(img) # Morphological Chan-Vese (or ACWE) ms.morphological_chan_vese(img, 35, smoothing=3, lambda1=1, lambda2=1, iter_callback=callback)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def operator_si(u): """operator_si operator."""
global _aux if np.ndim(u) == 2: P = _P2 elif np.ndim(u) == 3: P = _P3 else: raise ValueError("u has an invalid number of dimensions " "(should be 2 or 3)") if u.shape != _aux.shape[1:]: _aux = np.zeros((len(P),) + u.shape) for _aux_i, P_i in zip(_aux, P): _aux_i[:] = binary_erosion(u, P_i) return _aux.max(0)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def operator_is(u): """operator_is operator."""
global _aux if np.ndim(u) == 2: P = _P2 elif np.ndim(u) == 3: P = _P3 else: raise ValueError("u has an invalid number of dimensions " "(should be 2 or 3)") if u.shape != _aux.shape[1:]: _aux = np.zeros((len(P),) + u.shape) for _aux_i, P_i in zip(_aux, P): _aux_i[:] = binary_dilation(u, P_i) return _aux.min(0)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def gborders(img, alpha=1.0, sigma=1.0): """Stopping criterion for image borders."""
# The norm of the gradient. gradnorm = gaussian_gradient_magnitude(img, sigma, mode='constant') return 1.0/np.sqrt(1.0 + alpha*gradnorm)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def step(self): """Perform a single step of the morphological Chan-Vese evolution."""
# Assign attributes to local variables for convenience. u = self._u if u is None: raise ValueError("the levelset function is not set " "(use set_levelset)") data = self.data # Determine c0 and c1. inside = (u > 0) outside = (u <= 0) c0 = data[outside].sum() / float(outside.sum()) c1 = data[inside].sum() / float(inside.sum()) # Image attachment. dres = np.array(np.gradient(u)) abs_dres = np.abs(dres).sum(0) #aux = abs_dres * (c0 - c1) * (c0 + c1 - 2*data) aux = abs_dres * (self.lambda1*(data - c1) ** 2 - self.lambda2*(data - c0) ** 2) res = np.copy(u) res[aux < 0] = 1 res[aux > 0] = 0 # Smoothing. for i in range(self.smoothing): res = curvop(res) self._u = res
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _update_mask(self): """Pre-compute masks for speed."""
self._threshold_mask = self._data > self._theta self._threshold_mask_v = self._data > self._theta/np.abs(self._v)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def step(self): """Perform a single step of the morphological snake evolution."""
# Assign attributes to local variables for convenience. u = self._u gI = self._data dgI = self._ddata theta = self._theta v = self._v if u is None: raise ValueError("the levelset is not set (use set_levelset)") res = np.copy(u) # Balloon. if v > 0: aux = binary_dilation(u, self.structure) elif v < 0: aux = binary_erosion(u, self.structure) if v!= 0: res[self._threshold_mask_v] = aux[self._threshold_mask_v] # Image attachment. aux = np.zeros_like(res) dres = np.gradient(res) for el1, el2 in zip(dgI, dres): aux += el1*el2 res[aux > 0] = 1 res[aux < 0] = 0 # Smoothing. for i in range(self.smoothing): res = curvop(res) self._u = res
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def sup_inf(u): """SI operator."""
if np.ndim(u) == 2: P = _P2 elif np.ndim(u) == 3: P = _P3 else: raise ValueError("u has an invalid number of dimensions " "(should be 2 or 3)") erosions = [] for P_i in P: erosions.append(ndi.binary_erosion(u, P_i)) return np.array(erosions, dtype=np.int8).max(0)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def inf_sup(u): """IS operator."""
if np.ndim(u) == 2: P = _P2 elif np.ndim(u) == 3: P = _P3 else: raise ValueError("u has an invalid number of dimensions " "(should be 2 or 3)") dilations = [] for P_i in P: dilations.append(ndi.binary_dilation(u, P_i)) return np.array(dilations, dtype=np.int8).min(0)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _check_input(image, init_level_set): """Check that shapes of `image` and `init_level_set` match."""
if not image.ndim in [2, 3]: raise ValueError("`image` must be a 2 or 3-dimensional array.") if len(image.shape) != len(init_level_set.shape): raise ValueError("The dimensions of the initial level set do not " "match the dimensions of the image.")
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _init_level_set(init_level_set, image_shape): """Auxiliary function for initializing level sets with a string. If `init_level_set` is not a string, it is returned as is. """
if isinstance(init_level_set, str): if init_level_set == 'checkerboard': res = checkerboard_level_set(image_shape) elif init_level_set == 'circle': res = circle_level_set(image_shape) else: raise ValueError("`init_level_set` not in " "['checkerboard', 'circle']") else: res = init_level_set return res
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def circle_level_set(image_shape, center=None, radius=None): """Create a circle level set with binary values. Parameters image_shape : tuple of positive integers Shape of the image center : tuple of positive integers, optional Coordinates of the center of the circle given in (row, column). If not given, it defaults to the center of the image. radius : float, optional Radius of the circle. If not given, it is set to the 75% of the smallest image dimension. Returns ------- out : array with shape `image_shape` Binary level set of the circle with the given `radius` and `center`. See also -------- checkerboard_level_set """
if center is None: center = tuple(i // 2 for i in image_shape) if radius is None: radius = min(image_shape) * 3.0 / 8.0 grid = np.mgrid[[slice(i) for i in image_shape]] grid = (grid.T - center).T phi = radius - np.sqrt(np.sum((grid)**2, 0)) res = np.int8(phi > 0) return res
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def checkerboard_level_set(image_shape, square_size=5): """Create a checkerboard level set with binary values. Parameters image_shape : tuple of positive integers Shape of the image. square_size : int, optional Size of the squares of the checkerboard. It defaults to 5. Returns ------- out : array with shape `image_shape` Binary level set of the checkerboard. See also -------- circle_level_set """
grid = np.ogrid[[slice(i) for i in image_shape]] grid = [(grid_i // square_size) & 1 for grid_i in grid] checkerboard = np.bitwise_xor.reduce(grid, axis=0) res = np.int8(checkerboard) return res
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def inverse_gaussian_gradient(image, alpha=100.0, sigma=5.0): """Inverse of gradient magnitude. Compute the magnitude of the gradients in the image and then inverts the result in the range [0, 1]. Flat areas are assigned values close to 1, while areas close to borders are assigned values close to 0. This function or a similar one defined by the user should be applied over the image as a preprocessing step before calling `morphological_geodesic_active_contour`. Parameters image : (M, N) or (L, M, N) array Grayscale image or volume. alpha : float, optional Controls the steepness of the inversion. A larger value will make the transition between the flat areas and border areas steeper in the resulting array. sigma : float, optional Standard deviation of the Gaussian filter applied over the image. Returns ------- gimage : (M, N) or (L, M, N) array Preprocessed image (or volume) suitable for `morphological_geodesic_active_contour`. """
gradnorm = ndi.gaussian_gradient_magnitude(image, sigma, mode='nearest') return 1.0 / np.sqrt(1.0 + alpha * gradnorm)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_handler(self, *args, **options): """ Returns the static files serving handler wrapping the default handler, if static files should be served. Otherwise just returns the default handler. """
handler = super(Command, self).get_handler(*args, **options) insecure_serving = options.get('insecure_serving', False) if self.should_use_static_handler(options): return StaticFilesHandler(handler) return handler
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def privateKeyToAccount(self, private_key): ''' Returns a convenient object for working with the given private key. :param private_key: The raw private key :type private_key: hex str, bytes, int or :class:`eth_keys.datatypes.PrivateKey` :return: object with methods for signing and encrypting :rtype: LocalAccount .. code-block:: python >>> acct = Account.privateKeyToAccount( 0xb25c7db31feed9122727bf0939dc769a96564b2de4c4726d035b36ecf1e5b364) >>> acct.address '0x5ce9454909639D2D17A3F753ce7d93fa0b9aB12E' >>> acct.privateKey b"\\xb2\\}\\xb3\\x1f\\xee\\xd9\\x12''\\xbf\\t9\\xdcv\\x9a\\x96VK-\\xe4\\xc4rm\\x03[6\\xec\\xf1\\xe5\\xb3d" # These methods are also available: signHash(), signTransaction(), encrypt() # They correspond to the same-named methods in Account.* # but without the private key argument ''' key = self._parsePrivateKey(private_key) return LocalAccount(key, self)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def recoverTransaction(self, serialized_transaction): ''' Get the address of the account that signed this transaction. :param serialized_transaction: the complete signed transaction :type serialized_transaction: hex str, bytes or int :returns: address of signer, hex-encoded & checksummed :rtype: str .. code-block:: python >>> raw_transaction = '0xf86a8086d55698372431831e848094f0109fc8df283027b6285cc889f5aa624eac1f55843b9aca008025a009ebb6ca057a0535d6186462bc0b465b561c94a295bdb0621fc19208ab149a9ca0440ffd775ce91a833ab410777204d5341a6f9fa91216a6f3ee2c051fea6a0428', # noqa: E501 >>> Account.recoverTransaction(raw_transaction) '0x2c7536E3605D9C16a7a3D7b1898e529396a65c23' ''' txn_bytes = HexBytes(serialized_transaction) txn = Transaction.from_bytes(txn_bytes) msg_hash = hash_of_signed_transaction(txn) return self.recoverHash(msg_hash, vrs=vrs_from(txn))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def signHash(self, message_hash, private_key): ''' Sign the hash provided. .. WARNING:: *Never* sign a hash that you didn't generate, it can be an arbitrary transaction. For example, it might send all of your account's ether to an attacker. If you would like compatibility with :meth:`w3.eth.sign() <web3.eth.Eth.sign>` you can use :meth:`~eth_account.messages.defunct_hash_message`. Several other message standards are proposed, but none have a clear consensus. You'll need to manually comply with any of those message standards manually. :param message_hash: the 32-byte message hash to be signed :type message_hash: hex str, bytes or int :param private_key: the key to sign the message with :type private_key: hex str, bytes, int or :class:`eth_keys.datatypes.PrivateKey` :returns: Various details about the signature - most importantly the fields: v, r, and s :rtype: ~eth_account.datastructures.AttributeDict .. code-block:: python >>> msg = "I♥SF" >>> from eth_account.messages import defunct_hash_message >>> msghash = defunct_hash_message(text=msg) HexBytes('0x1476abb745d423bf09273f1afd887d951181d25adc66c4834a70491911b7f750') >>> key = "0xb25c7db31feed9122727bf0939dc769a96564b2de4c4726d035b36ecf1e5b364" >>> Account.signHash(msghash, key) {'messageHash': HexBytes('0x1476abb745d423bf09273f1afd887d951181d25adc66c4834a70491911b7f750'), # noqa: E501 'r': 104389933075820307925104709181714897380569894203213074526835978196648170704563, 's': 28205917190874851400050446352651915501321657673772411533993420917949420456142, 'signature': HexBytes('0xe6ca9bba58c88611fad66a6ce8f996908195593807c4b38bd528d2cff09d4eb33e5bfbbf4d3e39b1a2fd816a7680c19ebebaf3a141b239934ad43cb33fcec8ce1c'), # noqa: E501 'v': 28} # these are equivalent: >>> Account.signHash( 0x1476abb745d423bf09273f1afd887d951181d25adc66c4834a70491911b7f750, key ) >>> Account.signHash( "0x1476abb745d423bf09273f1afd887d951181d25adc66c4834a70491911b7f750", key ) ''' msg_hash_bytes = HexBytes(message_hash) if len(msg_hash_bytes) != 32: raise ValueError("The message hash must be exactly 32-bytes") key = self._parsePrivateKey(private_key) (v, r, s, eth_signature_bytes) = sign_message_hash(key, msg_hash_bytes) return AttributeDict({ 'messageHash': msg_hash_bytes, 'r': r, 's': s, 'v': v, 'signature': HexBytes(eth_signature_bytes), })
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_dependencies(primary_type, types): """ Perform DFS to get all the dependencies of the primary_type """
deps = set() struct_names_yet_to_be_expanded = [primary_type] while len(struct_names_yet_to_be_expanded) > 0: struct_name = struct_names_yet_to_be_expanded.pop() deps.add(struct_name) fields = types[struct_name] for field in fields: if field["type"] not in types: # We don't need to expand types that are not user defined (customized) continue elif field["type"] in deps: # skip types that we have already encountered continue else: # Custom Struct Type struct_names_yet_to_be_expanded.append(field["type"]) # Don't need to make a struct as dependency of itself deps.remove(primary_type) return tuple(deps)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def is_valid_abi_type(type_name): """ This function is used to make sure that the ``type_name`` is a valid ABI Type. Please note that this is a temporary function and should be replaced by the corresponding ABI function, once the following issue has been resolved. https://github.com/ethereum/eth-abi/issues/125 """
valid_abi_types = {"address", "bool", "bytes", "int", "string", "uint"} is_bytesN = type_name.startswith("bytes") and 1 <= int(type_name[5:]) <= 32 is_intN = ( type_name.startswith("int") and 8 <= int(type_name[3:]) <= 256 and int(type_name[3:]) % 8 == 0 ) is_uintN = ( type_name.startswith("uint") and 8 <= int(type_name[4:]) <= 256 and int(type_name[4:]) % 8 == 0 ) if type_name in valid_abi_types: return True elif is_bytesN: # bytes1 to bytes32 return True elif is_intN: # int8 to int256 return True elif is_uintN: # uint8 to uint256 return True return False
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_depths_and_dimensions(data, depth): """ Yields 2-length tuples of depth and dimension of each element at that depth """
if not isinstance(data, (list, tuple)): # Not checking for Iterable instance, because even Dictionaries and strings # are considered as iterables, but that's not what we want the condition to be. return () yield depth, len(data) for item in data: # iterating over all 1 dimension less sub-data items yield from get_depths_and_dimensions(item, depth + 1)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def hash_of_signed_transaction(txn_obj): ''' Regenerate the hash of the signed transaction object. 1. Infer the chain ID from the signature 2. Strip out signature from transaction 3. Annotate the transaction with that ID, if available 4. Take the hash of the serialized, unsigned, chain-aware transaction Chain ID inference and annotation is according to EIP-155 See details at https://github.com/ethereum/EIPs/blob/master/EIPS/eip-155.md :return: the hash of the provided transaction, to be signed ''' (chain_id, _v) = extract_chain_id(txn_obj.v) unsigned_parts = strip_signature(txn_obj) if chain_id is None: signable_transaction = UnsignedTransaction(*unsigned_parts) else: extended_transaction = unsigned_parts + [chain_id, 0, 0] signable_transaction = ChainAwareUnsignedTransaction(*extended_transaction) return signable_transaction.hash()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def extract_chain_id(raw_v): ''' Extracts chain ID, according to EIP-155 @return (chain_id, v) ''' above_id_offset = raw_v - CHAIN_ID_OFFSET if above_id_offset < 0: if raw_v in {0, 1}: return (None, raw_v + V_OFFSET) elif raw_v in {27, 28}: return (None, raw_v) else: raise ValueError("v %r is invalid, must be one of: 0, 1, 27, 28, 35+") else: (chain_id, v_bit) = divmod(above_id_offset, 2) return (chain_id, v_bit + V_OFFSET)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_occurrence(event_id, occurrence_id=None, year=None, month=None, day=None, hour=None, minute=None, second=None, tzinfo=None): """ Because occurrences don't have to be persisted, there must be two ways to retrieve them. both need an event, but if its persisted the occurrence can be retrieved with an id. If it is not persisted it takes a date to retrieve it. This function returns an event and occurrence regardless of which method is used. """
if(occurrence_id): occurrence = get_object_or_404(Occurrence, id=occurrence_id) event = occurrence.event elif None not in (year, month, day, hour, minute, second): event = get_object_or_404(Event, id=event_id) date = timezone.make_aware(datetime.datetime(int(year), int(month), int(day), int(hour), int(minute), int(second)), tzinfo) occurrence = event.get_occurrence(date) if occurrence is None: raise Http404 else: raise Http404 return event, occurrence
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_calendars_for_object(self, obj, distinction=''): """ This function allows you to get calendars for a specific object If distinction is set it will filter out any relation that doesnt have that distinction. """
ct = ContentType.objects.get_for_model(obj) if distinction: dist_q = Q(calendarrelation__distinction=distinction) else: dist_q = Q() return self.filter(dist_q, calendarrelation__content_type=ct, calendarrelation__object_id=obj.id)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def create_relation(self, calendar, content_object, distinction='', inheritable=True): """ Creates a relation between calendar and content_object. See CalendarRelation for help on distinction and inheritable """
return CalendarRelation.objects.create( calendar=calendar, distinction=distinction, content_object=content_object)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def occurrences_after(self, after=None): """ It is often useful to know what the next occurrence is given a list of events. This function produces a generator that yields the the most recent occurrence after the date ``after`` from any of the events in ``self.events`` """
from schedule.models import Occurrence if after is None: after = timezone.now() occ_replacer = OccurrenceReplacer( Occurrence.objects.filter(event__in=self.events)) generators = [event._occurrences_after_generator(after) for event in self.events] occurrences = [] for generator in generators: try: heapq.heappush(occurrences, (next(generator), generator)) except StopIteration: pass while occurrences: generator = occurrences[0][1] try: next_occurrence = heapq.heapreplace(occurrences, (next(generator), generator))[0] except StopIteration: next_occurrence = heapq.heappop(occurrences)[0] yield occ_replacer.get_occurrence(next_occurrence)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def get_events_for_object(self, content_object, distinction='', inherit=True): ''' returns a queryset full of events, that relate to the object through, the distinction If inherit is false it will not consider the calendars that the events belong to. If inherit is true it will inherit all of the relations and distinctions that any calendar that it belongs to has, as long as the relation has inheritable set to True. (See Calendar) >>> event = Event.objects.get(title='Test1') >>> user = User.objects.get(username = 'alice') >>> EventRelation.objects.get_events_for_object(user, 'owner', inherit=False) [<Event: Test1: Tuesday, Jan. 1, 2008-Friday, Jan. 11, 2008>] If a distinction is not declared it will not vet the relations based on distinction. >>> EventRelation.objects.get_events_for_object(user, inherit=False) [<Event: Test1: Tuesday, Jan. 1, 2008-Friday, Jan. 11, 2008>, <Event: Test2: Tuesday, Jan. 1, 2008-Friday, Jan. 11, 2008>] Now if there is a Calendar >>> calendar = Calendar(name = 'MyProject') >>> calendar.save() And an event that belongs to that calendar >>> event = Event.objects.get(title='Test2') >>> calendar.events.add(event) If we relate this calendar to some object with inheritable set to true, that relation will be inherited >>> user = User.objects.get(username='bob') >>> cr = calendar.create_relation(user, 'viewer', True) >>> EventRelation.objects.get_events_for_object(user, 'viewer') [<Event: Test1: Tuesday, Jan. 1, 2008-Friday, Jan. 11, 2008>, <Event: Test2: Tuesday, Jan. 1, 2008-Friday, Jan. 11, 2008>] ''' ct = ContentType.objects.get_for_model(type(content_object)) if distinction: dist_q = Q(eventrelation__distinction=distinction) cal_dist_q = Q(calendar__calendarrelation__distinction=distinction) else: dist_q = Q() cal_dist_q = Q() if inherit: inherit_q = Q( cal_dist_q, calendar__calendarrelation__content_type=ct, calendar__calendarrelation__object_id=content_object.id, calendar__calendarrelation__inheritable=True, ) else: inherit_q = Q() event_q = Q(dist_q, eventrelation__content_type=ct, eventrelation__object_id=content_object.id) return Event.objects.filter(inherit_q | event_q)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def create_relation(self, event, content_object, distinction=''): """ Creates a relation between event and content_object. See EventRelation for help on distinction. """
return EventRelation.objects.create( event=event, distinction=distinction, content_object=content_object)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def init_db(): """ Populate a small db with some example entries. """
db.drop_all() db.create_all() # Create sample Post title = "de Finibus Bonorum et Malorum - Part I" text = "Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor \ incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud \ exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure \ dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. \ Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt \ mollit anim id est laborum." post = Post(title=title, text=text) db.session.add(post) db.session.commit()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def load(custom_url=None, pkg_type=None, serve_local=None, version='4.9.2'): """Load CKEditor resource from CDN or local. :param custom_url: The custom resource url to use, build your CKEditor on `CKEditor builder <https://ckeditor.com/cke4/builder>`_. :param pkg_type: The type of CKEditor package, one of ``basic``, ``standard`` and ``full``. Default to ``standard``. It's a mirror argument to overwrite ``CKEDITOR_PKG_TYPE``. :param serve_local: Mirror argument to overwrite ``CKEDITOR_SERVE_LOCAL``. :param version: The version of CKEditor. """
pkg_type = pkg_type or current_app.config['CKEDITOR_PKG_TYPE'] if pkg_type not in ['basic', 'standard', 'full']: warnings.warn('The provided pkg_type string was invalid, ' 'it should be one of basic/standard/full.') pkg_type = 'standard' if serve_local or current_app.config['CKEDITOR_SERVE_LOCAL']: url = url_for('ckeditor.static', filename='%s/ckeditor.js' % pkg_type) else: url = '//cdn.ckeditor.com/%s/%s/ckeditor.js' % (version, pkg_type) if custom_url: url = custom_url return Markup('<script src="%s"></script>' % url)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def interfaces(self): """Collect the available wlan interfaces."""
self._ifaces = [] wifi_ctrl = wifiutil.WifiUtil() for interface in wifi_ctrl.interfaces(): iface = Interface(interface) self._ifaces.append(iface) self._logger.info("Get interface: %s", iface.name()) if not self._ifaces: self._logger.error("Can't get wifi interface") return self._ifaces
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def network_profile_name_list(self, obj): """Get AP profile names."""
profile_list = pointer(WLAN_PROFILE_INFO_LIST()) self._wlan_get_profile_list(self._handle, byref(obj['guid']), byref(profile_list)) profiles = cast(profile_list.contents.ProfileInfo, POINTER(WLAN_PROFILE_INFO)) profile_name_list = [] for i in range(profile_list.contents.dwNumberOfItems): profile_name = '' for j in range(len(profiles[i].strProfileName)): profile_name += profiles[i].strProfileName[j] profile_name_list.append(profile_name) return profile_name_list
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def remove_network_profile(self, obj, params): """Remove the specified AP profile."""
self._logger.debug("delete profile: %s", params.ssid) str_buf = create_unicode_buffer(params.ssid) ret = self._wlan_delete_profile(self._handle, obj['guid'], str_buf) self._logger.debug("delete result %d", ret)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def remove_all_network_profiles(self, obj): """Remove all the AP profiles."""
profile_name_list = self.network_profile_name_list(obj) for profile_name in profile_name_list: self._logger.debug("delete profile: %s", profile_name) str_buf = create_unicode_buffer(profile_name) ret = self._wlan_delete_profile(self._handle, obj['guid'], str_buf) self._logger.debug("delete result %d", ret)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def remove_network_profile(self, obj, params): """Remove the specified AP profiles"""
network_id = -1 profiles = self.network_profiles(obj) for profile in profiles: if profile == params: network_id = profile.id if network_id != -1: self._send_cmd_to_wpas(obj['name'], 'REMOVE_NETWORK {}'.format(network_id))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def scan(self): """Trigger the wifi interface to scan."""
self._logger.info("iface '%s' scans", self.name()) self._wifi_ctrl.scan(self._raw_obj)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def scan_results(self): """Return the scan result."""
bsses = self._wifi_ctrl.scan_results(self._raw_obj) if self._logger.isEnabledFor(logging.INFO): for bss in bsses: self._logger.info("Find bss:") self._logger.info("\tbssid: %s", bss.bssid) self._logger.info("\tssid: %s", bss.ssid) self._logger.info("\tfreq: %d", bss.freq) self._logger.info("\tauth: %s", bss.auth) self._logger.info("\takm: %s", bss.akm) self._logger.info("\tsignal: %d", bss.signal) return bsses
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def network_profiles(self): """Get all the AP profiles."""
profiles = self._wifi_ctrl.network_profiles(self._raw_obj) if self._logger.isEnabledFor(logging.INFO): for profile in profiles: self._logger.info("Get profile:") self._logger.info("\tssid: %s", profile.ssid) self._logger.info("\tauth: %s", profile.auth) self._logger.info("\takm: %s", profile.akm) self._logger.info("\tcipher: %s", profile.cipher) return profiles
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def disconnect(self): """Disconnect from the specified AP."""
self._logger.info("iface '%s' disconnects", self.name()) self._wifi_ctrl.disconnect(self._raw_obj)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def label(self): """Convert a module name to a formatted node label. This is a default policy - please override. """
if len(self.name) > 14 and '.' in self.name: return '\\.\\n'.join(self.name.split('.')) # pragma: nocover return self.name
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def proximity_metric(self, a, b): """Return the weight of the dependency from a to b. Higher weights usually have shorter straighter edges. Return 1 if it has normal weight. A value of 4 is usually good for ensuring that a related pair of modules are drawn next to each other. Returns an int between 1 (unknown, default), and 4 (very related). """
# if self._is_pylib(a) and self._is_pylib(b): # return 1 res = 1 for ap, bp, n in zip(a.path_parts, b.path_parts, list(range(4))): res += ap == bp if n >= 3: break return res
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def dissimilarity_metric(self, a, b): """Return non-zero if references to this module are strange, and should be drawn extra-long. The value defines the length, in rank. This is also good for putting some vertical space between seperate subsystems. Returns an int between 1 (default) and 4 (highly unrelated). """
# if self._is_pylib(a) and self._is_pylib(b): # return 1 res = 4 for an, bn, n in zip_longest(a.name_parts, b.name_parts, list(range(4))): res -= an == bn if n >= 3: break return res
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def connect_generations(self): """Traverse depth-first adding imported_by. """
# for src in list(self.sources.values()): for src in self.sources.values(): for _child in src.imports: if _child in self.sources: child = self.sources[_child] child.imported_by.add(src.name)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def remove_excluded(self): """Remove all sources marked as excluded. """
# import yaml # print yaml.dump({k:v.__json__() for k,v in self.sources.items()}, default_flow_style=False) sources = list(self.sources.values()) for src in sources: if src.excluded: del self.sources[src.name] src.imports = [m for m in src.imports if not self._exclude(m)] src.imported_by = [m for m in src.imported_by if not self._exclude(m)]
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def to_bytes(s): # pragma: nocover """Convert an item into bytes. """
if isinstance(s, bytes): return s if isinstance(s, str) or is_unicode(s): return s.encode("utf-8") try: return unicode(s).encode("utf-8") except NameError: return str(s).encode("utf-8")
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def cmd2args(cmd): """Prepare a command line for execution by Popen. """
if isinstance(cmd, str): return cmd if win32 else shlex.split(cmd) return cmd
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def pipe(cmd, txt): """Pipe `txt` into the command `cmd` and return the output. """
return Popen( cmd2args(cmd), stdout=subprocess.PIPE, stdin=subprocess.PIPE, shell=win32 ).communicate(txt)[0]
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def dot(src, **kw): """Execute the dot command to create an svg output. """
cmd = "dot -T%s" % kw.pop('T', 'svg') for k, v in list(kw.items()): if v is True: cmd += " -%s" % k else: cmd += " -%s%s" % (k, v) return pipe(cmd, to_bytes(src))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def call_graphviz_dot(src, fmt): """Call dot command, and provide helpful error message if we cannot find it. """
try: svg = dot(src, T=fmt) except OSError as e: # pragma: nocover if e.errno == 2: cli.error(""" cannot find 'dot' pydeps calls dot (from graphviz) to create svg diagrams, please make sure that the dot executable is available on your path. """) raise return svg
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def display_svg(kw, fname): # pragma: nocover """Try to display the svg file on this platform. """
if kw['display'] is None: cli.verbose("Displaying:", fname) if sys.platform == 'win32': os.startfile(fname) else: opener = "open" if sys.platform == "darwin" else "xdg-open" subprocess.call([opener, fname]) else: cli.verbose(kw['display'] + " " + fname) os.system(kw['display'] + " " + fname)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def pystdlib(): """Return a set of all module-names in the Python standard library. """
curver = '.'.join(str(x) for x in sys.version_info[:2]) return (set(stdlib_list.stdlib_list(curver)) | { '_LWPCookieJar', '_MozillaCookieJar', '_abcoll', 'email._parseaddr', 'email.base64mime', 'email.feedparser', 'email.quoprimime', 'encodings', 'genericpath', 'ntpath', 'nturl2path', 'os2emxpath', 'posixpath', 'sre_compile', 'sre_parse', 'unittest.case', 'unittest.loader', 'unittest.main', 'unittest.result', 'unittest.runner', 'unittest.signals', 'unittest.suite', 'unittest.util', '_threading_local', 'sre_constants', 'strop', 'repr', 'opcode', 'nt', 'encodings.aliases', '_bisect', '_codecs', '_collections', '_functools', '_hashlib', '_heapq', '_io', '_locale', '_LWPCookieJar', '_md5', '_MozillaCookieJar', '_random', '_sha', '_sha256', '_sha512', '_socket', '_sre', '_ssl', '_struct', '_subprocess', '_threading_local', '_warnings', '_weakref', '_weakrefset', '_winreg' }) - {'__main__'}
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def report(self): # pragma: nocover """Print a report to stdout, listing the found modules with their paths, as well as modules that are missing, or seem to be missing. """
print() print(" %-25s %s" % ("Name", "File")) print(" %-25s %s" % ("----", "----")) # Print modules found keys = list(self.modules.keys()) keys.sort() for key in keys: m = self.modules[key] if m.__path__: print("P", end=' ') else: print("m", end=' ') print("%-25s" % key, m.__file__ or "") # Print missing modules missing, maybe = self.any_missing_maybe() if missing: print() print("Missing modules:") for name in missing: mods = list(self.badmodules[name].keys()) mods.sort() print("?", name, "imported from", ', '.join(mods)) # Print modules that may be missing, but then again, maybe not... if maybe: print() print("Submodules thay appear to be missing, but could also be", end=' ') print("global names in the parent package:") for name in maybe: mods = list(self.badmodules[name].keys()) mods.sort() print("?", name, "imported from", ', '.join(mods))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def name2rgb(hue): """Originally used to calculate color based on module name. """
r, g, b = colorsys.hsv_to_rgb(hue / 360.0, .8, .7) return tuple(int(x * 256) for x in [r, g, b])
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def foreground(background, *options): """Find the best foreground color from `options` based on `background` color. """
def absdiff(a, b): return brightnessdiff(a, b) # return 3 * brightnessdiff(a, b) + colordiff(a, b) diffs = [(absdiff(background, color), color) for color in options] diffs.sort(reverse=True) return diffs[0][1]
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def base_argparser(argv=()): """Initial parser that can set values for the rest of the parsing process. """
global verbose verbose = _not_verbose _p = argparse.ArgumentParser(add_help=False) _p.add_argument('--debug', action='store_true', help="turn on all the show and verbose options (mainly for debugging pydeps itself)") _p.add_argument('--config', help="specify config file", metavar="FILE") _p.add_argument('--no-config', help="disable processing of config files", action='store_true') _p.add_argument('--version', action='store_true', help='print pydeps version') _p.add_argument('-L', '--log', help=textwrap.dedent(''' set log-level to one of CRITICAL, ERROR, WARNING, INFO, DEBUG, NOTSET. ''')) _args, argv = _p.parse_known_args(argv) if _args.log: loglevels = "CRITICAL DEBUG ERROR FATAL INFO WARN" if _args.log not in loglevels: # pragma: nocover error('legal values for the -L parameter are:', loglevels) loglevel = getattr(logging, _args.log) else: loglevel = None logging.basicConfig( level=loglevel, format='%(filename)s:%(lineno)d: %(levelname)s: %(message)s' ) if _args.version: # pragma: nocover print("pydeps v" + __version__) sys.exit(0) return _p, _args, argv
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def dep2req(name, imported_by): """Convert dependency to requirement. """
lst = [item for item in sorted(imported_by) if not item.startswith(name)] res = '%-15s # from: ' % name imps = ', '.join(lst) if len(imps) < WIDTH - 24: return res + imps return res + imps[:WIDTH - 24 - 3] + '...'
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def pydeps2reqs(deps): """Convert a deps instance into requirements. """
reqs = defaultdict(set) for k, v in list(deps.items()): # not a built-in p = v['path'] if p and not p.startswith(sys.real_prefix): if p.startswith(sys.prefix) and 'site-packages' in p: if not p.endswith('.pyd'): if '/win32/' in p.replace('\\', '/'): reqs['win32'] |= set(v['imported_by']) else: name = k.split('.', 1)[0] if name not in skiplist: reqs[name] |= set(v['imported_by']) if '_dummy' in reqs: del reqs['_dummy'] return '\n'.join(dep2req(name, reqs[name]) for name in sorted(reqs))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def main(): """Cli entrypoint. """
if len(sys.argv) == 2: fname = sys.argv[1] data = json.load(open(fname, 'rb')) else: data = json.loads(sys.stdin.read()) print(pydeps2reqs(data))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def pydeps(**args): """Entry point for the ``pydeps`` command. This function should do all the initial parameter and environment munging before calling ``_pydeps`` (so that function has a clean execution path). """
_args = args if args else cli.parse_args(sys.argv[1:]) inp = target.Target(_args['fname']) log.debug("Target: %r", inp) if _args.get('output'): _args['output'] = os.path.abspath(_args['output']) else: _args['output'] = os.path.join( inp.calling_dir, inp.modpath.replace('.', '_') + '.' + _args.get('format', 'svg') ) with inp.chdir_work(): _args['fname'] = inp.fname _args['isdir'] = inp.is_dir if _args.get('externals'): del _args['fname'] exts = externals(inp, **_args) print(json.dumps(exts, indent=4)) return exts # so the tests can assert else: # this is the call you're looking for :-) return _pydeps(inp, **_args)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _path_parts(self, pth): """Return a list of all directories in the path ``pth``. """
res = re.split(r"[\\/]", pth) if res and os.path.splitdrive(res[0]) == (res[0], ''): res[0] += os.path.sep return res
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def cursor(self): """ Create a new ``Cursor`` instance associated with this ``Connection`` :return: A new ``Cursor`` instance """
self._assert_valid() c = Cursor(self.impl.cursor()) self.cursors.add(c) return c
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def close(self): """ Close the connection and all associated cursors. This will implicitly roll back any uncommitted operations. """
for c in self.cursors: c.close() self.cursors = [] self.impl = None
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def make_options(read_buffer_size=None, parameter_sets_to_buffer=None, varchar_max_character_limit=None, prefer_unicode=None, use_async_io=None, autocommit=None, large_decimals_as_64_bit_types=None, limit_varchar_results_to_max=None, force_extra_capacity_for_unicode=None, fetch_wchar_as_char=None): """ Create options that control how turbodbc interacts with a database. These options affect performance for the most part, but some options may require adjustment so that all features work correctly with certain databases. If a parameter is set to `None`, this means the default value is used. :param read_buffer_size: Affects performance. Controls the size of batches fetched from the database when reading result sets. Can be either an instance of ``turbodbc.Megabytes`` (recommended) or ``turbodbc.Rows``. :param parameter_sets_to_buffer: Affects performance. Number of parameter sets (rows) which shall be transferred to the server in a single batch when ``executemany()`` is called. Must be an integer. :param varchar_max_character_limit: Affects behavior/performance. If a result set contains fields of type ``VARCHAR(max)`` or ``NVARCHAR(max)`` or the equivalent type of your database, buffers will be allocated to hold the specified number of characters. This may lead to truncation. The default value is ``65535`` characters. Please note that large values reduce the risk of truncation, but may affect the number of rows in a batch of result sets (see ``read_buffer_size``). Please note that this option only relates to retrieving results, not sending parameters to the database. :param use_async_io: Affects performance. Set this option to ``True`` if you want to use asynchronous I/O, i.e., while Python is busy converting database results to Python objects, new result sets are fetched from the database in the background. :param prefer_unicode: May affect functionality and performance. Some databases do not support strings encoded with UTF-8, leading to UTF-8 characters being misinterpreted, misrepresented, or downright rejected. Set this option to ``True`` if you want to transfer character data using the UCS-2/UCS-16 encoding that use (multiple) two-byte instead of (multiple) one-byte characters. :param autocommit: Affects behavior. If set to ``True``, all queries and commands executed with ``cursor.execute()`` or ``cursor.executemany()`` will be succeeded by an implicit ``COMMIT`` operation, persisting any changes made to the database. If not set or set to ``False``, users has to take care of calling ``cursor.commit()`` themselves. :param large_decimals_as_64_bit_types: Affects behavior. If set to ``True``, ``DECIMAL(x, y)`` results with ``x > 18`` will be rendered as 64 bit integers (``y == 0``) or 64 bit floating point numbers (``y > 0``), respectively. Use this option if your decimal data types are larger than the data they actually hold. Using this data type can lead to overflow errors and loss of precision. If not set or set to ``False``, large decimals are rendered as strings. :param limit_varchar_results_to_max: Affects behavior/performance. If set to ``True``, any text-like fields such as ``VARCHAR(n)`` and ``NVARCHAR(n)`` will be limited to a maximum size of ``varchar_max_character_limit`` characters. This may lead to values being truncated, but reduces the amount of memory required to allocate string buffers, leading to larger, more efficient batches. If not set or set to ``False``, strings can exceed ``varchar_max_character_limit`` in size if the database reports them this way. For fields such as ``TEXT``, some databases report a size of 2 billion characters. Please note that this option only relates to retrieving results, not sending parameters to the database. :param force_extra_capacity_for_unicode Affects behavior/performance. Some ODBC drivers report the length of the ``VARCHAR``/``NVARCHAR`` field rather than the number of code points for which space is required to be allocated, resulting in string truncations. Set this option to ``True`` to increase the memory allocated for ``VARCHAR`` and ``NVARCHAR`` fields and prevent string truncations. Please note that this option only relates to retrieving results, not sending parameters to the database. :param fetch_wchar_as_char Affects behavior. Some ODBC drivers retrieve single byte encoded strings into ``NVARCHAR`` fields of result sets, which are decoded incorrectly by turbodbc default settings, resulting in corrupt strings. Set this option to ``True`` to have turbodbc treat ``NVARCHAR`` types as narrow character types when decoding the fields in result sets. Please note that this option only relates to retrieving results, not sending parameters to the database. :return: An option struct that is suitable to pass to the ``turbodbc_options`` parameter of ``turbodbc.connect()`` """
options = Options() if not read_buffer_size is None: options.read_buffer_size = read_buffer_size if not parameter_sets_to_buffer is None: options.parameter_sets_to_buffer = parameter_sets_to_buffer if not varchar_max_character_limit is None: options.varchar_max_character_limit = varchar_max_character_limit if not prefer_unicode is None: options.prefer_unicode = prefer_unicode if not use_async_io is None: options.use_async_io = use_async_io if not autocommit is None: options.autocommit = autocommit if not large_decimals_as_64_bit_types is None: options.large_decimals_as_64_bit_types = large_decimals_as_64_bit_types if not limit_varchar_results_to_max is None: options.limit_varchar_results_to_max = limit_varchar_results_to_max if not force_extra_capacity_for_unicode is None: options.force_extra_capacity_for_unicode = force_extra_capacity_for_unicode if not fetch_wchar_as_char is None: options.fetch_wchar_as_char = fetch_wchar_as_char return options
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _get_distutils_build_directory(): """ Returns the directory distutils uses to build its files. We need this directory since we build extensions which have to link other ones. """
pattern = "lib.{platform}-{major}.{minor}" return os.path.join('build', pattern.format(platform=sysconfig.get_platform(), major=sys.version_info[0], minor=sys.version_info[1]))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_extension_modules(): extension_modules = [] """ Extension module which is actually a plain C++ library without Python bindings """
turbodbc_sources = _get_source_files('cpp_odbc') + _get_source_files('turbodbc') turbodbc_library = Extension('libturbodbc', sources=turbodbc_sources, include_dirs=include_dirs, extra_compile_args=extra_compile_args, extra_link_args=base_library_link_args, libraries=[odbclib], library_dirs=library_dirs) if sys.platform == "win32": turbodbc_libs = [] else: turbodbc_libs = [_get_turbodbc_libname()] extension_modules.append(turbodbc_library) """ An extension module which contains the main Python bindings for turbodbc """ turbodbc_python_sources = _get_source_files('turbodbc_python') if sys.platform == "win32": turbodbc_python_sources = turbodbc_sources + turbodbc_python_sources turbodbc_python = Extension('turbodbc_intern', sources=turbodbc_python_sources, include_dirs=include_dirs, extra_compile_args=extra_compile_args + hidden_visibility_args, libraries=[odbclib] + turbodbc_libs, extra_link_args=python_module_link_args, library_dirs=library_dirs) extension_modules.append(turbodbc_python) """ An extension module which contains Python bindings which require numpy support to work. Not included in the standard Python bindings so this can stay optional. """ if _has_numpy_headers(): import numpy turbodbc_numpy_sources = _get_source_files('turbodbc_numpy') if sys.platform == "win32": turbodbc_numpy_sources = turbodbc_sources + turbodbc_numpy_sources turbodbc_numpy = Extension('turbodbc_numpy_support', sources=turbodbc_numpy_sources, include_dirs=include_dirs + [numpy.get_include()], extra_compile_args=extra_compile_args + hidden_visibility_args, libraries=[odbclib] + turbodbc_libs, extra_link_args=python_module_link_args, library_dirs=library_dirs) extension_modules.append(turbodbc_numpy) """ An extension module which contains Python bindings which require Apache Arrow support to work. Not included in the standard Python bindings so this can stay optional. """ if _has_arrow_headers(): import pyarrow pyarrow_location = os.path.dirname(pyarrow.__file__) # For now, assume that we build against bundled pyarrow releases. pyarrow_include_dir = os.path.join(pyarrow_location, 'include') turbodbc_arrow_sources = _get_source_files('turbodbc_arrow') pyarrow_module_link_args = list(python_module_link_args) if sys.platform == "win32": turbodbc_arrow_sources = turbodbc_sources + turbodbc_arrow_sources elif sys.platform == "darwin": pyarrow_module_link_args.append('-Wl,-rpath,@loader_path/pyarrow') else: pyarrow_module_link_args.append("-Wl,-rpath,$ORIGIN/pyarrow") turbodbc_arrow = Extension('turbodbc_arrow_support', sources=turbodbc_arrow_sources, include_dirs=include_dirs + [pyarrow_include_dir], extra_compile_args=extra_compile_args + hidden_visibility_args, libraries=[odbclib, 'arrow', 'arrow_python'] + turbodbc_libs, extra_link_args=pyarrow_module_link_args, library_dirs=library_dirs + [pyarrow_location]) extension_modules.append(turbodbc_arrow) return extension_modules
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def connect(dsn=None, turbodbc_options=None, connection_string=None, **kwargs): """ Create a connection with the database identified by the ``dsn`` or the ``connection_string``. :param dsn: Data source name as given in the (unix) odbc.ini file or (Windows) ODBC Data Source Administrator tool. :param turbodbc_options: Options that control how turbodbc interacts with the database. Create such a struct with `turbodbc.make_options()` or leave this blank to take the defaults. :param connection_string: Preformatted ODBC connection string. Specifying this and dsn or kwargs at the same time raises ParameterError. :param \**kwargs: You may specify additional options as you please. These options will go into the connection string that identifies the database. Valid options depend on the specific database you would like to connect with (e.g. `user` and `password`, or `uid` and `pwd`) :return: A connection to your database """
if turbodbc_options is None: turbodbc_options = make_options() if connection_string is not None and (dsn is not None or len(kwargs) > 0): raise ParameterError("Both connection_string and dsn or kwargs specified") if connection_string is None: connection_string = _make_connection_string(dsn, **kwargs) connection = Connection(intern_connect(connection_string, turbodbc_options)) return connection
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def description(self): """ Retrieve a description of the columns in the current result set :return: A tuple of seven elements. Only some elements are meaningful:\n * Element #0 is the name of the column * Element #1 is the type code of the column * Element #6 is true if the column may contain ``NULL`` values """
if self.result_set: info = self.result_set.get_column_info() return [(c.name, c.type_code(), None, None, None, None, c.supports_null_values) for c in info] else: return None