body_hash
stringlengths
64
64
body
stringlengths
23
109k
docstring
stringlengths
1
57k
path
stringlengths
4
198
name
stringlengths
1
115
repository_name
stringlengths
7
111
repository_stars
float64
0
191k
lang
stringclasses
1 value
body_without_docstring
stringlengths
14
108k
unified
stringlengths
45
133k
0daff9f40b4269454dfd17110c7dbcbecc807365c91b5d025107ddd078262cb9
def isOpen(self) -> bool: 'Indicate if a connection is open.' return self._isOpen
Indicate if a connection is open.
pineboolib/application/database/pnconnection.py
isOpen
deavid/pineboo
2
python
def isOpen(self) -> bool: return self._isOpen
def isOpen(self) -> bool: return self._isOpen<|docstring|>Indicate if a connection is open.<|endoftext|>
0dc9e7cf32699f8e5932e9fe561cf2096e5f1e0622476ed7a7a559ae9370748e
def tables(self, t_: Optional[str]=None) -> List[str]: 'Return a list of available tables in the database, according to a given filter.' return self.driver().tables(t_)
Return a list of available tables in the database, according to a given filter.
pineboolib/application/database/pnconnection.py
tables
deavid/pineboo
2
python
def tables(self, t_: Optional[str]=None) -> List[str]: return self.driver().tables(t_)
def tables(self, t_: Optional[str]=None) -> List[str]: return self.driver().tables(t_)<|docstring|>Return a list of available tables in the database, according to a given filter.<|endoftext|>
d3bd434deacd447511755c8ea77633c4d5b7a7ed9fe5c6a8333ad8a8f3728f48
def database(self, name: str=None) -> 'IConnection': 'Return the connection to a database.' if (name is None): return self return self.useConn(name)
Return the connection to a database.
pineboolib/application/database/pnconnection.py
database
deavid/pineboo
2
python
def database(self, name: str=None) -> 'IConnection': if (name is None): return self return self.useConn(name)
def database(self, name: str=None) -> 'IConnection': if (name is None): return self return self.useConn(name)<|docstring|>Return the connection to a database.<|endoftext|>
eb2eaf41ee4717e62afc49daf9d1c11ae6c80c0f2379a55e9872e44619dfa44b
def DBName(self) -> str: 'Return the database name.' try: return self.driver().DBName() except Exception as e: logger.error('DBName: %s', e) return self.db_name
Return the database name.
pineboolib/application/database/pnconnection.py
DBName
deavid/pineboo
2
python
def DBName(self) -> str: try: return self.driver().DBName() except Exception as e: logger.error('DBName: %s', e) return self.db_name
def DBName(self) -> str: try: return self.driver().DBName() except Exception as e: logger.error('DBName: %s', e) return self.db_name<|docstring|>Return the database name.<|endoftext|>
68fd8c46395958c248905b789a09ef2bf85dedaf14614a693e1fc1579777f241
def driver(self) -> Any: 'Return the instance of the driver that is using the connection.' return self.driver_
Return the instance of the driver that is using the connection.
pineboolib/application/database/pnconnection.py
driver
deavid/pineboo
2
python
def driver(self) -> Any: return self.driver_
def driver(self) -> Any: return self.driver_<|docstring|>Return the instance of the driver that is using the connection.<|endoftext|>
f00162e9566403a84520b5addb52f0f0a916695b24411c0c5f45990a00348284
def session(self) -> Any: '\n Sqlalchemy session.\n\n When using the ORM option this function returns the session for sqlAlchemy.\n ' return self.driver().session()
Sqlalchemy session. When using the ORM option this function returns the session for sqlAlchemy.
pineboolib/application/database/pnconnection.py
session
deavid/pineboo
2
python
def session(self) -> Any: '\n Sqlalchemy session.\n\n When using the ORM option this function returns the session for sqlAlchemy.\n ' return self.driver().session()
def session(self) -> Any: '\n Sqlalchemy session.\n\n When using the ORM option this function returns the session for sqlAlchemy.\n ' return self.driver().session()<|docstring|>Sqlalchemy session. When using the ORM option this function returns the session for sqlAlchemy.<|endoftext|>
c57ad6f6c6d3e6fcc5245e49f80d6b2885c3d807b2c33c1d500311a89e827f01
def engine(self) -> Any: 'Sqlalchemy connection.' return self.driver().engine()
Sqlalchemy connection.
pineboolib/application/database/pnconnection.py
engine
deavid/pineboo
2
python
def engine(self) -> Any: return self.driver().engine()
def engine(self) -> Any: return self.driver().engine()<|docstring|>Sqlalchemy connection.<|endoftext|>
8baa0f8682b9125c67b2cbd111fba12664fd2fd755a07c0c7b1c860991dbe934
def declarative_base(self) -> Any: 'Contain the declared models for Sqlalchemy.' return self.driver().declarative_base()
Contain the declared models for Sqlalchemy.
pineboolib/application/database/pnconnection.py
declarative_base
deavid/pineboo
2
python
def declarative_base(self) -> Any: return self.driver().declarative_base()
def declarative_base(self) -> Any: return self.driver().declarative_base()<|docstring|>Contain the declared models for Sqlalchemy.<|endoftext|>
8570d6a07a2da87421551acbf0febb59a370074ea18398a86faf2707c977879c
def cursor(self) -> 'IApiCursor': 'Return a cursor to the database.' if (self.conn is None): raise Exception('cursor. Empty conn!!') return self.conn.cursor()
Return a cursor to the database.
pineboolib/application/database/pnconnection.py
cursor
deavid/pineboo
2
python
def cursor(self) -> 'IApiCursor': if (self.conn is None): raise Exception('cursor. Empty conn!!') return self.conn.cursor()
def cursor(self) -> 'IApiCursor': if (self.conn is None): raise Exception('cursor. Empty conn!!') return self.conn.cursor()<|docstring|>Return a cursor to the database.<|endoftext|>
c6246ab9aea1977eb158d17f728d7539385500f45eb69d9744571921d9bd5ab7
def conectar(self, db_name: str, db_host: Optional[str], db_port: Optional[int], db_userName: Optional[str], db_password: Optional[str]) -> Any: 'Request a connection to the database.' self.db_name = db_name self.db_host = db_host self.db_port = db_port self.db_userName = db_userName self.db_password = db_password if self.name: self.driver().alias_ = ((self.driverName() + ':') + self.name) return self.driver().connect(db_name, db_host, db_port, db_userName, db_password)
Request a connection to the database.
pineboolib/application/database/pnconnection.py
conectar
deavid/pineboo
2
python
def conectar(self, db_name: str, db_host: Optional[str], db_port: Optional[int], db_userName: Optional[str], db_password: Optional[str]) -> Any: self.db_name = db_name self.db_host = db_host self.db_port = db_port self.db_userName = db_userName self.db_password = db_password if self.name: self.driver().alias_ = ((self.driverName() + ':') + self.name) return self.driver().connect(db_name, db_host, db_port, db_userName, db_password)
def conectar(self, db_name: str, db_host: Optional[str], db_port: Optional[int], db_userName: Optional[str], db_password: Optional[str]) -> Any: self.db_name = db_name self.db_host = db_host self.db_port = db_port self.db_userName = db_userName self.db_password = db_password if self.name: self.driver().alias_ = ((self.driverName() + ':') + self.name) return self.driver().connect(db_name, db_host, db_port, db_userName, db_password)<|docstring|>Request a connection to the database.<|endoftext|>
d2f93a8b5a15ec33f673c774b7725cb1982b4b4ffcab166daceb74edc0ce650d
def driverName(self) -> str: 'Return sql driver name.' return self.driver().driverName()
Return sql driver name.
pineboolib/application/database/pnconnection.py
driverName
deavid/pineboo
2
python
def driverName(self) -> str: return self.driver().driverName()
def driverName(self) -> str: return self.driver().driverName()<|docstring|>Return sql driver name.<|endoftext|>
6d66ce2a0aa1bfddc62404a3d2263ddd46a06d5d0830d03336477ee5cf06d018
def driverAlias(self) -> str: 'Return sql driver alias.' return self.driver().alias_
Return sql driver alias.
pineboolib/application/database/pnconnection.py
driverAlias
deavid/pineboo
2
python
def driverAlias(self) -> str: return self.driver().alias_
def driverAlias(self) -> str: return self.driver().alias_<|docstring|>Return sql driver alias.<|endoftext|>
bc045022643ba7571865ea21153c2468c57f67809d0146f01c362793cfe6cd67
def driverNameToDriverAlias(self, name: str) -> str: 'Return the alias from the name of a sql driver.' if (self.driverSql is None): raise Exception('driverNameoDriverAlias. Sql driver manager is not defined') return self.driverSql.nameToAlias(name)
Return the alias from the name of a sql driver.
pineboolib/application/database/pnconnection.py
driverNameToDriverAlias
deavid/pineboo
2
python
def driverNameToDriverAlias(self, name: str) -> str: if (self.driverSql is None): raise Exception('driverNameoDriverAlias. Sql driver manager is not defined') return self.driverSql.nameToAlias(name)
def driverNameToDriverAlias(self, name: str) -> str: if (self.driverSql is None): raise Exception('driverNameoDriverAlias. Sql driver manager is not defined') return self.driverSql.nameToAlias(name)<|docstring|>Return the alias from the name of a sql driver.<|endoftext|>
ac737c70ca8846f6d36ac12a3d02eb6c310ca6b8e31cfeed6b98a29bd35376c7
def lastError(self) -> str: 'Return the last error reported by the sql driver.' return self.driver().lastError()
Return the last error reported by the sql driver.
pineboolib/application/database/pnconnection.py
lastError
deavid/pineboo
2
python
def lastError(self) -> str: return self.driver().lastError()
def lastError(self) -> str: return self.driver().lastError()<|docstring|>Return the last error reported by the sql driver.<|endoftext|>
4a8ec4ffcb2702c100162452045ffb41c9be60591ebc6d0339442029f385207b
def host(self) -> Optional[str]: 'Return the name of the database host.' return self.db_host
Return the name of the database host.
pineboolib/application/database/pnconnection.py
host
deavid/pineboo
2
python
def host(self) -> Optional[str]: return self.db_host
def host(self) -> Optional[str]: return self.db_host<|docstring|>Return the name of the database host.<|endoftext|>
a4bbebaa0584664132746fef85fbaebe1566dceed596cd80f18c7dfb04cc4268
def port(self) -> Optional[int]: 'Return the port used by the database.' return self.db_port
Return the port used by the database.
pineboolib/application/database/pnconnection.py
port
deavid/pineboo
2
python
def port(self) -> Optional[int]: return self.db_port
def port(self) -> Optional[int]: return self.db_port<|docstring|>Return the port used by the database.<|endoftext|>
7966da71af3f316d849f563fcc4d5e5aa83f05e65aded7be1bf6c62e62332f83
def user(self) -> Optional[str]: 'Return the user name used by the database.' return self.db_userName
Return the user name used by the database.
pineboolib/application/database/pnconnection.py
user
deavid/pineboo
2
python
def user(self) -> Optional[str]: return self.db_userName
def user(self) -> Optional[str]: return self.db_userName<|docstring|>Return the user name used by the database.<|endoftext|>
995a274bfbc0fa5eed38e9cff5d3283a15b5d227791177016865be74191f7e28
def password(self) -> Optional[str]: 'Return the password used by the database.' return self.db_password
Return the password used by the database.
pineboolib/application/database/pnconnection.py
password
deavid/pineboo
2
python
def password(self) -> Optional[str]: return self.db_password
def password(self) -> Optional[str]: return self.db_password<|docstring|>Return the password used by the database.<|endoftext|>
badeef61323bcda22df5aefc52b977d9594dd5a9db083317a6244042fbccc59e
def seek(self, offs, whence=0) -> Any: 'Position the cursor at a position in the database.' if (self.conn is None): raise Exception('seek. Empty conn!!') return self.conn.seek(offs, whence)
Position the cursor at a position in the database.
pineboolib/application/database/pnconnection.py
seek
deavid/pineboo
2
python
def seek(self, offs, whence=0) -> Any: if (self.conn is None): raise Exception('seek. Empty conn!!') return self.conn.seek(offs, whence)
def seek(self, offs, whence=0) -> Any: if (self.conn is None): raise Exception('seek. Empty conn!!') return self.conn.seek(offs, whence)<|docstring|>Position the cursor at a position in the database.<|endoftext|>
d74879e895082017916bcfa5a05d7ce56d41318dd938b7eda6d9e7d7acc4ab97
def manager(self) -> 'flmanager.FLManager': '\n Flmanager instance that manages the connection.\n\n Flmanager manages metadata of fields, tables, queries, etc .. to then be managed this data by the controls of the application.\n ' if (not self._manager): from pineboolib.fllegacy.flmanager import FLManager self._manager = FLManager(self) return self._manager
Flmanager instance that manages the connection. Flmanager manages metadata of fields, tables, queries, etc .. to then be managed this data by the controls of the application.
pineboolib/application/database/pnconnection.py
manager
deavid/pineboo
2
python
def manager(self) -> 'flmanager.FLManager': '\n Flmanager instance that manages the connection.\n\n Flmanager manages metadata of fields, tables, queries, etc .. to then be managed this data by the controls of the application.\n ' if (not self._manager): from pineboolib.fllegacy.flmanager import FLManager self._manager = FLManager(self) return self._manager
def manager(self) -> 'flmanager.FLManager': '\n Flmanager instance that manages the connection.\n\n Flmanager manages metadata of fields, tables, queries, etc .. to then be managed this data by the controls of the application.\n ' if (not self._manager): from pineboolib.fllegacy.flmanager import FLManager self._manager = FLManager(self) return self._manager<|docstring|>Flmanager instance that manages the connection. Flmanager manages metadata of fields, tables, queries, etc .. to then be managed this data by the controls of the application.<|endoftext|>
0796e2a9c03ee8b69f1650423443741612775ed7ffcd0874f2760891a079c5df
@decorators.NotImplementedWarn def md5TuplesStateTable(self, curname: str) -> bool: '\n Return the sum md5 with the total records inserted, deleted and modified in the database so far.\n\n Useful to know if the database has been modified from a given moment.\n ' return True
Return the sum md5 with the total records inserted, deleted and modified in the database so far. Useful to know if the database has been modified from a given moment.
pineboolib/application/database/pnconnection.py
md5TuplesStateTable
deavid/pineboo
2
python
@decorators.NotImplementedWarn def md5TuplesStateTable(self, curname: str) -> bool: '\n Return the sum md5 with the total records inserted, deleted and modified in the database so far.\n\n Useful to know if the database has been modified from a given moment.\n ' return True
@decorators.NotImplementedWarn def md5TuplesStateTable(self, curname: str) -> bool: '\n Return the sum md5 with the total records inserted, deleted and modified in the database so far.\n\n Useful to know if the database has been modified from a given moment.\n ' return True<|docstring|>Return the sum md5 with the total records inserted, deleted and modified in the database so far. Useful to know if the database has been modified from a given moment.<|endoftext|>
d268cf22ba55192ee5a6bd9c6dde372921838287dcc179cb6515172bf9fe402a
def setInteractiveGUI(self, b): 'Set if it is an interactive GUI.' self.interactiveGUI_ = b
Set if it is an interactive GUI.
pineboolib/application/database/pnconnection.py
setInteractiveGUI
deavid/pineboo
2
python
def setInteractiveGUI(self, b): self.interactiveGUI_ = b
def setInteractiveGUI(self, b): self.interactiveGUI_ = b<|docstring|>Set if it is an interactive GUI.<|endoftext|>
0b2f10b1a7bbf1b1e803e5ea5466533b0cce695da1ad383ca8c448fb6eab4f1f
@decorators.NotImplementedWarn def setQsaExceptions(self, b: bool) -> None: 'See properties of the qsa exceptions.' pass
See properties of the qsa exceptions.
pineboolib/application/database/pnconnection.py
setQsaExceptions
deavid/pineboo
2
python
@decorators.NotImplementedWarn def setQsaExceptions(self, b: bool) -> None: pass
@decorators.NotImplementedWarn def setQsaExceptions(self, b: bool) -> None: pass<|docstring|>See properties of the qsa exceptions.<|endoftext|>
8b9d7acea792eacdb2b599c444320652d0b47b80deaa14238b242d90f9a33b3b
def db(self) -> 'IConnection': 'Return the connection itself.' return self
Return the connection itself.
pineboolib/application/database/pnconnection.py
db
deavid/pineboo
2
python
def db(self) -> 'IConnection': return self
def db(self) -> 'IConnection': return self<|docstring|>Return the connection itself.<|endoftext|>
1c432174a7920233aeae6929ea04520487834bc338256bcddd01fc7461050fe9
def dbAux(self) -> 'IConnection': '\n Return the auxiliary connection to the database.\n\n This connection is useful for out of transaction operations.\n ' return self.useConn('dbAux')
Return the auxiliary connection to the database. This connection is useful for out of transaction operations.
pineboolib/application/database/pnconnection.py
dbAux
deavid/pineboo
2
python
def dbAux(self) -> 'IConnection': '\n Return the auxiliary connection to the database.\n\n This connection is useful for out of transaction operations.\n ' return self.useConn('dbAux')
def dbAux(self) -> 'IConnection': '\n Return the auxiliary connection to the database.\n\n This connection is useful for out of transaction operations.\n ' return self.useConn('dbAux')<|docstring|>Return the auxiliary connection to the database. This connection is useful for out of transaction operations.<|endoftext|>
00b1898ceb3d0ca593a4c5a1ae5900ea7e15e93b5602417d73703f90c685864b
def formatValue(self, t: str, v: Any, upper: bool) -> Any: 'Return a correctly formatted value to be assigned as a where filter.' return self.driver().formatValue(t, v, upper)
Return a correctly formatted value to be assigned as a where filter.
pineboolib/application/database/pnconnection.py
formatValue
deavid/pineboo
2
python
def formatValue(self, t: str, v: Any, upper: bool) -> Any: return self.driver().formatValue(t, v, upper)
def formatValue(self, t: str, v: Any, upper: bool) -> Any: return self.driver().formatValue(t, v, upper)<|docstring|>Return a correctly formatted value to be assigned as a where filter.<|endoftext|>
50148bf68cf501d0ca3a8faff0b3986e8c63fe762ecf775abe3087c505af0514
def formatValueLike(self, t, v, upper) -> str: 'Return a correctly formatted value to be assigned as a WHERE LIKE filter.' return self.driver().formatValueLike(t, v, upper)
Return a correctly formatted value to be assigned as a WHERE LIKE filter.
pineboolib/application/database/pnconnection.py
formatValueLike
deavid/pineboo
2
python
def formatValueLike(self, t, v, upper) -> str: return self.driver().formatValueLike(t, v, upper)
def formatValueLike(self, t, v, upper) -> str: return self.driver().formatValueLike(t, v, upper)<|docstring|>Return a correctly formatted value to be assigned as a WHERE LIKE filter.<|endoftext|>
7757ac2706060fa0ccbee909d1abfc05390c88e9c8afc5f8be9e650d9f53d1e3
def canSavePoint(self) -> bool: 'Inform if the sql driver can manage savepoints.' return self.dbAux().driver().canSavePoint()
Inform if the sql driver can manage savepoints.
pineboolib/application/database/pnconnection.py
canSavePoint
deavid/pineboo
2
python
def canSavePoint(self) -> bool: return self.dbAux().driver().canSavePoint()
def canSavePoint(self) -> bool: return self.dbAux().driver().canSavePoint()<|docstring|>Inform if the sql driver can manage savepoints.<|endoftext|>
312426a7e72965f1ca41d032888a88d6d0b54448374d964d8719754800471790
def canTransaction(self) -> bool: 'Inform if the sql driver can manage transactions.' return self.driver().canTransaction()
Inform if the sql driver can manage transactions.
pineboolib/application/database/pnconnection.py
canTransaction
deavid/pineboo
2
python
def canTransaction(self) -> bool: return self.driver().canTransaction()
def canTransaction(self) -> bool: return self.driver().canTransaction()<|docstring|>Inform if the sql driver can manage transactions.<|endoftext|>
65442aded8c0c0ad29a0f501040ce3a5dece71487ea10d1d96fd8f66097f4255
def lastActiveCursor(self): 'Return the last active cursor in the sql driver.' return self.lastActiveCursor_
Return the last active cursor in the sql driver.
pineboolib/application/database/pnconnection.py
lastActiveCursor
deavid/pineboo
2
python
def lastActiveCursor(self): return self.lastActiveCursor_
def lastActiveCursor(self): return self.lastActiveCursor_<|docstring|>Return the last active cursor in the sql driver.<|endoftext|>
de0917e3472b7abcc8259db32358d2967f53d55bf66cacda7425fd2bab6dfbb4
def doTransaction(self, cursor: 'PNSqlCursor') -> bool: 'Make a transaction or savePoint according to transaction level.' if ((not cursor) or (not self.db())): return False from pineboolib.application import project if ((self.transaction_ == 0) and self.canTransaction()): if config.value('application/isDebuggerMode', False): project.message_manager().send('status_help_msg', 'send', [('Iniciando Transacción... %s' % self.transaction_)]) if self.transaction(): self.lastActiveCursor_ = cursor db_signals.emitTransactionBegin(cursor) if (not self.canSavePoint()): if self.currentSavePoint_: del self.currentSavePoint_ self.currentSavePoint_ = None self.stackSavePoints_.clear() self.queueSavePoints_.clear() self.transaction_ = (self.transaction_ + 1) cursor.d.transactionsOpened_.insert(0, self.transaction_) return True else: logger.warning('doTransaction: Fallo al intentar iniciar la transacción') return False else: if config.value('application/isDebuggerMode', False): project.message_manager().send('status_help_msg', 'send', [('Creando punto de salvaguarda %s:%s' % (self.name, self.transaction_))]) if (not self.canSavePoint()): if (self.transaction_ == 0): if self.currentSavePoint_: del self.currentSavePoint_ self.currentSavePoint_ = None self.stackSavePoints_.clear() self.queueSavePoints_.clear() if self.currentSavePoint_: if self.stackSavePoints_: self.stackSavePoints_.insert(0, self.currentSavePoint_) else: self.stackSavePoints_.append(self.currentSavePoint_) self.currentSavePoint_ = PNSqlSavePoint(self.transaction_) else: self.savePoint(self.transaction_) self.transaction_ = (self.transaction_ + 1) if cursor.d.transactionsOpened_: cursor.d.transactionsOpened_.insert(0, self.transaction_) else: cursor.d.transactionsOpened_.append(self.transaction_) return True
Make a transaction or savePoint according to transaction level.
pineboolib/application/database/pnconnection.py
doTransaction
deavid/pineboo
2
python
def doTransaction(self, cursor: 'PNSqlCursor') -> bool: if ((not cursor) or (not self.db())): return False from pineboolib.application import project if ((self.transaction_ == 0) and self.canTransaction()): if config.value('application/isDebuggerMode', False): project.message_manager().send('status_help_msg', 'send', [('Iniciando Transacción... %s' % self.transaction_)]) if self.transaction(): self.lastActiveCursor_ = cursor db_signals.emitTransactionBegin(cursor) if (not self.canSavePoint()): if self.currentSavePoint_: del self.currentSavePoint_ self.currentSavePoint_ = None self.stackSavePoints_.clear() self.queueSavePoints_.clear() self.transaction_ = (self.transaction_ + 1) cursor.d.transactionsOpened_.insert(0, self.transaction_) return True else: logger.warning('doTransaction: Fallo al intentar iniciar la transacción') return False else: if config.value('application/isDebuggerMode', False): project.message_manager().send('status_help_msg', 'send', [('Creando punto de salvaguarda %s:%s' % (self.name, self.transaction_))]) if (not self.canSavePoint()): if (self.transaction_ == 0): if self.currentSavePoint_: del self.currentSavePoint_ self.currentSavePoint_ = None self.stackSavePoints_.clear() self.queueSavePoints_.clear() if self.currentSavePoint_: if self.stackSavePoints_: self.stackSavePoints_.insert(0, self.currentSavePoint_) else: self.stackSavePoints_.append(self.currentSavePoint_) self.currentSavePoint_ = PNSqlSavePoint(self.transaction_) else: self.savePoint(self.transaction_) self.transaction_ = (self.transaction_ + 1) if cursor.d.transactionsOpened_: cursor.d.transactionsOpened_.insert(0, self.transaction_) else: cursor.d.transactionsOpened_.append(self.transaction_) return True
def doTransaction(self, cursor: 'PNSqlCursor') -> bool: if ((not cursor) or (not self.db())): return False from pineboolib.application import project if ((self.transaction_ == 0) and self.canTransaction()): if config.value('application/isDebuggerMode', False): project.message_manager().send('status_help_msg', 'send', [('Iniciando Transacción... %s' % self.transaction_)]) if self.transaction(): self.lastActiveCursor_ = cursor db_signals.emitTransactionBegin(cursor) if (not self.canSavePoint()): if self.currentSavePoint_: del self.currentSavePoint_ self.currentSavePoint_ = None self.stackSavePoints_.clear() self.queueSavePoints_.clear() self.transaction_ = (self.transaction_ + 1) cursor.d.transactionsOpened_.insert(0, self.transaction_) return True else: logger.warning('doTransaction: Fallo al intentar iniciar la transacción') return False else: if config.value('application/isDebuggerMode', False): project.message_manager().send('status_help_msg', 'send', [('Creando punto de salvaguarda %s:%s' % (self.name, self.transaction_))]) if (not self.canSavePoint()): if (self.transaction_ == 0): if self.currentSavePoint_: del self.currentSavePoint_ self.currentSavePoint_ = None self.stackSavePoints_.clear() self.queueSavePoints_.clear() if self.currentSavePoint_: if self.stackSavePoints_: self.stackSavePoints_.insert(0, self.currentSavePoint_) else: self.stackSavePoints_.append(self.currentSavePoint_) self.currentSavePoint_ = PNSqlSavePoint(self.transaction_) else: self.savePoint(self.transaction_) self.transaction_ = (self.transaction_ + 1) if cursor.d.transactionsOpened_: cursor.d.transactionsOpened_.insert(0, self.transaction_) else: cursor.d.transactionsOpened_.append(self.transaction_) return True<|docstring|>Make a transaction or savePoint according to transaction level.<|endoftext|>
6aad5589976093596b0d73759b1da2d68cbdfc761e0878d318ad62148baee8f1
def transactionLevel(self) -> int: 'Indicate the level of transaction.' return self.transaction_
Indicate the level of transaction.
pineboolib/application/database/pnconnection.py
transactionLevel
deavid/pineboo
2
python
def transactionLevel(self) -> int: return self.transaction_
def transactionLevel(self) -> int: return self.transaction_<|docstring|>Indicate the level of transaction.<|endoftext|>
2e31857dffcd2268fcd8a18cb79068b1a44bbbdad9b6b5b35bc359a6c77cf104
def doRollback(self, cur: 'PNSqlCursor') -> bool: 'Drop a transaction or savepoint depending on the transaction level.' if ((not cur) or (not self.conn)): return False from pineboolib.application import project cancel = False if (self.interactiveGUI() and (cur.d.modeAccess_ in (CursorAccessMode.Insert, CursorAccessMode.Edit)) and cur.isModifiedBuffer() and cur.d.askForCancelChanges_): if project.DGI.localDesktop(): res = QtWidgets.QMessageBox.information(QtWidgets.QApplication.activeWindow(), 'Cancelar Cambios', 'Todos los cambios se cancelarán.¿Está seguro?', QtWidgets.QMessageBox.Yes, QtWidgets.QMessageBox.No) if (res == QtWidgets.QMessageBox.No): return False cancel = True if (self.transaction_ > 0): if cur.d.transactionsOpened_: trans = cur.d.transactionsOpened_.pop() if (not (trans == self.transaction_)): logger.info('FLSqlDatabase: El cursor va a deshacer la transacción %s pero la última que inició es la %s', self.transaction_, trans) else: logger.info('FLSqlDatabaser : El cursor va a deshacer la transacción %s pero no ha iniciado ninguna', self.transaction_) self.transaction_ = (self.transaction_ - 1) else: return True if ((self.transaction_ == 0) and self.canTransaction()): if config.value('application/isDebuggerMode', False): project.message_manager().send('status_help_msg', 'send', [('Deshaciendo Transacción... %s' % self.transaction_)]) if self.rollbackTransaction(): self.lastActiveCursor_ = None if (not self.canSavePoint()): if self.currentSavePoint_: del self.currentSavePoint_ self.currentSavePoint_ = None self.stackSavePoints_.clear() self.queueSavePoints_.clear() cur.d.modeAccess_ = CursorAccessMode.Browse if cancel: cur.select() db_signals.emitTransactionRollback(cur) return True else: logger.warning('doRollback: Fallo al intentar deshacer transacción') return False else: project.message_manager().send('status_help_msg', 'send', [('Restaurando punto de salvaguarda %s:%s...' 
% (self.name, self.transaction_))]) if (not self.canSavePoint()): tam_queue = len(self.queueSavePoints_) for i in range(tam_queue): temp_save_point = self.queueSavePoints_.pop() temp_id = temp_save_point.id() if ((temp_id > self.transaction_) or (self.transaction_ == 0)): temp_save_point.undo() del temp_save_point else: self.queueSavePoints_.append(temp_save_point) if (self.currentSavePoint_ is not None): self.currentSavePoint_.undo() self.currentSavePoint_ = None if self.stackSavePoints_: self.currentSavePoint_ = self.stackSavePoints_.pop() if (self.transaction_ == 0): if self.currentSavePoint_: del self.currentSavePoint_ self.currentSavePoint_ = None self.stackSavePoints_.clear() self.queueSavePoints_.clear() else: self.rollbackSavePoint(self.transaction_) cur.d.modeAccess_ = CursorAccessMode.Browse return True
Drop a transaction or savepoint depending on the transaction level.
pineboolib/application/database/pnconnection.py
doRollback
deavid/pineboo
2
python
def doRollback(self, cur: 'PNSqlCursor') -> bool: if ((not cur) or (not self.conn)): return False from pineboolib.application import project cancel = False if (self.interactiveGUI() and (cur.d.modeAccess_ in (CursorAccessMode.Insert, CursorAccessMode.Edit)) and cur.isModifiedBuffer() and cur.d.askForCancelChanges_): if project.DGI.localDesktop(): res = QtWidgets.QMessageBox.information(QtWidgets.QApplication.activeWindow(), 'Cancelar Cambios', 'Todos los cambios se cancelarán.¿Está seguro?', QtWidgets.QMessageBox.Yes, QtWidgets.QMessageBox.No) if (res == QtWidgets.QMessageBox.No): return False cancel = True if (self.transaction_ > 0): if cur.d.transactionsOpened_: trans = cur.d.transactionsOpened_.pop() if (not (trans == self.transaction_)): logger.info('FLSqlDatabase: El cursor va a deshacer la transacción %s pero la última que inició es la %s', self.transaction_, trans) else: logger.info('FLSqlDatabaser : El cursor va a deshacer la transacción %s pero no ha iniciado ninguna', self.transaction_) self.transaction_ = (self.transaction_ - 1) else: return True if ((self.transaction_ == 0) and self.canTransaction()): if config.value('application/isDebuggerMode', False): project.message_manager().send('status_help_msg', 'send', [('Deshaciendo Transacción... %s' % self.transaction_)]) if self.rollbackTransaction(): self.lastActiveCursor_ = None if (not self.canSavePoint()): if self.currentSavePoint_: del self.currentSavePoint_ self.currentSavePoint_ = None self.stackSavePoints_.clear() self.queueSavePoints_.clear() cur.d.modeAccess_ = CursorAccessMode.Browse if cancel: cur.select() db_signals.emitTransactionRollback(cur) return True else: logger.warning('doRollback: Fallo al intentar deshacer transacción') return False else: project.message_manager().send('status_help_msg', 'send', [('Restaurando punto de salvaguarda %s:%s...' 
% (self.name, self.transaction_))]) if (not self.canSavePoint()): tam_queue = len(self.queueSavePoints_) for i in range(tam_queue): temp_save_point = self.queueSavePoints_.pop() temp_id = temp_save_point.id() if ((temp_id > self.transaction_) or (self.transaction_ == 0)): temp_save_point.undo() del temp_save_point else: self.queueSavePoints_.append(temp_save_point) if (self.currentSavePoint_ is not None): self.currentSavePoint_.undo() self.currentSavePoint_ = None if self.stackSavePoints_: self.currentSavePoint_ = self.stackSavePoints_.pop() if (self.transaction_ == 0): if self.currentSavePoint_: del self.currentSavePoint_ self.currentSavePoint_ = None self.stackSavePoints_.clear() self.queueSavePoints_.clear() else: self.rollbackSavePoint(self.transaction_) cur.d.modeAccess_ = CursorAccessMode.Browse return True
def doRollback(self, cur: 'PNSqlCursor') -> bool: if ((not cur) or (not self.conn)): return False from pineboolib.application import project cancel = False if (self.interactiveGUI() and (cur.d.modeAccess_ in (CursorAccessMode.Insert, CursorAccessMode.Edit)) and cur.isModifiedBuffer() and cur.d.askForCancelChanges_): if project.DGI.localDesktop(): res = QtWidgets.QMessageBox.information(QtWidgets.QApplication.activeWindow(), 'Cancelar Cambios', 'Todos los cambios se cancelarán.¿Está seguro?', QtWidgets.QMessageBox.Yes, QtWidgets.QMessageBox.No) if (res == QtWidgets.QMessageBox.No): return False cancel = True if (self.transaction_ > 0): if cur.d.transactionsOpened_: trans = cur.d.transactionsOpened_.pop() if (not (trans == self.transaction_)): logger.info('FLSqlDatabase: El cursor va a deshacer la transacción %s pero la última que inició es la %s', self.transaction_, trans) else: logger.info('FLSqlDatabaser : El cursor va a deshacer la transacción %s pero no ha iniciado ninguna', self.transaction_) self.transaction_ = (self.transaction_ - 1) else: return True if ((self.transaction_ == 0) and self.canTransaction()): if config.value('application/isDebuggerMode', False): project.message_manager().send('status_help_msg', 'send', [('Deshaciendo Transacción... %s' % self.transaction_)]) if self.rollbackTransaction(): self.lastActiveCursor_ = None if (not self.canSavePoint()): if self.currentSavePoint_: del self.currentSavePoint_ self.currentSavePoint_ = None self.stackSavePoints_.clear() self.queueSavePoints_.clear() cur.d.modeAccess_ = CursorAccessMode.Browse if cancel: cur.select() db_signals.emitTransactionRollback(cur) return True else: logger.warning('doRollback: Fallo al intentar deshacer transacción') return False else: project.message_manager().send('status_help_msg', 'send', [('Restaurando punto de salvaguarda %s:%s...' 
% (self.name, self.transaction_))]) if (not self.canSavePoint()): tam_queue = len(self.queueSavePoints_) for i in range(tam_queue): temp_save_point = self.queueSavePoints_.pop() temp_id = temp_save_point.id() if ((temp_id > self.transaction_) or (self.transaction_ == 0)): temp_save_point.undo() del temp_save_point else: self.queueSavePoints_.append(temp_save_point) if (self.currentSavePoint_ is not None): self.currentSavePoint_.undo() self.currentSavePoint_ = None if self.stackSavePoints_: self.currentSavePoint_ = self.stackSavePoints_.pop() if (self.transaction_ == 0): if self.currentSavePoint_: del self.currentSavePoint_ self.currentSavePoint_ = None self.stackSavePoints_.clear() self.queueSavePoints_.clear() else: self.rollbackSavePoint(self.transaction_) cur.d.modeAccess_ = CursorAccessMode.Browse return True<|docstring|>Drop a transaction or savepoint depending on the transaction level.<|endoftext|>
708a04db1273acdb3603487f43d33b7816df2fb217e84e4d5a33601f3df5b7d7
def interactiveGUI(self) -> bool: 'Return if it is an interactive GUI.' return self.interactiveGUI_
Return if it is an interactive GUI.
pineboolib/application/database/pnconnection.py
interactiveGUI
deavid/pineboo
2
python
def interactiveGUI(self) -> bool: return self.interactiveGUI_
def interactiveGUI(self) -> bool: return self.interactiveGUI_<|docstring|>Return if it is an interactive GUI.<|endoftext|>
b8971a02d6b2fab6e1baffbf64ca3f89320e74d65e4655f10e9927ddf507b01f
def doCommit(self, cur: 'PNSqlCursor', notify: bool=True) -> bool: 'Approve changes to a transaction or a save point based on your transaction level.' if ((not cur) and (not self.db())): return False if (not notify): cur.autoCommit.emit() if (self.transaction_ > 0): if cur.d.transactionsOpened_: trans = cur.d.transactionsOpened_.pop() if (not (trans == self.transaction_)): logger.warning('El cursor va a terminar la transacción %s pero la última que inició es la %s', self.transaction_, trans) else: logger.warning('El cursor va a terminar la transacción %s pero no ha iniciado ninguna', self.transaction_) self.transaction_ = (self.transaction_ - 1) else: return True from pineboolib.application import project if ((self.transaction_ == 0) and self.canTransaction()): if config.value('application/isDebuggerMode', False): project.message_manager().send('status_help_msg', 'send', [('Terminando transacción... %s' % self.transaction_)]) try: if self.commit(): self.lastActiveCursor_ = None if (not self.canSavePoint()): if self.currentSavePoint_: del self.currentSavePoint_ self.currentSavePoint_ = None self.stackSavePoints_.clear() self.queueSavePoints_.clear() if notify: cur.d.modeAccess_ = CursorAccessMode.Browse db_signals.emitTransactionEnd(cur) return True else: logger.error(('doCommit: Fallo al intentar terminar transacción: %s' % self.transaction_)) return False except Exception as e: logger.error('doCommit: Fallo al intentar terminar transacción: %s', e) return False else: project.message_manager().send('status_help_msg', 'send', [('Liberando punto de salvaguarda %s:%s...' 
% (self.name, self.transaction_))]) if (((self.transaction_ == 1) and self.canTransaction()) or ((self.transaction_ == 0) and (not self.canTransaction()))): if (not self.canSavePoint()): if self.currentSavePoint_: del self.currentSavePoint_ self.currentSavePoint_ = None self.stackSavePoints_.clear() self.queueSavePoints_.clear() else: self.releaseSavePoint(self.transaction_) if notify: cur.d.modeAccess_ = CursorAccessMode.Browse return True if (not self.canSavePoint()): tam_queue = len(self.queueSavePoints_) for i in range(tam_queue): temp_save_point = self.queueSavePoints_.pop() temp_save_point.setId((self.transaction_ - 1)) self.queueSavePoints_.append(temp_save_point) if self.currentSavePoint_: self.queueSavePoints_.append(self.currentSavePoint_) self.currentSavePoint_ = None if self.stackSavePoints_: self.currentSavePoint_ = self.stackSavePoints_.pop() else: self.releaseSavePoint(self.transaction_) if notify: cur.d.modeAccess_ = CursorAccessMode.Browse return True
Approve changes to a transaction or a save point based on your transaction level.
pineboolib/application/database/pnconnection.py
doCommit
deavid/pineboo
2
python
def doCommit(self, cur: 'PNSqlCursor', notify: bool=True) -> bool: if ((not cur) and (not self.db())): return False if (not notify): cur.autoCommit.emit() if (self.transaction_ > 0): if cur.d.transactionsOpened_: trans = cur.d.transactionsOpened_.pop() if (not (trans == self.transaction_)): logger.warning('El cursor va a terminar la transacción %s pero la última que inició es la %s', self.transaction_, trans) else: logger.warning('El cursor va a terminar la transacción %s pero no ha iniciado ninguna', self.transaction_) self.transaction_ = (self.transaction_ - 1) else: return True from pineboolib.application import project if ((self.transaction_ == 0) and self.canTransaction()): if config.value('application/isDebuggerMode', False): project.message_manager().send('status_help_msg', 'send', [('Terminando transacción... %s' % self.transaction_)]) try: if self.commit(): self.lastActiveCursor_ = None if (not self.canSavePoint()): if self.currentSavePoint_: del self.currentSavePoint_ self.currentSavePoint_ = None self.stackSavePoints_.clear() self.queueSavePoints_.clear() if notify: cur.d.modeAccess_ = CursorAccessMode.Browse db_signals.emitTransactionEnd(cur) return True else: logger.error(('doCommit: Fallo al intentar terminar transacción: %s' % self.transaction_)) return False except Exception as e: logger.error('doCommit: Fallo al intentar terminar transacción: %s', e) return False else: project.message_manager().send('status_help_msg', 'send', [('Liberando punto de salvaguarda %s:%s...' 
% (self.name, self.transaction_))]) if (((self.transaction_ == 1) and self.canTransaction()) or ((self.transaction_ == 0) and (not self.canTransaction()))): if (not self.canSavePoint()): if self.currentSavePoint_: del self.currentSavePoint_ self.currentSavePoint_ = None self.stackSavePoints_.clear() self.queueSavePoints_.clear() else: self.releaseSavePoint(self.transaction_) if notify: cur.d.modeAccess_ = CursorAccessMode.Browse return True if (not self.canSavePoint()): tam_queue = len(self.queueSavePoints_) for i in range(tam_queue): temp_save_point = self.queueSavePoints_.pop() temp_save_point.setId((self.transaction_ - 1)) self.queueSavePoints_.append(temp_save_point) if self.currentSavePoint_: self.queueSavePoints_.append(self.currentSavePoint_) self.currentSavePoint_ = None if self.stackSavePoints_: self.currentSavePoint_ = self.stackSavePoints_.pop() else: self.releaseSavePoint(self.transaction_) if notify: cur.d.modeAccess_ = CursorAccessMode.Browse return True
def doCommit(self, cur: 'PNSqlCursor', notify: bool=True) -> bool: if ((not cur) and (not self.db())): return False if (not notify): cur.autoCommit.emit() if (self.transaction_ > 0): if cur.d.transactionsOpened_: trans = cur.d.transactionsOpened_.pop() if (not (trans == self.transaction_)): logger.warning('El cursor va a terminar la transacción %s pero la última que inició es la %s', self.transaction_, trans) else: logger.warning('El cursor va a terminar la transacción %s pero no ha iniciado ninguna', self.transaction_) self.transaction_ = (self.transaction_ - 1) else: return True from pineboolib.application import project if ((self.transaction_ == 0) and self.canTransaction()): if config.value('application/isDebuggerMode', False): project.message_manager().send('status_help_msg', 'send', [('Terminando transacción... %s' % self.transaction_)]) try: if self.commit(): self.lastActiveCursor_ = None if (not self.canSavePoint()): if self.currentSavePoint_: del self.currentSavePoint_ self.currentSavePoint_ = None self.stackSavePoints_.clear() self.queueSavePoints_.clear() if notify: cur.d.modeAccess_ = CursorAccessMode.Browse db_signals.emitTransactionEnd(cur) return True else: logger.error(('doCommit: Fallo al intentar terminar transacción: %s' % self.transaction_)) return False except Exception as e: logger.error('doCommit: Fallo al intentar terminar transacción: %s', e) return False else: project.message_manager().send('status_help_msg', 'send', [('Liberando punto de salvaguarda %s:%s...' 
% (self.name, self.transaction_))]) if (((self.transaction_ == 1) and self.canTransaction()) or ((self.transaction_ == 0) and (not self.canTransaction()))): if (not self.canSavePoint()): if self.currentSavePoint_: del self.currentSavePoint_ self.currentSavePoint_ = None self.stackSavePoints_.clear() self.queueSavePoints_.clear() else: self.releaseSavePoint(self.transaction_) if notify: cur.d.modeAccess_ = CursorAccessMode.Browse return True if (not self.canSavePoint()): tam_queue = len(self.queueSavePoints_) for i in range(tam_queue): temp_save_point = self.queueSavePoints_.pop() temp_save_point.setId((self.transaction_ - 1)) self.queueSavePoints_.append(temp_save_point) if self.currentSavePoint_: self.queueSavePoints_.append(self.currentSavePoint_) self.currentSavePoint_ = None if self.stackSavePoints_: self.currentSavePoint_ = self.stackSavePoints_.pop() else: self.releaseSavePoint(self.transaction_) if notify: cur.d.modeAccess_ = CursorAccessMode.Browse return True<|docstring|>Approve changes to a transaction or a save point based on your transaction level.<|endoftext|>
b1668c31093e66b5b524305a7838c5696cab53b0543921322a01f36e0cec4674
def canDetectLocks(self) -> bool: 'Indicate if the connection detects locks in the database.' if (not self.db()): return False return self.driver().canDetectLocks()
Indicate if the connection detects locks in the database.
pineboolib/application/database/pnconnection.py
canDetectLocks
deavid/pineboo
2
python
def canDetectLocks(self) -> bool: if (not self.db()): return False return self.driver().canDetectLocks()
def canDetectLocks(self) -> bool: if (not self.db()): return False return self.driver().canDetectLocks()<|docstring|>Indicate if the connection detects locks in the database.<|endoftext|>
6b39ffd7d436f4297a5762cf525e670f36d5daffc592e70f9d73e88ecbfc4080
def commit(self) -> bool: 'Send the commit order to the database.' if (not self.db()): return False return self.driver().commitTransaction()
Send the commit order to the database.
pineboolib/application/database/pnconnection.py
commit
deavid/pineboo
2
python
def commit(self) -> bool: if (not self.db()): return False return self.driver().commitTransaction()
def commit(self) -> bool: if (not self.db()): return False return self.driver().commitTransaction()<|docstring|>Send the commit order to the database.<|endoftext|>
627466720ffecbff6e8801178cf56ae1deba7a6be46eeaca2d15024e0c606c47
def managerModules(self) -> 'flmanagermodules.FLManagerModules': '\n Instance of the FLManagerModules class.\n\n Contains functions to control the state, health, etc ... of the database tables.\n ' if (not self._managerModules): from pineboolib.fllegacy.flmanagermodules import FLManagerModules self._managerModules = FLManagerModules(self) return self._managerModules
Instance of the FLManagerModules class. Contains functions to control the state, health, etc ... of the database tables.
pineboolib/application/database/pnconnection.py
managerModules
deavid/pineboo
2
python
def managerModules(self) -> 'flmanagermodules.FLManagerModules': '\n Instance of the FLManagerModules class.\n\n Contains functions to control the state, health, etc ... of the database tables.\n ' if (not self._managerModules): from pineboolib.fllegacy.flmanagermodules import FLManagerModules self._managerModules = FLManagerModules(self) return self._managerModules
def managerModules(self) -> 'flmanagermodules.FLManagerModules': '\n Instance of the FLManagerModules class.\n\n Contains functions to control the state, health, etc ... of the database tables.\n ' if (not self._managerModules): from pineboolib.fllegacy.flmanagermodules import FLManagerModules self._managerModules = FLManagerModules(self) return self._managerModules<|docstring|>Instance of the FLManagerModules class. Contains functions to control the state, health, etc ... of the database tables.<|endoftext|>
b20210c637379b47b5ddc3fd6dd0881225afafd17106e4d1d0a1847547a60c28
def canOverPartition(self) -> bool: 'Return True if the database supports the OVER statement.' if (not self.db()): return False return self.dbAux().driver().canOverPartition()
Return True if the database supports the OVER statement.
pineboolib/application/database/pnconnection.py
canOverPartition
deavid/pineboo
2
python
def canOverPartition(self) -> bool: if (not self.db()): return False return self.dbAux().driver().canOverPartition()
def canOverPartition(self) -> bool: if (not self.db()): return False return self.dbAux().driver().canOverPartition()<|docstring|>Return True if the database supports the OVER statement.<|endoftext|>
e05fc20b2b5b17611b7ddcf67d35ddb10ca261431cf7924a051c05d4398d5911
def savePoint(self, save_point: int) -> bool: 'Create a save point.' if (not self.db()): return False return self.driver().savePoint(save_point)
Create a save point.
pineboolib/application/database/pnconnection.py
savePoint
deavid/pineboo
2
python
def savePoint(self, save_point: int) -> bool: if (not self.db()): return False return self.driver().savePoint(save_point)
def savePoint(self, save_point: int) -> bool: if (not self.db()): return False return self.driver().savePoint(save_point)<|docstring|>Create a save point.<|endoftext|>
80e14e657f81f25281f11d96aafe054ded7e1ec4ecd9ed0f1f04fa8862e86a64
def releaseSavePoint(self, save_point: int) -> bool: 'Release a save point.' if (not self.db()): return False return self.driver().releaseSavePoint(save_point)
Release a save point.
pineboolib/application/database/pnconnection.py
releaseSavePoint
deavid/pineboo
2
python
def releaseSavePoint(self, save_point: int) -> bool: if (not self.db()): return False return self.driver().releaseSavePoint(save_point)
def releaseSavePoint(self, save_point: int) -> bool: if (not self.db()): return False return self.driver().releaseSavePoint(save_point)<|docstring|>Release a save point.<|endoftext|>
6fb089463c56f7d69cab13af1a08905f38f63d38189dbe72f7a011f0635e43c1
def Mr_Proper(self): 'Clean the database of unnecessary tables and records.' if (not self.db()): return self.dbAux().driver().Mr_Proper()
Clean the database of unnecessary tables and records.
pineboolib/application/database/pnconnection.py
Mr_Proper
deavid/pineboo
2
python
def Mr_Proper(self): if (not self.db()): return self.dbAux().driver().Mr_Proper()
def Mr_Proper(self): if (not self.db()): return self.dbAux().driver().Mr_Proper()<|docstring|>Clean the database of unnecessary tables and records.<|endoftext|>
6ffd1ac7669c63835430a626a62e37fb744af972bac08913daca5ebdfdefb8ed
def rollbackSavePoint(self, save_point: int) -> bool: 'Roll back a save point.' if (not self.db()): return False return self.driver().rollbackSavePoint(save_point)
Roll back a save point.
pineboolib/application/database/pnconnection.py
rollbackSavePoint
deavid/pineboo
2
python
def rollbackSavePoint(self, save_point: int) -> bool: if (not self.db()): return False return self.driver().rollbackSavePoint(save_point)
def rollbackSavePoint(self, save_point: int) -> bool: if (not self.db()): return False return self.driver().rollbackSavePoint(save_point)<|docstring|>Roll back a save point.<|endoftext|>
ce11e70773daae25a994e522e9479eac4122132cc4cdd02fcb55c8a73c20102c
def transaction(self) -> bool: 'Create a transaction.' if (not self.db()): return False return self.driver().transaction()
Create a transaction.
pineboolib/application/database/pnconnection.py
transaction
deavid/pineboo
2
python
def transaction(self) -> bool: if (not self.db()): return False return self.driver().transaction()
def transaction(self) -> bool: if (not self.db()): return False return self.driver().transaction()<|docstring|>Create a transaction.<|endoftext|>
d9f520a0c9ae1c44decd11e6b3c7d7c5aa3fcae1d4bcaa902a94ccf8384a56d8
def commitTransaction(self) -> bool: 'Release a transaction.' if (not self.db()): return False return self.driver().commitTransaction()
Release a transaction.
pineboolib/application/database/pnconnection.py
commitTransaction
deavid/pineboo
2
python
def commitTransaction(self) -> bool: if (not self.db()): return False return self.driver().commitTransaction()
def commitTransaction(self) -> bool: if (not self.db()): return False return self.driver().commitTransaction()<|docstring|>Release a transaction.<|endoftext|>
4fb108526e3a502bb35cc24bbd90cae020b2121489d9c97ba3d792920d6b0a87
def rollbackTransaction(self) -> bool: 'Roll back a transaction.' if (not self.db()): return False return self.driver().rollbackTransaction()
Roll back a transaction.
pineboolib/application/database/pnconnection.py
rollbackTransaction
deavid/pineboo
2
python
def rollbackTransaction(self) -> bool: if (not self.db()): return False return self.driver().rollbackTransaction()
def rollbackTransaction(self) -> bool: if (not self.db()): return False return self.driver().rollbackTransaction()<|docstring|>Roll back a transaction.<|endoftext|>
20acf2abc045895d7acf808d081e512ebb95b5ee6ba5962adf11b7e60ea21084
def nextSerialVal(self, table: str, field: str) -> Any: 'Indicate next available value of a serial type field.' if (not self.db()): return False return self.dbAux().driver().nextSerialVal(table, field)
Indicate next available value of a serial type field.
pineboolib/application/database/pnconnection.py
nextSerialVal
deavid/pineboo
2
python
def nextSerialVal(self, table: str, field: str) -> Any: if (not self.db()): return False return self.dbAux().driver().nextSerialVal(table, field)
def nextSerialVal(self, table: str, field: str) -> Any: if (not self.db()): return False return self.dbAux().driver().nextSerialVal(table, field)<|docstring|>Indicate next available value of a serial type field.<|endoftext|>
28b3af97fd9f870930b029757bb15f11ac8afee0cfd1880dc10658366347e3ef
def existsTable(self, name: str) -> bool: 'Indicate the existence of a table in the database.' if (not self.db()): return False return self.dbAux().driver().existsTable(name)
Indicate the existence of a table in the database.
pineboolib/application/database/pnconnection.py
existsTable
deavid/pineboo
2
python
def existsTable(self, name: str) -> bool: if (not self.db()): return False return self.dbAux().driver().existsTable(name)
def existsTable(self, name: str) -> bool: if (not self.db()): return False return self.dbAux().driver().existsTable(name)<|docstring|>Indicate the existence of a table in the database.<|endoftext|>
3aa8bec3ec0f7af0870490d4ef567eb4628b422301257dfdd8f6c2dafb314a8f
def createTable(self, tmd: 'PNTableMetaData') -> bool: 'Create a table in the database, from a PNTableMetaData.' if (not self.db()): return False sql = self.dbAux().driver().sqlCreateTable(tmd) if (not sql): return False if (self.transaction_ == 0): self.transaction() self.transaction_ += 1 for singleSql in sql.split(';'): try: self.dbAux().execute_query(singleSql) except Exception: logger.exception('createTable: Error happened executing sql: %s...', singleSql[:80]) self.rollbackTransaction() return False if (self.transaction_ > 0): self.commitTransaction() self.transaction_ -= 1 return True
Create a table in the database, from a PNTableMetaData.
pineboolib/application/database/pnconnection.py
createTable
deavid/pineboo
2
python
def createTable(self, tmd: 'PNTableMetaData') -> bool: if (not self.db()): return False sql = self.dbAux().driver().sqlCreateTable(tmd) if (not sql): return False if (self.transaction_ == 0): self.transaction() self.transaction_ += 1 for singleSql in sql.split(';'): try: self.dbAux().execute_query(singleSql) except Exception: logger.exception('createTable: Error happened executing sql: %s...', singleSql[:80]) self.rollbackTransaction() return False if (self.transaction_ > 0): self.commitTransaction() self.transaction_ -= 1 return True
def createTable(self, tmd: 'PNTableMetaData') -> bool: if (not self.db()): return False sql = self.dbAux().driver().sqlCreateTable(tmd) if (not sql): return False if (self.transaction_ == 0): self.transaction() self.transaction_ += 1 for singleSql in sql.split(';'): try: self.dbAux().execute_query(singleSql) except Exception: logger.exception('createTable: Error happened executing sql: %s...', singleSql[:80]) self.rollbackTransaction() return False if (self.transaction_ > 0): self.commitTransaction() self.transaction_ -= 1 return True<|docstring|>Create a table in the database, from a PNTableMetaData.<|endoftext|>
10ae84b49bd944b63f0306835a8ca8a48a7c529a50a4a1286ade06d860298327
def mismatchedTable(self, tablename: str, tmd: 'PNTableMetaData') -> bool: 'Compare an existing table with a PNTableMetaData and return if there are differences.' if (not self.db()): return False return self.dbAux().driver().mismatchedTable(tablename, tmd, self)
Compare an existing table with a PNTableMetaData and return if there are differences.
pineboolib/application/database/pnconnection.py
mismatchedTable
deavid/pineboo
2
python
def mismatchedTable(self, tablename: str, tmd: 'PNTableMetaData') -> bool: if (not self.db()): return False return self.dbAux().driver().mismatchedTable(tablename, tmd, self)
def mismatchedTable(self, tablename: str, tmd: 'PNTableMetaData') -> bool: if (not self.db()): return False return self.dbAux().driver().mismatchedTable(tablename, tmd, self)<|docstring|>Compare an existing table with a PNTableMetaData and return if there are differences.<|endoftext|>
7520e6dcf2b0de44b34a4275a06ce332180fed6e18e9753a45bf1a0a14b32c3a
def normalizeValue(self, text: str) -> Optional[str]: 'Return the value of a correctly formatted string to the database type from a string.' if getattr(self.driver(), 'normalizeValue', None): return self.driver().normalizeValue(text) logger.warning('PNConnection: El driver %s no dispone de normalizeValue(text)', self.driverName()) return text
Return the value of a correctly formatted string to the database type from a string.
pineboolib/application/database/pnconnection.py
normalizeValue
deavid/pineboo
2
python
def normalizeValue(self, text: str) -> Optional[str]: if getattr(self.driver(), 'normalizeValue', None): return self.driver().normalizeValue(text) logger.warning('PNConnection: El driver %s no dispone de normalizeValue(text)', self.driverName()) return text
def normalizeValue(self, text: str) -> Optional[str]: if getattr(self.driver(), 'normalizeValue', None): return self.driver().normalizeValue(text) logger.warning('PNConnection: El driver %s no dispone de normalizeValue(text)', self.driverName()) return text<|docstring|>Return the value of a correctly formatted string to the database type from a string.<|endoftext|>
a3c9a69f76aacc088378cc547677fe0b4e7fae4568980d691778086817e00ae2
def queryUpdate(self, name: str, update: str, filter: str) -> Optional[str]: 'Return a correct UPDATE query for the database type.' if (not self.db()): return None return self.driver().queryUpdate(name, update, filter)
Return a correct UPDATE query for the database type.
pineboolib/application/database/pnconnection.py
queryUpdate
deavid/pineboo
2
python
def queryUpdate(self, name: str, update: str, filter: str) -> Optional[str]: if (not self.db()): return None return self.driver().queryUpdate(name, update, filter)
def queryUpdate(self, name: str, update: str, filter: str) -> Optional[str]: if (not self.db()): return None return self.driver().queryUpdate(name, update, filter)<|docstring|>Return a correct UPDATE query for the database type.<|endoftext|>
e45514270d2e1147a72fe8aeab0556466f0ba8d9e98a4387b55ce7298931d368
def execute_query(self, q) -> Any: 'Execute a query in a database cursor.' if (not self.db()): return None return self.driver().execute_query(q)
Execute a query in a database cursor.
pineboolib/application/database/pnconnection.py
execute_query
deavid/pineboo
2
python
def execute_query(self, q) -> Any: if (not self.db()): return None return self.driver().execute_query(q)
def execute_query(self, q) -> Any: if (not self.db()): return None return self.driver().execute_query(q)<|docstring|>Execute a query in a database cursor.<|endoftext|>
067b78c87d050d767b37f6e84676a676d9105130581d344ca389306174e76029
def alterTable(self, mtd_1: 'PNTableMetaData', mtd_2: 'PNTableMetaData', key: str, force: bool=False) -> bool: 'Modify the fields of a table in the database based on the differences of two PNTableMetaData.' if (not self.db()): return False return self.dbAux().driver().alterTable(mtd_1, mtd_2, key, force)
Modify the fields of a table in the database based on the differences of two PNTableMetaData.
pineboolib/application/database/pnconnection.py
alterTable
deavid/pineboo
2
python
def alterTable(self, mtd_1: 'PNTableMetaData', mtd_2: 'PNTableMetaData', key: str, force: bool=False) -> bool: if (not self.db()): return False return self.dbAux().driver().alterTable(mtd_1, mtd_2, key, force)
def alterTable(self, mtd_1: 'PNTableMetaData', mtd_2: 'PNTableMetaData', key: str, force: bool=False) -> bool: if (not self.db()): return False return self.dbAux().driver().alterTable(mtd_1, mtd_2, key, force)<|docstring|>Modify the fields of a table in the database based on the differences of two PNTableMetaData.<|endoftext|>
f08ea377017a968775473c05f69ec0c3d15f6f02c29725b22e78fd98f6240c2c
def __str__(self): 'Return the name of the database in text format.' return self.DBName()
Return the name of the database in text format.
pineboolib/application/database/pnconnection.py
__str__
deavid/pineboo
2
python
def __str__(self): return self.DBName()
def __str__(self): return self.DBName()<|docstring|>Return the name of the database in text format.<|endoftext|>
e2883b3d4f1d17129d201d782e7b7c8c032c899318e923e31ba8ab0635a60247
def _format_timestamp_to_string(timestamp): '\n Input timestamp can be:\n - Epoch time or counter\n - Datetime\n ' if (type(timestamp) == datetime): return timestamp.strftime(DT_FORMAT) else: return str(timestamp)
Input timestamp can be: - Epoch time or counter - Datetime
sandbox/data_source.py
_format_timestamp_to_string
marionleborgne/EIT_Dashboard
0
python
def _format_timestamp_to_string(timestamp): '\n Input timestamp can be:\n - Epoch time or counter\n - Datetime\n ' if (type(timestamp) == datetime): return timestamp.strftime(DT_FORMAT) else: return str(timestamp)
def _format_timestamp_to_string(timestamp): '\n Input timestamp can be:\n - Epoch time or counter\n - Datetime\n ' if (type(timestamp) == datetime): return timestamp.strftime(DT_FORMAT) else: return str(timestamp)<|docstring|>Input timestamp can be: - Epoch time or counter - Datetime<|endoftext|>
6560278054504a4492a280cb4699dbadc3763874e9ace06dee443b4235955e94
def _read_string_timestamp(str_timestamp): "\n Input string timestamp can be:\n - Epoch time or counter (E.g. '250') --> can be cast to int\n - Formatted datetime (E.g. '2018-12-01 12:05:04') --> cannot be cast to int\n " try: timestamp = int(str_timestamp) except ValueError: timestamp = datetime.strptime(str_timestamp, DT_FORMAT) return timestamp
Input string timestamp can be: - Epoch time or counter (E.g. '250') --> can be cast to int - Formatted datetime (E.g. '2018-12-01 12:05:04') --> cannot be cast to int
sandbox/data_source.py
_read_string_timestamp
marionleborgne/EIT_Dashboard
0
python
def _read_string_timestamp(str_timestamp): "\n Input string timestamp can be:\n - Epoch time or counter (E.g. '250') --> can be cast to int\n - Formatted datetime (E.g. '2018-12-01 12:05:04') --> cannot be cast to int\n " try: timestamp = int(str_timestamp) except ValueError: timestamp = datetime.strptime(str_timestamp, DT_FORMAT) return timestamp
def _read_string_timestamp(str_timestamp): "\n Input string timestamp can be:\n - Epoch time or counter (E.g. '250') --> can be cast to int\n - Formatted datetime (E.g. '2018-12-01 12:05:04') --> cannot be cast to int\n " try: timestamp = int(str_timestamp) except ValueError: timestamp = datetime.strptime(str_timestamp, DT_FORMAT) return timestamp<|docstring|>Input string timestamp can be: - Epoch time or counter (E.g. '250') --> can be cast to int - Formatted datetime (E.g. '2018-12-01 12:05:04') --> cannot be cast to int<|endoftext|>
2e153df8b5814339dc4fdc10d9d8c9d584f51fd79df9018c1a06ff608b40e875
def add(self, asset_id=None, asset_body=None): 'Add parts in this object' if (asset_id is not None): self.asset_id = asset_id[:self.idlen_conf['asset_id']] if (asset_body is not None): self.asset_body = asset_body if isinstance(asset_body, str): self.asset_body = asset_body.encode() self.asset_body_size = len(asset_body)
Add parts in this object
bbclib/libs/bbclib_asset_raw.py
add
ks91/py-bbclib
0
python
def add(self, asset_id=None, asset_body=None): if (asset_id is not None): self.asset_id = asset_id[:self.idlen_conf['asset_id']] if (asset_body is not None): self.asset_body = asset_body if isinstance(asset_body, str): self.asset_body = asset_body.encode() self.asset_body_size = len(asset_body)
def add(self, asset_id=None, asset_body=None): if (asset_id is not None): self.asset_id = asset_id[:self.idlen_conf['asset_id']] if (asset_body is not None): self.asset_body = asset_body if isinstance(asset_body, str): self.asset_body = asset_body.encode() self.asset_body_size = len(asset_body)<|docstring|>Add parts in this object<|endoftext|>
5cc0673dba9eda1a2702622c266507c6e9d576c40fba3cc71eee9a7fa2c9d8ac
def digest(self): 'Return digest\n\n The digest corresponds to the asset_id of this object.\n The asset_id is given externally, so bbclib does not care about how to calculate the digest of the asset_body.\n\n Returns:\n bytes: asset_id\n ' return self.asset_id
Return digest The digest corresponds to the asset_id of this object. The asset_id is given externally, so bbclib does not care about how to calculate the digest of the asset_body. Returns: bytes: asset_id
bbclib/libs/bbclib_asset_raw.py
digest
ks91/py-bbclib
0
python
def digest(self): 'Return digest\n\n The digest corresponds to the asset_id of this object.\n The asset_id is given externally, so bbclib does not care about how to calculate the digest of the asset_body.\n\n Returns:\n bytes: asset_id\n ' return self.asset_id
def digest(self): 'Return digest\n\n The digest corresponds to the asset_id of this object.\n The asset_id is given externally, so bbclib does not care about how to calculate the digest of the asset_body.\n\n Returns:\n bytes: asset_id\n ' return self.asset_id<|docstring|>Return digest The digest corresponds to the asset_id of this object. The asset_id is given externally, so bbclib does not care about how to calculate the digest of the asset_body. Returns: bytes: asset_id<|endoftext|>
6ffb3e9d439d55fc0e31dab7db0535a4223866f4ab7bef5cf174fb8ba8572170
def pack(self): 'Pack this object\n\n Returns:\n bytes: packed binary data\n ' dat = bytearray() dat.extend(bbclib_binary.to_bigint(self.asset_id, size=self.idlen_conf['asset_id'])) dat.extend(bbclib_binary.to_2byte(self.asset_body_size)) if (self.asset_body_size > 0): dat.extend(self.asset_body) return bytes(dat)
Pack this object Returns: bytes: packed binary data
bbclib/libs/bbclib_asset_raw.py
pack
ks91/py-bbclib
0
python
def pack(self): 'Pack this object\n\n Returns:\n bytes: packed binary data\n ' dat = bytearray() dat.extend(bbclib_binary.to_bigint(self.asset_id, size=self.idlen_conf['asset_id'])) dat.extend(bbclib_binary.to_2byte(self.asset_body_size)) if (self.asset_body_size > 0): dat.extend(self.asset_body) return bytes(dat)
def pack(self): 'Pack this object\n\n Returns:\n bytes: packed binary data\n ' dat = bytearray() dat.extend(bbclib_binary.to_bigint(self.asset_id, size=self.idlen_conf['asset_id'])) dat.extend(bbclib_binary.to_2byte(self.asset_body_size)) if (self.asset_body_size > 0): dat.extend(self.asset_body) return bytes(dat)<|docstring|>Pack this object Returns: bytes: packed binary data<|endoftext|>
c852ec1ea4adb3af94008204d064a6299de71d75a55fce76706d1014779be782
def unpack(self, data): 'Unpack into this object\n\n Args:\n data (bytes): packed binary data\n Returns:\n bool: True if successful\n ' ptr = 0 try: (ptr, self.asset_id) = bbclib_binary.get_bigint(ptr, data) self.idlen_conf['asset_id'] = len(self.asset_id) (ptr, self.asset_body_size) = bbclib_binary.get_n_byte_int(ptr, 2, data) if (self.asset_body_size > 0): (ptr, self.asset_body) = bbclib_binary.get_n_bytes(ptr, self.asset_body_size, data) except: traceback.print_exc() return False return True
Unpack into this object Args: data (bytes): packed binary data Returns: bool: True if successful
bbclib/libs/bbclib_asset_raw.py
unpack
ks91/py-bbclib
0
python
def unpack(self, data): 'Unpack into this object\n\n Args:\n data (bytes): packed binary data\n Returns:\n bool: True if successful\n ' ptr = 0 try: (ptr, self.asset_id) = bbclib_binary.get_bigint(ptr, data) self.idlen_conf['asset_id'] = len(self.asset_id) (ptr, self.asset_body_size) = bbclib_binary.get_n_byte_int(ptr, 2, data) if (self.asset_body_size > 0): (ptr, self.asset_body) = bbclib_binary.get_n_bytes(ptr, self.asset_body_size, data) except: traceback.print_exc() return False return True
def unpack(self, data): 'Unpack into this object\n\n Args:\n data (bytes): packed binary data\n Returns:\n bool: True if successful\n ' ptr = 0 try: (ptr, self.asset_id) = bbclib_binary.get_bigint(ptr, data) self.idlen_conf['asset_id'] = len(self.asset_id) (ptr, self.asset_body_size) = bbclib_binary.get_n_byte_int(ptr, 2, data) if (self.asset_body_size > 0): (ptr, self.asset_body) = bbclib_binary.get_n_bytes(ptr, self.asset_body_size, data) except: traceback.print_exc() return False return True<|docstring|>Unpack into this object Args: data (bytes): packed binary data Returns: bool: True if successful<|endoftext|>
43c27416b21e7715133ca6f349478865406fae66d16aba2418c39d70eb740da9
def config_log(debug): ' configure log file to keep track of users queries ' logger = logging.getLogger('era5log') formatter = logging.Formatter('%(levelname)s %(asctime)s; %(message)s', '%Y-%m-%d %H:%M:%S') if debug: level = logging.DEBUG else: level = logging.INFO logger.setLevel(level) clog = logging.StreamHandler() if debug: level = logging.DEBUG else: level = logging.WARNING clog.setLevel(level) logger.addHandler(clog) date = datetime.now().strftime('%Y%m%d') logname = (((cfg['logdir'] + '/era5_log_') + date) + '.txt') flog = logging.FileHandler(logname) try: os.chmod(logname, ((stat.S_IRWXU | stat.S_IRWXG) | stat.S_IRWXO)) except: pass flog.setLevel(logging.INFO) flog.setFormatter(formatter) logger.addHandler(flog) return logger
configure log file to keep track of users queries
era5/era5_functions.py
config_log
coecms/era5
22
python
def config_log(debug): ' ' logger = logging.getLogger('era5log') formatter = logging.Formatter('%(levelname)s %(asctime)s; %(message)s', '%Y-%m-%d %H:%M:%S') if debug: level = logging.DEBUG else: level = logging.INFO logger.setLevel(level) clog = logging.StreamHandler() if debug: level = logging.DEBUG else: level = logging.WARNING clog.setLevel(level) logger.addHandler(clog) date = datetime.now().strftime('%Y%m%d') logname = (((cfg['logdir'] + '/era5_log_') + date) + '.txt') flog = logging.FileHandler(logname) try: os.chmod(logname, ((stat.S_IRWXU | stat.S_IRWXG) | stat.S_IRWXO)) except: pass flog.setLevel(logging.INFO) flog.setFormatter(formatter) logger.addHandler(flog) return logger
def config_log(debug): ' ' logger = logging.getLogger('era5log') formatter = logging.Formatter('%(levelname)s %(asctime)s; %(message)s', '%Y-%m-%d %H:%M:%S') if debug: level = logging.DEBUG else: level = logging.INFO logger.setLevel(level) clog = logging.StreamHandler() if debug: level = logging.DEBUG else: level = logging.WARNING clog.setLevel(level) logger.addHandler(clog) date = datetime.now().strftime('%Y%m%d') logname = (((cfg['logdir'] + '/era5_log_') + date) + '.txt') flog = logging.FileHandler(logname) try: os.chmod(logname, ((stat.S_IRWXU | stat.S_IRWXG) | stat.S_IRWXO)) except: pass flog.setLevel(logging.INFO) flog.setFormatter(formatter) logger.addHandler(flog) return logger<|docstring|>configure log file to keep track of users queries<|endoftext|>
69e5fb0185677b311bc398214a807442f4f3b8d405f67296ca1dbaf7e2375b80
def read_config(): '\n Read config from config.json file\n ' try: cfg_file = pkg_resources.resource_filename(__name__, 'data/config.json') with open(cfg_file, 'r') as fj: cfg = json.load(fj) except FileNotFoundError: print(f"Can't find file config.json in {os.getcwd()}") raise SystemExit() return cfg
Read config from config.json file
era5/era5_functions.py
read_config
coecms/era5
22
python
def read_config(): '\n \n ' try: cfg_file = pkg_resources.resource_filename(__name__, 'data/config.json') with open(cfg_file, 'r') as fj: cfg = json.load(fj) except FileNotFoundError: print(f"Can't find file config.json in {os.getcwd()}") raise SystemExit() return cfg
def read_config(): '\n \n ' try: cfg_file = pkg_resources.resource_filename(__name__, 'data/config.json') with open(cfg_file, 'r') as fj: cfg = json.load(fj) except FileNotFoundError: print(f"Can't find file config.json in {os.getcwd()}") raise SystemExit() return cfg<|docstring|>Read config from config.json file<|endoftext|>
452edd07d15ec0b4e5391bc8e880a852e943d44fb6237a4fabb0b13fdf30a0eb
def define_dates(yr, mn): ' return a date range for each file depending on selected type ' startday = 1 endday = monthrange(int(yr), int(mn))[1] daylist = [('%.2d' % i) for i in range(startday, (endday + 1))] return daylist
return a date range for each file depending on selected type
era5/era5_functions.py
define_dates
coecms/era5
22
python
def define_dates(yr, mn): ' ' startday = 1 endday = monthrange(int(yr), int(mn))[1] daylist = [('%.2d' % i) for i in range(startday, (endday + 1))] return daylist
def define_dates(yr, mn): ' ' startday = 1 endday = monthrange(int(yr), int(mn))[1] daylist = [('%.2d' % i) for i in range(startday, (endday + 1))] return daylist<|docstring|>return a date range for each file depending on selected type<|endoftext|>
f1068a8d9a99b395a0da7ae767b8135fc29b405592e344fe97ee8e7a91f1cc71
def define_var(vardict, varparam, era5log): ' Find grib code in vardict dictionary and return relevant info\n ' queue = True try: (name, cds_name) = vardict[varparam] except: era5log.info(f'Selected parameter code {varparam} is not available') queue = False return (queue, None, None) return (queue, name, cds_name)
Find grib code in vardict dictionary and return relevant info
era5/era5_functions.py
define_var
coecms/era5
22
python
def define_var(vardict, varparam, era5log): ' \n ' queue = True try: (name, cds_name) = vardict[varparam] except: era5log.info(f'Selected parameter code {varparam} is not available') queue = False return (queue, None, None) return (queue, name, cds_name)
def define_var(vardict, varparam, era5log): ' \n ' queue = True try: (name, cds_name) = vardict[varparam] except: era5log.info(f'Selected parameter code {varparam} is not available') queue = False return (queue, None, None) return (queue, name, cds_name)<|docstring|>Find grib code in vardict dictionary and return relevant info<|endoftext|>
29dea5ad559c824f3d171ea70d0b4816e8f0c6b2601228f5d307b7ed3058eb5d
def define_args(stream, tstep): ' Return parameters and levels lists and step, time depending on stream type' stream_file = pkg_resources.resource_filename(__name__, f'data/era5_{stream}_{tstep}.json') with open(stream_file, 'r') as fj: dsargs = json.load(fj) return dsargs
Return parameters and levels lists and step, time depending on stream type
era5/era5_functions.py
define_args
coecms/era5
22
python
def define_args(stream, tstep): ' ' stream_file = pkg_resources.resource_filename(__name__, f'data/era5_{stream}_{tstep}.json') with open(stream_file, 'r') as fj: dsargs = json.load(fj) return dsargs
def define_args(stream, tstep): ' ' stream_file = pkg_resources.resource_filename(__name__, f'data/era5_{stream}_{tstep}.json') with open(stream_file, 'r') as fj: dsargs = json.load(fj) return dsargs<|docstring|>Return parameters and levels lists and step, time depending on stream type<|endoftext|>
f68268ee5e1e989d7fba6b709acb064c8cd273a9b6fe2c679191eeb0b27a523a
def read_vars(stream): 'Read parameters info from era5_vars.json file\n ' if (stream in ['cems_fire', 'wfde5']): var_file = pkg_resources.resource_filename(__name__, 'data/era5_derived.json') else: var_file = pkg_resources.resource_filename(__name__, 'data/era5_vars.json') with open(var_file, 'r') as fj: vardict = json.load(fj) return vardict
Read parameters info from era5_vars.json file
era5/era5_functions.py
read_vars
coecms/era5
22
python
def read_vars(stream): '\n ' if (stream in ['cems_fire', 'wfde5']): var_file = pkg_resources.resource_filename(__name__, 'data/era5_derived.json') else: var_file = pkg_resources.resource_filename(__name__, 'data/era5_vars.json') with open(var_file, 'r') as fj: vardict = json.load(fj) return vardict
def read_vars(stream): '\n ' if (stream in ['cems_fire', 'wfde5']): var_file = pkg_resources.resource_filename(__name__, 'data/era5_derived.json') else: var_file = pkg_resources.resource_filename(__name__, 'data/era5_vars.json') with open(var_file, 'r') as fj: vardict = json.load(fj) return vardict<|docstring|>Read parameters info from era5_vars.json file<|endoftext|>
37775108a19eda1a4866af28e5a2a500c7e89d1b43ec521b7af032714bc47ddb
def file_exists(fn, nclist): ' check if file already exists\n ' return (fn in nclist)
check if file already exists
era5/era5_functions.py
file_exists
coecms/era5
22
python
def file_exists(fn, nclist): ' \n ' return (fn in nclist)
def file_exists(fn, nclist): ' \n ' return (fn in nclist)<|docstring|>check if file already exists<|endoftext|>
2a21f0c796ab5feb06552fc4b525ca0052d8253bc3276eaa69b3728e016cfeb5
def build_dict(dsargs, yr, mn, var, daylist, oformat, tstep, back): 'Builds request dictionary to pass to retrieve command \n ' timelist = [('%.2d:00' % i) for i in range(24)] rdict = {'variable': var, 'year': str(yr), 'month': str(mn), 'format': oformat, 'area': dsargs['area']} if ('product_type' in dsargs.keys()): rdict['product_type'] = dsargs['product_type'] if ('version' in dsargs.keys()): rdict['version'] = dsargs['version'] if ('dataset' in dsargs.keys()): rdict['dataset'] = dsargs['dataset'] if ('reference_dataset' in dsargs.keys()): rdict['reference_dataset'] = dsargs['reference_dataset'] if (dsargs['levels'] != []): rdict['pressure_level'] = dsargs['levels'] if (tstep == 'mon'): rdict['time'] = '00:00' elif (tstep == 'day'): rdict['day'] = daylist elif ((tstep == 'hr') and (dsargs['dsid'][:12] != 'derived-near')): rdict['day'] = daylist rdict['time'] = timelist if back: rdict['month'] = [('%.2d' % i) for i in range(1, 13)] if (dsargs['dsid'] == 'reanalysis-era5-land-monthly-means'): rdict['year'] = [('%.2d' % i) for i in range(1981, 2020)] elif (dsargs['dsid'] == 'reanalysis-era5-single-levels-monthly-means'): rdict['year'] = [('%.2d' % i) for i in range(1979, 2020)] return rdict
Builds request dictionary to pass to retrieve command
era5/era5_functions.py
build_dict
coecms/era5
22
python
def build_dict(dsargs, yr, mn, var, daylist, oformat, tstep, back): ' \n ' timelist = [('%.2d:00' % i) for i in range(24)] rdict = {'variable': var, 'year': str(yr), 'month': str(mn), 'format': oformat, 'area': dsargs['area']} if ('product_type' in dsargs.keys()): rdict['product_type'] = dsargs['product_type'] if ('version' in dsargs.keys()): rdict['version'] = dsargs['version'] if ('dataset' in dsargs.keys()): rdict['dataset'] = dsargs['dataset'] if ('reference_dataset' in dsargs.keys()): rdict['reference_dataset'] = dsargs['reference_dataset'] if (dsargs['levels'] != []): rdict['pressure_level'] = dsargs['levels'] if (tstep == 'mon'): rdict['time'] = '00:00' elif (tstep == 'day'): rdict['day'] = daylist elif ((tstep == 'hr') and (dsargs['dsid'][:12] != 'derived-near')): rdict['day'] = daylist rdict['time'] = timelist if back: rdict['month'] = [('%.2d' % i) for i in range(1, 13)] if (dsargs['dsid'] == 'reanalysis-era5-land-monthly-means'): rdict['year'] = [('%.2d' % i) for i in range(1981, 2020)] elif (dsargs['dsid'] == 'reanalysis-era5-single-levels-monthly-means'): rdict['year'] = [('%.2d' % i) for i in range(1979, 2020)] return rdict
def build_dict(dsargs, yr, mn, var, daylist, oformat, tstep, back): ' \n ' timelist = [('%.2d:00' % i) for i in range(24)] rdict = {'variable': var, 'year': str(yr), 'month': str(mn), 'format': oformat, 'area': dsargs['area']} if ('product_type' in dsargs.keys()): rdict['product_type'] = dsargs['product_type'] if ('version' in dsargs.keys()): rdict['version'] = dsargs['version'] if ('dataset' in dsargs.keys()): rdict['dataset'] = dsargs['dataset'] if ('reference_dataset' in dsargs.keys()): rdict['reference_dataset'] = dsargs['reference_dataset'] if (dsargs['levels'] != []): rdict['pressure_level'] = dsargs['levels'] if (tstep == 'mon'): rdict['time'] = '00:00' elif (tstep == 'day'): rdict['day'] = daylist elif ((tstep == 'hr') and (dsargs['dsid'][:12] != 'derived-near')): rdict['day'] = daylist rdict['time'] = timelist if back: rdict['month'] = [('%.2d' % i) for i in range(1, 13)] if (dsargs['dsid'] == 'reanalysis-era5-land-monthly-means'): rdict['year'] = [('%.2d' % i) for i in range(1981, 2020)] elif (dsargs['dsid'] == 'reanalysis-era5-single-levels-monthly-means'): rdict['year'] = [('%.2d' % i) for i in range(1979, 2020)] return rdict<|docstring|>Builds request dictionary to pass to retrieve command<|endoftext|>
a90ce640db7294625f2c3ae29fa027049197e1f7c323187e19221c8d17b0209d
def build_mars(dsargs, yr, mn, param, oformat, tstep, back): ' Create request for MARS ' rdict = {'param': param, 'levtype': 'pl', 'type': 'an', 'grid': '0.25/0.25', 'format': oformat, 'area': dsargs['area']} datestr = f'{yr}-{mn}-01/to/{yr}-{mn}-{monthrange(int(yr), int(mn))[1]}' if (tstep == 'mon'): rdict['time'] = '00:00' rdict['stream'] = 'moda' if back: datestr = '' for m in range(1, 13): datestr = (((datestr + yr) + str(m).zfill(2)) + '01/') datestr = datestr[:(- 1)] else: rdict['stream'] = 'oper' rdict['time'] = '00:00:00/01:00:00/02:00:00/03:00:00/04:00:00/05:00:00/06:00:00/07:00:00/08:00:00/09:00:00/10:00:00/11:00:00/12:00:00/13:00:00/14:00:00/15:00:00/16:00:00/17:00:00/18:00:00/19:00:00/20:00:00/21:00:00/22:00:00/23:00:00' if (dsargs['levels'] != []): rdict['levelist'] = dsargs['levels'] rdict['date'] = datestr return rdict
Create request for MARS
era5/era5_functions.py
build_mars
coecms/era5
22
python
def build_mars(dsargs, yr, mn, param, oformat, tstep, back): ' ' rdict = {'param': param, 'levtype': 'pl', 'type': 'an', 'grid': '0.25/0.25', 'format': oformat, 'area': dsargs['area']} datestr = f'{yr}-{mn}-01/to/{yr}-{mn}-{monthrange(int(yr), int(mn))[1]}' if (tstep == 'mon'): rdict['time'] = '00:00' rdict['stream'] = 'moda' if back: datestr = for m in range(1, 13): datestr = (((datestr + yr) + str(m).zfill(2)) + '01/') datestr = datestr[:(- 1)] else: rdict['stream'] = 'oper' rdict['time'] = '00:00:00/01:00:00/02:00:00/03:00:00/04:00:00/05:00:00/06:00:00/07:00:00/08:00:00/09:00:00/10:00:00/11:00:00/12:00:00/13:00:00/14:00:00/15:00:00/16:00:00/17:00:00/18:00:00/19:00:00/20:00:00/21:00:00/22:00:00/23:00:00' if (dsargs['levels'] != []): rdict['levelist'] = dsargs['levels'] rdict['date'] = datestr return rdict
def build_mars(dsargs, yr, mn, param, oformat, tstep, back): ' ' rdict = {'param': param, 'levtype': 'pl', 'type': 'an', 'grid': '0.25/0.25', 'format': oformat, 'area': dsargs['area']} datestr = f'{yr}-{mn}-01/to/{yr}-{mn}-{monthrange(int(yr), int(mn))[1]}' if (tstep == 'mon'): rdict['time'] = '00:00' rdict['stream'] = 'moda' if back: datestr = for m in range(1, 13): datestr = (((datestr + yr) + str(m).zfill(2)) + '01/') datestr = datestr[:(- 1)] else: rdict['stream'] = 'oper' rdict['time'] = '00:00:00/01:00:00/02:00:00/03:00:00/04:00:00/05:00:00/06:00:00/07:00:00/08:00:00/09:00:00/10:00:00/11:00:00/12:00:00/13:00:00/14:00:00/15:00:00/16:00:00/17:00:00/18:00:00/19:00:00/20:00:00/21:00:00/22:00:00/23:00:00' if (dsargs['levels'] != []): rdict['levelist'] = dsargs['levels'] rdict['date'] = datestr return rdict<|docstring|>Create request for MARS<|endoftext|>
5c83568ad5dbb05cf9d756297fde13291787d62c50daf24ee323104c20e65005
def file_down(url, tempfn, size, era5log): ' Open process to download file\n If fails try tor esume at least once\n :return: success: true or false\n ' cmd = f"{cfg['getcmd']} {tempfn} {url}" era5log.info(f'ERA5 Downloading: {url} to {tempfn}') p = sp.Popen(cmd, shell=True, stdout=sp.PIPE, stderr=sp.PIPE) (out, err) = p.communicate() n = 0 if (os.path.getsize(tempfn) == size): return True while ((os.path.getsize(tempfn) < size) and (n < cfg['retry'])): cmd = f"{cfg['resumecmd']} {tempfn} {url}" era5log.info(f'ERA5 Resuming download {(n + 1)}: {url} to {tempfn}') p1 = sp.Popen(cmd, shell=True, stdout=sp.PIPE, stderr=sp.PIPE) (out, err) = p1.communicate() if (not p1.returncode): return True else: n += 1 return False
Open process to download file If fails try tor esume at least once :return: success: true or false
era5/era5_functions.py
file_down
coecms/era5
22
python
def file_down(url, tempfn, size, era5log): ' Open process to download file\n If fails try tor esume at least once\n :return: success: true or false\n ' cmd = f"{cfg['getcmd']} {tempfn} {url}" era5log.info(f'ERA5 Downloading: {url} to {tempfn}') p = sp.Popen(cmd, shell=True, stdout=sp.PIPE, stderr=sp.PIPE) (out, err) = p.communicate() n = 0 if (os.path.getsize(tempfn) == size): return True while ((os.path.getsize(tempfn) < size) and (n < cfg['retry'])): cmd = f"{cfg['resumecmd']} {tempfn} {url}" era5log.info(f'ERA5 Resuming download {(n + 1)}: {url} to {tempfn}') p1 = sp.Popen(cmd, shell=True, stdout=sp.PIPE, stderr=sp.PIPE) (out, err) = p1.communicate() if (not p1.returncode): return True else: n += 1 return False
def file_down(url, tempfn, size, era5log): ' Open process to download file\n If fails try tor esume at least once\n :return: success: true or false\n ' cmd = f"{cfg['getcmd']} {tempfn} {url}" era5log.info(f'ERA5 Downloading: {url} to {tempfn}') p = sp.Popen(cmd, shell=True, stdout=sp.PIPE, stderr=sp.PIPE) (out, err) = p.communicate() n = 0 if (os.path.getsize(tempfn) == size): return True while ((os.path.getsize(tempfn) < size) and (n < cfg['retry'])): cmd = f"{cfg['resumecmd']} {tempfn} {url}" era5log.info(f'ERA5 Resuming download {(n + 1)}: {url} to {tempfn}') p1 = sp.Popen(cmd, shell=True, stdout=sp.PIPE, stderr=sp.PIPE) (out, err) = p1.communicate() if (not p1.returncode): return True else: n += 1 return False<|docstring|>Open process to download file If fails try tor esume at least once :return: success: true or false<|endoftext|>
d2d040cb5176de9622d3012545f3158fc76e8d1ea089a035ef4b5cf23b9ba1f3
def target(stream, var, yr, mn, dsargs, tstep, back, oformat): 'Build output paths and filename, \n build list of days to process based on year and month\n ' if (oformat == 'netcdf'): oformat = 'nc' did = 'era5' if (stream in ['cems_fire', 'agera5', 'wfde5']): did = stream elif (stream == 'land'): did += stream ydir = yr if (tstep == 'mon'): daylist = [] ydir = 'monthly' else: daylist = define_dates(yr, mn) if ((tstep in ['mon', 'day']) or (stream == 'wfde5')): fname = f"{var}_{did}_{tstep}_{dsargs['grid']}_{yr}{mn}.{oformat}" if back: if (stream == 'land'): fname = f"{var}_{did}_{tstep}_{dsargs['grid']}_198101_201912.{oformat}" elif (stream == 'pressure'): fname = f"{var}_{did}_{tstep}_{dsargs['grid']}_{yr}01_{yr}12.{oformat}" elif (stream in ['cems_fire', 'agera5', 'wfde5']): fname = f"{var}_{did}_{tstep}_{dsargs['grid']}_{yr}0101_{yr}1231.{oformat}" else: fname = f"{var}_{did}_{tstep}_{dsargs['grid']}_197901_201912.{oformat}" else: startmn = mn fname = f"{var}_{did}_{dsargs['grid']}_{yr}{startmn}{daylist[0]}_{yr}{mn}{daylist[(- 1)]}.{oformat}" stagedir = os.path.join(cfg['staging'], stream, var, ydir) if (stream in ['cems_fire', 'agera5', 'wfde5']): destdir = os.path.join(cfg['derivdir'], stream, var) else: destdir = os.path.join(cfg['datadir'], stream, var, ydir) if (not os.path.exists(stagedir)): os.makedirs(stagedir) if (not os.path.exists(destdir)): os.makedirs(destdir) return (stagedir, destdir, fname, daylist)
Build output paths and filename, build list of days to process based on year and month
era5/era5_functions.py
target
coecms/era5
22
python
def target(stream, var, yr, mn, dsargs, tstep, back, oformat): 'Build output paths and filename, \n build list of days to process based on year and month\n ' if (oformat == 'netcdf'): oformat = 'nc' did = 'era5' if (stream in ['cems_fire', 'agera5', 'wfde5']): did = stream elif (stream == 'land'): did += stream ydir = yr if (tstep == 'mon'): daylist = [] ydir = 'monthly' else: daylist = define_dates(yr, mn) if ((tstep in ['mon', 'day']) or (stream == 'wfde5')): fname = f"{var}_{did}_{tstep}_{dsargs['grid']}_{yr}{mn}.{oformat}" if back: if (stream == 'land'): fname = f"{var}_{did}_{tstep}_{dsargs['grid']}_198101_201912.{oformat}" elif (stream == 'pressure'): fname = f"{var}_{did}_{tstep}_{dsargs['grid']}_{yr}01_{yr}12.{oformat}" elif (stream in ['cems_fire', 'agera5', 'wfde5']): fname = f"{var}_{did}_{tstep}_{dsargs['grid']}_{yr}0101_{yr}1231.{oformat}" else: fname = f"{var}_{did}_{tstep}_{dsargs['grid']}_197901_201912.{oformat}" else: startmn = mn fname = f"{var}_{did}_{dsargs['grid']}_{yr}{startmn}{daylist[0]}_{yr}{mn}{daylist[(- 1)]}.{oformat}" stagedir = os.path.join(cfg['staging'], stream, var, ydir) if (stream in ['cems_fire', 'agera5', 'wfde5']): destdir = os.path.join(cfg['derivdir'], stream, var) else: destdir = os.path.join(cfg['datadir'], stream, var, ydir) if (not os.path.exists(stagedir)): os.makedirs(stagedir) if (not os.path.exists(destdir)): os.makedirs(destdir) return (stagedir, destdir, fname, daylist)
def target(stream, var, yr, mn, dsargs, tstep, back, oformat): 'Build output paths and filename, \n build list of days to process based on year and month\n ' if (oformat == 'netcdf'): oformat = 'nc' did = 'era5' if (stream in ['cems_fire', 'agera5', 'wfde5']): did = stream elif (stream == 'land'): did += stream ydir = yr if (tstep == 'mon'): daylist = [] ydir = 'monthly' else: daylist = define_dates(yr, mn) if ((tstep in ['mon', 'day']) or (stream == 'wfde5')): fname = f"{var}_{did}_{tstep}_{dsargs['grid']}_{yr}{mn}.{oformat}" if back: if (stream == 'land'): fname = f"{var}_{did}_{tstep}_{dsargs['grid']}_198101_201912.{oformat}" elif (stream == 'pressure'): fname = f"{var}_{did}_{tstep}_{dsargs['grid']}_{yr}01_{yr}12.{oformat}" elif (stream in ['cems_fire', 'agera5', 'wfde5']): fname = f"{var}_{did}_{tstep}_{dsargs['grid']}_{yr}0101_{yr}1231.{oformat}" else: fname = f"{var}_{did}_{tstep}_{dsargs['grid']}_197901_201912.{oformat}" else: startmn = mn fname = f"{var}_{did}_{dsargs['grid']}_{yr}{startmn}{daylist[0]}_{yr}{mn}{daylist[(- 1)]}.{oformat}" stagedir = os.path.join(cfg['staging'], stream, var, ydir) if (stream in ['cems_fire', 'agera5', 'wfde5']): destdir = os.path.join(cfg['derivdir'], stream, var) else: destdir = os.path.join(cfg['datadir'], stream, var, ydir) if (not os.path.exists(stagedir)): os.makedirs(stagedir) if (not os.path.exists(destdir)): os.makedirs(destdir) return (stagedir, destdir, fname, daylist)<|docstring|>Build output paths and filename, build list of days to process based on year and month<|endoftext|>
1c8a2262348ba740a1adb13b00a5a325ea0f2303064a4294ed4be64b9155ee22
def dump_args(of, st, ps, yr, mns, tstep, back, urgent): ' Create arguments dictionary and dump to json file\n ' tstamp = datetime.now().strftime('%Y%m%d%H%M%S') fname = f'era5_request_{tstamp}.json' requestdir = cfg['requestdir'] if urgent: requestdir += 'Urgent/' args = {} args['format'] = of args['stream'] = st args['params'] = ps args['year'] = yr args['months'] = mns args['timestep'] = tstep args['back'] = back with open((requestdir + fname), 'w+') as fj: json.dump(args, fj) return
Create arguments dictionary and dump to json file
era5/era5_functions.py
dump_args
coecms/era5
22
python
def dump_args(of, st, ps, yr, mns, tstep, back, urgent): ' \n ' tstamp = datetime.now().strftime('%Y%m%d%H%M%S') fname = f'era5_request_{tstamp}.json' requestdir = cfg['requestdir'] if urgent: requestdir += 'Urgent/' args = {} args['format'] = of args['stream'] = st args['params'] = ps args['year'] = yr args['months'] = mns args['timestep'] = tstep args['back'] = back with open((requestdir + fname), 'w+') as fj: json.dump(args, fj) return
def dump_args(of, st, ps, yr, mns, tstep, back, urgent): ' \n ' tstamp = datetime.now().strftime('%Y%m%d%H%M%S') fname = f'era5_request_{tstamp}.json' requestdir = cfg['requestdir'] if urgent: requestdir += 'Urgent/' args = {} args['format'] = of args['stream'] = st args['params'] = ps args['year'] = yr args['months'] = mns args['timestep'] = tstep args['back'] = back with open((requestdir + fname), 'w+') as fj: json.dump(args, fj) return<|docstring|>Create arguments dictionary and dump to json file<|endoftext|>
1551d74bb277a2239ce153265b53c28bcb40fbddb67980ce22a220ae63ba51fc
def status(self, nick, channel, param=None): 'Set your status for other people to see' nick = nick.lower() if param: self.statuses[nick] = param self.msg(channel, ('%s: your status is set.' % nick)) elif (nick in self.statuses): del self.statuses[nick] self.msg(channel, ('%s: status cleared.' % nick))
Set your status for other people to see
modules/statusbot.py
status
aj00200/BBot
1
python
def status(self, nick, channel, param=None): nick = nick.lower() if param: self.statuses[nick] = param self.msg(channel, ('%s: your status is set.' % nick)) elif (nick in self.statuses): del self.statuses[nick] self.msg(channel, ('%s: status cleared.' % nick))
def status(self, nick, channel, param=None): nick = nick.lower() if param: self.statuses[nick] = param self.msg(channel, ('%s: your status is set.' % nick)) elif (nick in self.statuses): del self.statuses[nick] self.msg(channel, ('%s: status cleared.' % nick))<|docstring|>Set your status for other people to see<|endoftext|>
7425b25d3106ef664a75eed64d2908769c4463a690ca7be27a2c5ec406abd3e5
def whereis(self, nick, channel, param=None): 'Check the status of someone; Parameters: None' param = param.lower() if (param and (' ' in param)): param = param.strip() if (param in self.statuses): self.msg(channel, ('%s: %s left the status: %s' % (nick, param, self.statuses[param]))) elif (not param): self.msg(channel, ("%s: who's status do you want?" % nick)) else: self.msg(channel, ('%s: that person has not left a status.' % nick))
Check the status of someone; Parameters: None
modules/statusbot.py
whereis
aj00200/BBot
1
python
def whereis(self, nick, channel, param=None): param = param.lower() if (param and (' ' in param)): param = param.strip() if (param in self.statuses): self.msg(channel, ('%s: %s left the status: %s' % (nick, param, self.statuses[param]))) elif (not param): self.msg(channel, ("%s: who's status do you want?" % nick)) else: self.msg(channel, ('%s: that person has not left a status.' % nick))
def whereis(self, nick, channel, param=None): param = param.lower() if (param and (' ' in param)): param = param.strip() if (param in self.statuses): self.msg(channel, ('%s: %s left the status: %s' % (nick, param, self.statuses[param]))) elif (not param): self.msg(channel, ("%s: who's status do you want?" % nick)) else: self.msg(channel, ('%s: that person has not left a status.' % nick))<|docstring|>Check the status of someone; Parameters: None<|endoftext|>
6cbd3d9743e18de938f4c41ff3f2258da34b03f7c0719fcd881d1e71205f5d80
def GetData(self, location):
    """Fetch the Surfline forecast for a spot and return it as a dataframe.

    Args:
        location: String. Either the Surfline spot code itself, or a
            human-readable location name, as long as the name has been
            pre-recorded in the class location lookup dictionary.

    Returns:
        df_surf: DataFrame with wave heights and timestamps.
    """
    # Translate a friendly name into its Surfline spot code when possible.
    if location.upper() in list(self.LOCATION_LOOKUP):
        location = self.LOCATION_LOOKUP[location.upper()]

    spot_url = self.URL
    spot_url = spot_url.replace('_ENTER_SPOT_ID_HERE_', location, 1)

    # Present browser-like headers so the request is served normally.
    session = requests.Session()
    session.headers['User-Agent'] = self.USER_AGENT
    session.headers['Accept-Language'] = self.LANGUAGE
    session.headers['Content-Language'] = self.LANGUAGE

    response = session.get(spot_url)

    # The endpoint body is JSON; BeautifulSoup is only used to pull out
    # the raw response text before parsing.
    page_text = bs(response.text, 'html.parser').text
    surf_dict = json.loads(page_text)

    df_surf = self.SimplifyData(surf_dict)
    self.data[location] = df_surf
    return df_surf
9a1293c263f585cae3ba46a213d1e420793e3774b7cd47e39959ec0fa860a162
def SimplifyData(self, surf_dict):
    """Parse the raw Surfline JSON payload into a tidy dataframe.

    Args:
        surf_dict: Dictionary. Dictionary form of the JSON read from the
            Surfline website.

    Returns:
        df: DataFrame with wave heights and timestamps.
    """
    records = []
    # Keep every 6th entry of the wave feed (4 samples per day).
    for wave in surf_dict['data']['wave'][::6]:
        low = wave['surf']['min']
        high = wave['surf']['max']
        ts = wave['timestamp']
        weekday = self.DOTW[datetime.datetime.fromtimestamp(ts).weekday()]
        records.append((ts, weekday, high, low, np.mean([low, high])))

    df = pd.DataFrame(records, columns=['Timestamp',
                                        'Weekday',
                                        'Wave Max Height [ft]',
                                        'Wave Min Height [ft]',
                                        'Wave Avg Height [ft]'])
    return df
ae435f840c2d5e6d548f76282465e21586f1479bb775a9df1a60cfe3ad57a655
def PlotSurfResults(self, location):
    """Plot the surf forecast as a bar chart.

    Currently works only if 6 days' worth of data is present with 4
    datapoints per day.

    Args:
        location: String. Either the Surfline spot code itself, or a
            human-readable location name, as long as the name has been
            pre-recorded in the class location lookup dictionary.
    """
    # Resolve a friendly name to its Surfline spot code; otherwise assume
    # the argument already is the spot code. (Bug fix: the original left
    # location_key undefined in that case, mirroring GetData's handling.)
    if location.upper() in list(self.LOCATION_LOOKUP):
        location_key = self.LOCATION_LOOKUP[location.upper()]
    else:
        location_key = location
    df_surf = self.data[location_key]

    days = 6
    points_per_day = 4

    weekdays = df_surf['Weekday'].values
    wave_max_height = df_surf['Wave Max Height [ft]'].values
    wave_avg_height = df_surf['Wave Avg Height [ft]'].values
    # Error-bar length: distance from average to maximum height.
    diff = wave_max_height - wave_avg_height

    x = np.arange(0, len(weekdays), 1)
    x_labels = ['12AM', '6AM', '12PM', '6PM'] * days

    plt.figure(figsize=(15, 7))
    for i in range(days):
        # Slice out the current day's points.
        x_data = x[i * points_per_day:(i + 1) * points_per_day]
        y_data = wave_avg_height[i * points_per_day:(i + 1) * points_per_day]
        diff_data = diff[i * points_per_day:(i + 1) * points_per_day]

        rects = plt.bar(x=x_data, height=y_data, width=1, yerr=diff_data,
                        edgecolor='black')

        # Separate consecutive days with a dashed red line.
        if i != (days - 1):
            plt.axvline(x_data[-1] + 0.5, color='red', markersize=0,
                        linestyle='--', linewidth=3)

        # Annotate each bar with its average height.
        for rect in rects:
            height = rect.get_height()
            plt.gca().annotate('%.1f' % height,
                               xy=(rect.get_x() + (rect.get_width() / 2), height + 1),
                               xytext=(0, 3), textcoords='offset points',
                               ha='center', va='bottom', fontsize=12)

        # Label the day of the week above the group of bars.
        plt.gca().text(x=np.mean(x_data), y=np.max(wave_max_height) + 1.5,
                       s=weekdays[i * 4], fontsize=20, ha='center', va='center')

    plt.gca().yaxis.grid(True)
    _ = plt.xticks(ticks=x, labels=x_labels, rotation=30, fontsize=14)
    _ = plt.yticks(fontsize=14)
    plt.xlabel('Time [HR]', fontsize=18)
    plt.ylabel('Wave Size [ft]', fontsize=18)
    plt.title('Wave Size over 6 day Period at %s' % location, fontsize=28)

    # Leave headroom above the tallest annotation.
    min_y, max_y = plt.ylim()
    plt.ylim([min_y, max_y + 2])
550cfe8ddf67c3e80b2f8d9c0323448431f9d6a8993dc611b42698919fac10cc
@numba.jit()
def CCI(dfHigh, dfLow, dfClose, period=20, scaling=0.015):
    '''
    Commodity Channel Index.

    Similar to the TTR package in R; the central tendency measure uses the
    mean (SMA of the typical price). The deviation is the mean absolute
    deviation of the typical price within each rolling window, scaled by
    `scaling` (default 0.015). The first `period - 1` entries of the
    result are NaN so the output aligns with the input series.
    '''
    # Typical price: (high + low + close) / 3.
    typicalPrice = (((dfHigh + dfClose) + dfLow) / 3)
    rolling_windows = rolling_window(typicalPrice, period)
    # SMA of the typical price, trimmed to align with the rolling windows.
    central_tendency_arr = SMA_numba(typicalPrice, period)[(period - 1):]
    # Absolute deviation of every windowed value from its window's mean.
    abs_deviation_arr = np.abs((rolling_windows.T - central_tendency_arr).T)
    mean_abs_deviation = np.zeros(len(abs_deviation_arr))
    for i in range(len(rolling_windows)):
        mean_abs_deviation[i] = np.mean(abs_deviation_arr[i])
    result = ((typicalPrice[(period - 1):] - central_tendency_arr) / (mean_abs_deviation * scaling))
    # Pad the warm-up region with NaN so output length matches the input.
    result = np.concatenate((np.array(([np.nan] * (period - 1))), result))
    return result
16324467ed6194a7f1c0e34f3f797e6bf70ec508f4723f74be1efb94a2523f83
def _get_results(self, hash_output=False):
    """Digest info in the statepoint and return as a string."""
    statepoint_path = glob.glob(os.path.join(os.getcwd(), self._sp_name))[0]
    sp = openmc.StatePoint(statepoint_path)
    self.mgxs_lib.load_from_statepoint(sp)

    # Collect every MGXS dataframe rendering, one per (domain, type) pair.
    pieces = []
    for domain in self.mgxs_lib.domains:
        for mgxs_type in self.mgxs_lib.mgxs_types:
            mgxs = self.mgxs_lib.get_mgxs(domain, mgxs_type)
            pieces.append(mgxs.get_pandas_dataframe().to_string() + '\n')
    outstr = ''.join(pieces)

    # Optionally collapse the (potentially large) text into a SHA-512 hash.
    if hash_output:
        outstr = hashlib.sha512(outstr.encode('utf-8')).hexdigest()
    return outstr
5ab25abe46075d00c0bc427862d29a6fce0ba8849355a083684f3e289f920c1e
@task
def check_for_change(self):
    """Determine whether a new release has been made.

    Compares the fingerprint recorded in the last deployment manifest
    against the currently targeted geckodriver version.

    Returns:
        bool: True when the fingerprints differ (a new release exists),
        False otherwise.
    """
    # Note: the original bound self.local_renderer to an unused local; removed.
    lm = self.last_manifest
    last_fingerprint = lm.fingerprint
    current_fingerprint = self.get_target_geckodriver_version_number()
    self.vprint('last_fingerprint:', last_fingerprint)
    self.vprint('current_fingerprint:', current_fingerprint)
    if last_fingerprint != current_fingerprint:
        print('A new release is available. %s' % self.get_most_recent_version())
        return True
    print('No updates found.')
    return False
a11a97eb6c3ae4e603e3a85fd0da2b7adce29546eee05b28a1adee95a1cbdf73
@task
def get_latest_geckodriver_version_number(self):
    """Retrieve the version number from the latest tagged release."""
    import feedparser
    feed = feedparser.parse(self.env.geckodriver_feed)
    latest_url = feed['entries'][0]['link']
    self.vprint('latest_url:', latest_url)
    # The version is the final path component minus its leading character
    # (e.g. ".../v0.29.0" -> "0.29.0").
    version = latest_url.split('/')[-1][1:]
    self.vprint('version:', version)
    return version
96793ce05762caba6589bed4e488e087600b1c9e57801f7d9be09f6175261b38
def record_manifest(self):
    """Called after a deployment to record any data necessary to detect
    changes for a future deployment.
    """
    manifest = super(SeleniumSatchel, self).record_manifest()
    # The geckodriver version number doubles as the change fingerprint.
    fingerprint = self.get_target_geckodriver_version_number()
    manifest['fingerprint'] = str(fingerprint)
    return manifest
43c8da14701a5f780a39ab8c94955d3553f2f61dc9b66dd5a5f840d233b6bac0
def convert_currency(val):
    """Convert a currency string such as '$125,000.00' to a float.

    - Remove $
    - Remove commas
    - Convert to float type
    """
    cleaned = val.replace('$', '').replace(',', '')
    return float(cleaned)
c066ba98e85c23aa66f3e97f1a7b97e9ce9ce44408df8de953a645b586d548ef
def convert_percent(val):
    """Convert a percentage string such as '50%' to a floating point fraction."""
    number = float(val.replace('%', ''))
    return number / 100
fe2a82b229fd35fc22e2bcb7501fc3e23680b3bd8a37d5c9dbcf7b6eff70008c
def create_blank_udic(ndim):
    """Create a blank universal dictionary for a spectrum of dimension ndim."""
    udic = {'ndim': ndim}
    axis_labels = ['X', 'Y', 'Z', 'A']
    for i in range(ndim):
        udic[i] = {
            'sw': 999.99,      # placeholder spectral width
            'complex': True,
            'obs': 999.99,     # placeholder observation frequency
            'car': 999.99,     # placeholder carrier frequency
            'size': 1,
            'label': axis_labels[i],
            # Only the final axis is acquired directly.
            'encoding': 'direct' if i == ndim - 1 else 'states',
            'time': True,
            'freq': False,
        }
    return udic
4da79d9fc1f533954f7c0104a10e99cd1676b7a31a574777e74a52cbf48753be
def uc_from_udic(udic, dim=-1):
    """Create a unit conversion object from a universal dictionary.

    Parameters
    ----------
    udic : dic
        Universal dictionary of spectral parameters.
    dim : int, optional
        Dimension number to create the unit conversion object for.
        Defaults to the last dimension.

    Returns
    -------
    uc : unit conversion object
        Unit conversion object for the given dimension.
    """
    if dim == -1:
        # Default to the last (direct) dimension.
        dim = udic['ndim'] - 1
    axis = udic[dim]
    return unit_conversion(axis['size'], axis['complex'], axis['sw'],
                           axis['obs'], axis['car'])
800873be44618a69e37dc09615c8ceeb75d5919d1c354600c963c2fa5c6f3095
def uc_from_freqscale(scale, obs, unit='ppm'):
    """Create a unit conversion object from a spectrum frequency scale axis.

    Parameters
    ----------
    scale : array like
        array of spectrum axis
    obs : float
        Observation frequency in MHz.
    unit : {'ppm', 'hz', 'khz'}
        The unit of the scale axis.

    Returns
    -------
    uc : unit conversion object.
        Unit conversion object for given axis.

    Raises
    ------
    ValueError
        If `unit` is not one of the supported units.
    """
    scale = np.array(scale)
    size = len(scale)

    if unit not in ('ppm', 'hz', 'khz'):
        raise ValueError('{} is not a supported unit.'.format(unit))

    # Renamed locals: the original shadowed the builtins min/max/complex.
    is_complex = False
    scale_min = scale.min()
    scale_max = scale.max()
    dx = abs(scale[1] - scale[0])  # spacing between adjacent scale points

    # Sweep width spans half a point beyond each end of the scale;
    # the carrier sits at the midpoint of the scale.
    sw = (scale_max + dx / 2.0) - (scale_min - dx / 2.0)
    car = (scale_min - dx / 2.0) + (scale_max - scale_min) / 2.0

    if unit == 'ppm':
        # ppm -> Hz requires multiplying by the observation frequency.
        sw *= obs
        car *= obs
    elif unit == 'khz':
        sw /= 1000.0
        car /= 1000.0
    # 'hz' needs no conversion.

    return unit_conversion(size, is_complex, sw, obs, car)
e44566de8e9b590b4159d9e78d86624a83da07ca8761a17131bccebbd587c7e1
def open_towrite(filename, overwrite=False, mode='wb'):
    """Open filename for writing and return the file object.

    Checks whether the file already exists (raising IOError when
    overwrite is False) and creates any missing parent directories.

    Parameters
    ----------
    filename : str
        Path of the file to open.
    overwrite : bool, optional
        When False (the default), refuse to clobber an existing file.
    mode : str, optional
        Mode passed to open(); defaults to binary write.
    """
    if os.path.exists(filename) and overwrite is False:
        raise IOError('File exists, recall with overwrite=True')
    # os.path.dirname replaces the original os.path.split, whose filename
    # component was never used.
    dirname = os.path.dirname(filename)
    if dirname != '' and os.path.exists(dirname) is False:
        os.makedirs(dirname)
    return open(filename, mode)
f688853ea2597b240cd1fa166bbf91fbfaecc2e3006a07b6a384c9591b5cb5cf
def index2trace_flat(shape, index): '\n Calculate trace number from shape and index of all indirect dimensions\n assuming a flat structure\n ' a = index[(- 1)] for (i, v) in enumerate(index[:(- 1)]): mult = reduce((lambda x, y: (x * y)), shape[(i + 1):]) a = (a + (mult * v)) return a
Calculate trace number from shape and index of all indirect dimensions assuming a flat structure
spec2nii/fileiobase.py
index2trace_flat
NeutralKaon/spec2nii
5
python
def index2trace_flat(shape, index): '\n Calculate trace number from shape and index of all indirect dimensions\n assuming a flat structure\n ' a = index[(- 1)] for (i, v) in enumerate(index[:(- 1)]): mult = reduce((lambda x, y: (x * y)), shape[(i + 1):]) a = (a + (mult * v)) return a
def index2trace_flat(shape, index): '\n Calculate trace number from shape and index of all indirect dimensions\n assuming a flat structure\n ' a = index[(- 1)] for (i, v) in enumerate(index[:(- 1)]): mult = reduce((lambda x, y: (x * y)), shape[(i + 1):]) a = (a + (mult * v)) return a<|docstring|>Calculate trace number from shape and index of all indirect dimensions assuming a flat structure<|endoftext|>
dd373a651e3d8cd4d8051152103afe6dd55d80572423b0a1e50c94995479597e
def trace2index_flat(shape, ntrace): '\n Calculate the index of a trace assuming a flat structure\n ' q = ntrace index = [] for s in shape[:0:(- 1)]: (q, r) = divmod(q, s) index.insert(0, r) index.insert(0, q) return tuple(index)
Calculate the index of a trace assuming a flat structure
spec2nii/fileiobase.py
trace2index_flat
NeutralKaon/spec2nii
5
python
def trace2index_flat(shape, ntrace): '\n \n ' q = ntrace index = [] for s in shape[:0:(- 1)]: (q, r) = divmod(q, s) index.insert(0, r) index.insert(0, q) return tuple(index)
def trace2index_flat(shape, ntrace): '\n \n ' q = ntrace index = [] for s in shape[:0:(- 1)]: (q, r) = divmod(q, s) index.insert(0, r) index.insert(0, q) return tuple(index)<|docstring|>Calculate the index of a trace assuming a flat structure<|endoftext|>
42b1ed888566845464b72d18db88efa430e49bff0cb3917ff2b19ea1261b90ce
def index2trace_opp(shape, index): '\n Calculate trace number from shape and index of all indirect dimensions\n assuming a phase ordering opposite the time increments.\n ' n = len(shape) phases = [(v % 2) for v in index] nphase = index2trace_flat(([2] * n), phases[::(- 1)]) pindex = [(v // 2) for v in index] pshape = [(i // 2) for i in shape] nbase = index2trace_flat(pshape, pindex) return ((nbase * (2 ** n)) + nphase)
Calculate trace number from shape and index of all indirect dimensions assuming a phase ordering opposite the time increments.
spec2nii/fileiobase.py
index2trace_opp
NeutralKaon/spec2nii
5
python
def index2trace_opp(shape, index): '\n Calculate trace number from shape and index of all indirect dimensions\n assuming a phase ordering opposite the time increments.\n ' n = len(shape) phases = [(v % 2) for v in index] nphase = index2trace_flat(([2] * n), phases[::(- 1)]) pindex = [(v // 2) for v in index] pshape = [(i // 2) for i in shape] nbase = index2trace_flat(pshape, pindex) return ((nbase * (2 ** n)) + nphase)
def index2trace_opp(shape, index): '\n Calculate trace number from shape and index of all indirect dimensions\n assuming a phase ordering opposite the time increments.\n ' n = len(shape) phases = [(v % 2) for v in index] nphase = index2trace_flat(([2] * n), phases[::(- 1)]) pindex = [(v // 2) for v in index] pshape = [(i // 2) for i in shape] nbase = index2trace_flat(pshape, pindex) return ((nbase * (2 ** n)) + nphase)<|docstring|>Calculate trace number from shape and index of all indirect dimensions assuming a phase ordering opposite the time increments.<|endoftext|>
ab0af434b1c8a2760d0a5ac9ed6b73e6edd44542cc213c23c587e830cf053122
def trace2index_opp(shape, ntrace): '\n Calculate the index of a trace assuming opposite phase/time increment\n ordering\n ' n = len(shape) (q, r) = divmod(ntrace, (2 ** n)) to_add = list(trace2index_flat(([2] * n), r))[::(- 1)] pshape = [(i // 2) for i in shape] base = list(trace2index_flat(pshape, q)) total = [((b * 2) + a) for (b, a) in zip(base, to_add)] return tuple(total)
Calculate the index of a trace assuming opposite phase/time increment ordering
spec2nii/fileiobase.py
trace2index_opp
NeutralKaon/spec2nii
5
python
def trace2index_opp(shape, ntrace): '\n Calculate the index of a trace assuming opposite phase/time increment\n ordering\n ' n = len(shape) (q, r) = divmod(ntrace, (2 ** n)) to_add = list(trace2index_flat(([2] * n), r))[::(- 1)] pshape = [(i // 2) for i in shape] base = list(trace2index_flat(pshape, q)) total = [((b * 2) + a) for (b, a) in zip(base, to_add)] return tuple(total)
def trace2index_opp(shape, ntrace): '\n Calculate the index of a trace assuming opposite phase/time increment\n ordering\n ' n = len(shape) (q, r) = divmod(ntrace, (2 ** n)) to_add = list(trace2index_flat(([2] * n), r))[::(- 1)] pshape = [(i // 2) for i in shape] base = list(trace2index_flat(pshape, q)) total = [((b * 2) + a) for (b, a) in zip(base, to_add)] return tuple(total)<|docstring|>Calculate the index of a trace assuming opposite phase/time increment ordering<|endoftext|>
32b09ec4e1b1956f8050089367102bf5f362fba82b75129f271c66475e669ad5
def index2trace_reg(shape, index): '\n Calculate trace number from shape and index of all indirect dimensions\n assuming the same phase and time ordering.\n ' n = len(shape) phases = [(v % 2) for v in index] nphase = index2trace_flat(([2] * n), phases) pindex = [(v // 2) for v in index] pshape = [(i // 2) for i in shape] nbase = index2trace_flat(pshape, pindex) return ((nbase * (2 ** n)) + nphase)
Calculate trace number from shape and index of all indirect dimensions assuming the same phase and time ordering.
spec2nii/fileiobase.py
index2trace_reg
NeutralKaon/spec2nii
5
python
def index2trace_reg(shape, index): '\n Calculate trace number from shape and index of all indirect dimensions\n assuming the same phase and time ordering.\n ' n = len(shape) phases = [(v % 2) for v in index] nphase = index2trace_flat(([2] * n), phases) pindex = [(v // 2) for v in index] pshape = [(i // 2) for i in shape] nbase = index2trace_flat(pshape, pindex) return ((nbase * (2 ** n)) + nphase)
def index2trace_reg(shape, index): '\n Calculate trace number from shape and index of all indirect dimensions\n assuming the same phase and time ordering.\n ' n = len(shape) phases = [(v % 2) for v in index] nphase = index2trace_flat(([2] * n), phases) pindex = [(v // 2) for v in index] pshape = [(i // 2) for i in shape] nbase = index2trace_flat(pshape, pindex) return ((nbase * (2 ** n)) + nphase)<|docstring|>Calculate trace number from shape and index of all indirect dimensions assuming the same phase and time ordering.<|endoftext|>
dc4e0d87e975ba6f4fd9ea7882c58ec7db99351b9ff34ccfc57aa3d5a9f727dc
def trace2index_reg(shape, ntrace): '\n Calculate the index of a trace assuming the same phase/time increment\n ordering\n ' n = len(shape) (q, r) = divmod(ntrace, (2 ** n)) to_add = list(trace2index_flat(([2] * n), r)) pshape = [(i // 2) for i in shape] base = list(trace2index_flat(pshape, q)) total = [((b * 2) + a) for (b, a) in zip(base, to_add)] return tuple(total)
Calculate the index of a trace assuming the same phase/time increment ordering
spec2nii/fileiobase.py
trace2index_reg
NeutralKaon/spec2nii
5
python
def trace2index_reg(shape, ntrace): '\n Calculate the index of a trace assuming the same phase/time increment\n ordering\n ' n = len(shape) (q, r) = divmod(ntrace, (2 ** n)) to_add = list(trace2index_flat(([2] * n), r)) pshape = [(i // 2) for i in shape] base = list(trace2index_flat(pshape, q)) total = [((b * 2) + a) for (b, a) in zip(base, to_add)] return tuple(total)
def trace2index_reg(shape, ntrace): '\n Calculate the index of a trace assuming the same phase/time increment\n ordering\n ' n = len(shape) (q, r) = divmod(ntrace, (2 ** n)) to_add = list(trace2index_flat(([2] * n), r)) pshape = [(i // 2) for i in shape] base = list(trace2index_flat(pshape, q)) total = [((b * 2) + a) for (b, a) in zip(base, to_add)] return tuple(total)<|docstring|>Calculate the index of a trace assuming the same phase/time increment ordering<|endoftext|>
48de60115de1f75cb53540e89d516f066e70b9afc77cc73aa71e8b7281f4e3a2
def __init__(self, size, cplx, sw, obs, car): '\n create and set up a unit_conversion object\n ' self._size = int(size) self._cplx = bool(cplx) self._sw = float(sw) self._obs = float(obs) self._car = float(car) self._delta = ((- self._sw) / (self._size * self._obs)) self._first = ((self._car / self._obs) - ((self._delta * self._size) / 2.0))
create and set up a unit_conversion object
spec2nii/fileiobase.py
__init__
NeutralKaon/spec2nii
5
python
def __init__(self, size, cplx, sw, obs, car): '\n \n ' self._size = int(size) self._cplx = bool(cplx) self._sw = float(sw) self._obs = float(obs) self._car = float(car) self._delta = ((- self._sw) / (self._size * self._obs)) self._first = ((self._car / self._obs) - ((self._delta * self._size) / 2.0))
def __init__(self, size, cplx, sw, obs, car): '\n \n ' self._size = int(size) self._cplx = bool(cplx) self._sw = float(sw) self._obs = float(obs) self._car = float(car) self._delta = ((- self._sw) / (self._size * self._obs)) self._first = ((self._car / self._obs) - ((self._delta * self._size) / 2.0))<|docstring|>create and set up a unit_conversion object<|endoftext|>
064a14a51710b2b3b92e51808b28ba7974d0f33193aa44a07014ea2e46533d2b
def __unit2pnt(self, val, units): '\n Convert units to points\n ' units = units.upper() if (units == 'PPM'): pts = self.__ppm2pts(val) elif (units == 'HZ'): pts = self.__hz2pts(val) elif ((units == '%') or (units == 'PERCENT')): pts = self.__percent2pts(val) elif ((units == 'SEC') or (units == 'SECOND') or (units == 'S')): pts = self.__sec2pts(val) elif (units == 'MS'): pts = self.__ms2pts(val) elif (units == 'US'): pts = self.__us2pts(val) else: raise ValueError('invalid unit type') return pts
Convert units to points
spec2nii/fileiobase.py
__unit2pnt
NeutralKaon/spec2nii
5
python
def __unit2pnt(self, val, units): '\n \n ' units = units.upper() if (units == 'PPM'): pts = self.__ppm2pts(val) elif (units == 'HZ'): pts = self.__hz2pts(val) elif ((units == '%') or (units == 'PERCENT')): pts = self.__percent2pts(val) elif ((units == 'SEC') or (units == 'SECOND') or (units == 'S')): pts = self.__sec2pts(val) elif (units == 'MS'): pts = self.__ms2pts(val) elif (units == 'US'): pts = self.__us2pts(val) else: raise ValueError('invalid unit type') return pts
def __unit2pnt(self, val, units): '\n \n ' units = units.upper() if (units == 'PPM'): pts = self.__ppm2pts(val) elif (units == 'HZ'): pts = self.__hz2pts(val) elif ((units == '%') or (units == 'PERCENT')): pts = self.__percent2pts(val) elif ((units == 'SEC') or (units == 'SECOND') or (units == 'S')): pts = self.__sec2pts(val) elif (units == 'MS'): pts = self.__ms2pts(val) elif (units == 'US'): pts = self.__us2pts(val) else: raise ValueError('invalid unit type') return pts<|docstring|>Convert units to points<|endoftext|>
2084309a6858f3c8d03a4cb2431b7626ad8128dce8b074c19f95347a999f805c
def __pnt2unit(self, val, units): '\n Convert points to units\n ' units = units.upper() if (units == 'PPM'): k = self.__pts2ppm(val) elif (units == 'HZ'): k = self.__pts2hz(val) elif ((units == '%') or (units == 'PERCENT')): k = self.__pts2percent(val) elif ((units == 'SEC') or (units == 'SECOND') or (units == 'S')): k = self.__pts2sec(val) elif (units == 'MS'): k = self.__pts2ms(val) elif (units == 'US'): k = self.__pts2us(val) else: raise ValueError('invalid units') return k
Convert points to units
spec2nii/fileiobase.py
__pnt2unit
NeutralKaon/spec2nii
5
python
def __pnt2unit(self, val, units): '\n \n ' units = units.upper() if (units == 'PPM'): k = self.__pts2ppm(val) elif (units == 'HZ'): k = self.__pts2hz(val) elif ((units == '%') or (units == 'PERCENT')): k = self.__pts2percent(val) elif ((units == 'SEC') or (units == 'SECOND') or (units == 'S')): k = self.__pts2sec(val) elif (units == 'MS'): k = self.__pts2ms(val) elif (units == 'US'): k = self.__pts2us(val) else: raise ValueError('invalid units') return k
def __pnt2unit(self, val, units): '\n \n ' units = units.upper() if (units == 'PPM'): k = self.__pts2ppm(val) elif (units == 'HZ'): k = self.__pts2hz(val) elif ((units == '%') or (units == 'PERCENT')): k = self.__pts2percent(val) elif ((units == 'SEC') or (units == 'SECOND') or (units == 'S')): k = self.__pts2sec(val) elif (units == 'MS'): k = self.__pts2ms(val) elif (units == 'US'): k = self.__pts2us(val) else: raise ValueError('invalid units') return k<|docstring|>Convert points to units<|endoftext|>
bf4373c0054ce4b2a3f896ca0dcdba42bde5253fbcde873939bb8c9758a20ae6
def __str2pnt(self, s): '\n Convert string with units to points\n ' units = s.strip((((string.digits + string.whitespace) + '.') + '-')).upper() val = float(s.strip(((string.ascii_letters + string.whitespace) + '%'))) return self.__unit2pnt(val, units)
Convert string with units to points
spec2nii/fileiobase.py
__str2pnt
NeutralKaon/spec2nii
5
python
def __str2pnt(self, s): '\n \n ' units = s.strip((((string.digits + string.whitespace) + '.') + '-')).upper() val = float(s.strip(((string.ascii_letters + string.whitespace) + '%'))) return self.__unit2pnt(val, units)
def __str2pnt(self, s): '\n \n ' units = s.strip((((string.digits + string.whitespace) + '.') + '-')).upper() val = float(s.strip(((string.ascii_letters + string.whitespace) + '%'))) return self.__unit2pnt(val, units)<|docstring|>Convert string with units to points<|endoftext|>
84b7aa2868da85b815fcca4528b2ccf09befd8d00848bab50efa4b499efc954d
def __convert(self, val, unit=None): '\n Convert string or value/unit pair\n ' if isinstance(val, str): return self.__str2pnt(val) else: if (unit is None): raise ValueError('invalid unit type') return self.__unit2pnt(val, unit)
Convert string or value/unit pair
spec2nii/fileiobase.py
__convert
NeutralKaon/spec2nii
5
python
def __convert(self, val, unit=None): '\n \n ' if isinstance(val, str): return self.__str2pnt(val) else: if (unit is None): raise ValueError('invalid unit type') return self.__unit2pnt(val, unit)
def __convert(self, val, unit=None): '\n \n ' if isinstance(val, str): return self.__str2pnt(val) else: if (unit is None): raise ValueError('invalid unit type') return self.__unit2pnt(val, unit)<|docstring|>Convert string or value/unit pair<|endoftext|>