Dataset Preview
Duplicate
The full dataset viewer is not available (click to read why). Only showing a preview of the rows.
The dataset generation failed
Error code:   DatasetGenerationError
Exception:    TypeError
Message:      Couldn't cast array of type
struct<content_hash: string, timestamp: string, source: string, line_count: int64, max_line_length: int64, avg_line_length: double, alnum_prop: double, repo_name: string, id: string, size: string, binary: bool, copies: string, ref: string, path: string, mode: string, license: string, language: list<item: struct<name: string, bytes: string>>, symlink_target: string>
to
{'content_hash': Value(dtype='string', id=None), 'timestamp': Value(dtype='string', id=None), 'source': Value(dtype='string', id=None), 'line_count': Value(dtype='int64', id=None), 'max_line_length': Value(dtype='int64', id=None), 'avg_line_length': Value(dtype='float64', id=None), 'alnum_prop': Value(dtype='float64', id=None), 'repo_name': Value(dtype='string', id=None), 'id': Value(dtype='string', id=None), 'size': Value(dtype='string', id=None), 'binary': Value(dtype='bool', id=None), 'copies': Value(dtype='string', id=None), 'ref': Value(dtype='string', id=None), 'path': Value(dtype='string', id=None), 'mode': Value(dtype='string', id=None), 'license': Value(dtype='string', id=None), 'language': [{'name': Value(dtype='string', id=None), 'bytes': Value(dtype='string', id=None)}]}
Traceback:    Traceback (most recent call last):
                File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/builder.py", line 2011, in _prepare_split_single
                  writer.write_table(table)
                File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/arrow_writer.py", line 585, in write_table
                  pa_table = table_cast(pa_table, self._schema)
                File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/table.py", line 2302, in table_cast
                  return cast_table_to_schema(table, schema)
                File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/table.py", line 2261, in cast_table_to_schema
                  arrays = [cast_array_to_feature(table[name], feature) for name, feature in features.items()]
                File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/table.py", line 2261, in <listcomp>
                  arrays = [cast_array_to_feature(table[name], feature) for name, feature in features.items()]
                File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/table.py", line 1802, in wrapper
                  return pa.chunked_array([func(chunk, *args, **kwargs) for chunk in array.chunks])
                File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/table.py", line 1802, in <listcomp>
                  return pa.chunked_array([func(chunk, *args, **kwargs) for chunk in array.chunks])
                File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/table.py", line 2122, in cast_array_to_feature
                  raise TypeError(f"Couldn't cast array of type\n{_short_str(array.type)}\nto\n{_short_str(feature)}")
              TypeError: Couldn't cast array of type
              struct<content_hash: string, timestamp: string, source: string, line_count: int64, max_line_length: int64, avg_line_length: double, alnum_prop: double, repo_name: string, id: string, size: string, binary: bool, copies: string, ref: string, path: string, mode: string, license: string, language: list<item: struct<name: string, bytes: string>>, symlink_target: string>
              to
              {'content_hash': Value(dtype='string', id=None), 'timestamp': Value(dtype='string', id=None), 'source': Value(dtype='string', id=None), 'line_count': Value(dtype='int64', id=None), 'max_line_length': Value(dtype='int64', id=None), 'avg_line_length': Value(dtype='float64', id=None), 'alnum_prop': Value(dtype='float64', id=None), 'repo_name': Value(dtype='string', id=None), 'id': Value(dtype='string', id=None), 'size': Value(dtype='string', id=None), 'binary': Value(dtype='bool', id=None), 'copies': Value(dtype='string', id=None), 'ref': Value(dtype='string', id=None), 'path': Value(dtype='string', id=None), 'mode': Value(dtype='string', id=None), 'license': Value(dtype='string', id=None), 'language': [{'name': Value(dtype='string', id=None), 'bytes': Value(dtype='string', id=None)}]}
              
              The above exception was the direct cause of the following exception:
              
              Traceback (most recent call last):
                File "/src/services/worker/src/worker/job_runners/config/parquet_and_info.py", line 1529, in compute_config_parquet_and_info_response
                  parquet_operations = convert_to_parquet(builder)
                File "/src/services/worker/src/worker/job_runners/config/parquet_and_info.py", line 1154, in convert_to_parquet
                  builder.download_and_prepare(
                File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/builder.py", line 1027, in download_and_prepare
                  self._download_and_prepare(
                File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/builder.py", line 1122, in _download_and_prepare
                  self._prepare_split(split_generator, **prepare_split_kwargs)
                File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/builder.py", line 1882, in _prepare_split
                  for job_id, done, content in self._prepare_split_single(
                File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/builder.py", line 2038, in _prepare_split_single
                  raise DatasetGenerationError("An error occurred while generating the dataset") from e
              datasets.exceptions.DatasetGenerationError: An error occurred while generating the dataset

Need help to make the dataset viewer work? Make sure to review how to configure the dataset viewer, and open a discussion for direct support.

text
string
meta
dict
package sameuser
{ "content_hash": "df716692b24a42a014c80c67fa7040e8", "timestamp": "", "source": "github", "line_count": 1, "max_line_length": 16, "avg_line_length": 17, "alnum_prop": 0.8823529411764706, "repo_name": "go-delve/delve", "id": "b6fc8fd14b4e8ae13783b5ee9d9066476f404d9f", "size": "122", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "service/internal/sameuser/doc.go", "mode": "33188", "license": "mit", "language": [ { "name": "Assembly", "bytes": "3179" }, { "name": "C", "bytes": "2657334" }, { "name": "Go", "bytes": "2792924" }, { "name": "Kotlin", "bytes": "7688" }, { "name": "Makefile", "bytes": "970" }, { "name": "PowerShell", "bytes": "3085" }, { "name": "Python", "bytes": "599" }, { "name": "Shell", "bytes": "5109" }, { "name": "Starlark", "bytes": "2024" } ] }
/**
 * CCB relative-positioning helpers.
 * Convert a point expressed in one of the CCB_POSITIONTYPE_* coordinate
 * conventions into an absolute position inside containerSize.
 */
cc.getAbsolutePosition = function (pt, type, containerSize, propName) {
    var absPt = cc.p(0, 0);
    switch (type) {
    case CCB_POSITIONTYPE_RELATIVE_BOTTOM_LEFT:
        // NOTE: aliases the input point object (no copy), as before.
        absPt = pt;
        break;
    case CCB_POSITIONTYPE_RELATIVE_TOP_LEFT:
        absPt.x = pt.x;
        absPt.y = containerSize.height - pt.y;
        break;
    case CCB_POSITIONTYPE_RELATIVE_TOP_RIGHT:
        absPt.x = containerSize.width - pt.x;
        absPt.y = containerSize.height - pt.y;
        break;
    case CCB_POSITIONTYPE_RELATIVE_BOTTOM_RIGHT:
        absPt.x = containerSize.width - pt.x;
        absPt.y = pt.y;
        break;
    case CCB_POSITIONTYPE_PERCENT:
        absPt.x = containerSize.width * pt.x / 100.0;
        absPt.y = containerSize.height * pt.y / 100.0;
        break;
    case CCB_POSITIONTYPE_MULTIPLY_RESOLUTION:
        var ptScale = cc.BuilderReader.getResolutionScale();
        absPt.x = pt.x * ptScale;
        absPt.y = pt.y * ptScale;
        break;
    }
    // Unknown type falls through and returns (0, 0), as before.
    return absPt;
};

/**
 * Same conversion as cc.getAbsolutePosition, but takes the coordinates as
 * two scalars instead of a point object. Always returns a fresh point.
 */
cc._getAbsolutePosition = function (x, y, type, containerSize, propName) {
    var absPt = cc.p(0, 0);
    switch (type) {
    case CCB_POSITIONTYPE_RELATIVE_BOTTOM_LEFT:
        absPt.x = x;
        absPt.y = y;
        break;
    case CCB_POSITIONTYPE_RELATIVE_TOP_LEFT:
        absPt.x = x;
        absPt.y = containerSize.height - y;
        break;
    case CCB_POSITIONTYPE_RELATIVE_TOP_RIGHT:
        absPt.x = containerSize.width - x;
        absPt.y = containerSize.height - y;
        break;
    case CCB_POSITIONTYPE_RELATIVE_BOTTOM_RIGHT:
        absPt.x = containerSize.width - x;
        absPt.y = y;
        break;
    case CCB_POSITIONTYPE_PERCENT:
        absPt.x = containerSize.width * x / 100.0;
        absPt.y = containerSize.height * y / 100.0;
        break;
    case CCB_POSITIONTYPE_MULTIPLY_RESOLUTION:
        var xyScale = cc.BuilderReader.getResolutionScale();
        absPt.x = x * xyScale;
        absPt.y = y * xyScale;
        break;
    }
    return absPt;
};

/**
 * Apply a scale to a node, multiplying by the reader's resolution scale
 * when the CCB_POSITIONTYPE_MULTIPLY_RESOLUTION type is used.
 * Throws if node is falsy.
 */
cc.setRelativeScale = function (node, scaleX, scaleY, type, propName) {
    if (!node)
        throw new Error("cc.setRelativeScale(): node should be non-null");
    if (type === CCB_POSITIONTYPE_MULTIPLY_RESOLUTION) {
        var resolutionScale = cc.BuilderReader.getResolutionScale();
        scaleX *= resolutionScale;
        scaleY *= resolutionScale;
    }
    node.setScaleX(scaleX);
    node.setScaleY(scaleY);
};
{ "content_hash": "f23be66fb17ff814f7e0c10592c99e10", "timestamp": "", "source": "github", "line_count": 66, "max_line_length": 74, "avg_line_length": 37.71212121212121, "alnum_prop": 0.6022498995580554, "repo_name": "babestvl/FoodClash", "id": "79119a9447787e587c0416e901a4dbd08a83031e", "size": "3866", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "frameworks/cocos2d-html5/extensions/ccb-reader/CCBRelativePositioning.js", "mode": "33188", "license": "mit", "language": [ { "name": "CMake", "bytes": "11529" }, { "name": "HTML", "bytes": "4292" }, { "name": "JavaScript", "bytes": "4636509" } ] }
title: "Ambient Mobile Charging (2015)" date: 2015-08-01T12:00:00-08:00 draft: false short_name: "ambient-mobile-charging" tags: Android, applications, engineering management, program management, USB resources: - src: "ambient_charging.svg" name: ambient_charging - src: "ambient_charging_tiny.jpg" name: ambient_charging_tiny entry_media: - image: resource: "ambient_charging" lazyload: "ambient_charging_tiny" alt: "Illustration of people chatting at a bar while their phones charge on bar stools nearby" --- A startup offering mobile charging as a service was having quality problems with their devices. The service operates via a deployment of smart batteries throughout urban areas. When users encounter these smart batteries and connect their phones, they're prompted to sign into the startup's service to pay for the battery usage. Unfortunately, the batteries would not reliably connect to various Android phones. The issues seemed to be a combination of firmware and app-level bugs. Because of our prior Android accessory and app experience, Mindtribe was asked to help debug the issues. I managed this program and planned/oversaw a short sprint during which my team debugged the issues while I lent USB expertise to the program.
{ "content_hash": "67067cfd299271d1b84c9bede8f0a9d0", "timestamp": "", "source": "github", "line_count": 27, "max_line_length": 100, "avg_line_length": 46.592592592592595, "alnum_prop": 0.7901430842607313, "repo_name": "jerryryle/jerryryle.com", "id": "d7dcf0708d15acb60283766cb795a44e02a69415", "size": "1262", "binary": false, "copies": "1", "ref": "refs/heads/main", "path": "content/portfolio/2015-ambient-mobile-charging/index.md", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "CSS", "bytes": "2724" }, { "name": "HTML", "bytes": "17265" }, { "name": "JavaScript", "bytes": "9657" }, { "name": "Shell", "bytes": "1880" } ] }
var config = {}; config.connectionString = process.env.MONGODB_URL || 'mongodb://127.0.0.1:27017/peopleApi'; config.port = process.env.PORT || 2426; module.exports = config;
{ "content_hash": "d35ad3ad149b5a20bc7672dc5598ab9f", "timestamp": "", "source": "github", "line_count": 6, "max_line_length": 91, "avg_line_length": 29.166666666666668, "alnum_prop": 0.7142857142857143, "repo_name": "dclucas/poc-cross-domain-harvester", "id": "2c0df79831b0af7ad9b80bfb56efa43e3b0bc40b", "size": "175", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "peopleApi/app/config.js", "mode": "33188", "license": "mit", "language": [ { "name": "JavaScript", "bytes": "1687" } ] }
<?php /* * * * http://localhost:8080/pvcloud_backend/account_authenticate.php?email=jose.a.nunez@gmail.com&pwd=1234pass * * * */ error_reporting(E_ERROR); class simpleResponse { public $status = ""; public $message = ""; } require_once './DA/da_conf.php'; require_once './DA/da_helper.php'; require_once './DA/da_account.php'; require_once './DA/da_session.php'; /** * Validates email + token provcided in query string and returns simpleResponse object with status (OK, ERROR, EXCEPTION) and a message * * @return \simpleResponse */ function validate() { $response = new simpleResponse(); $account_id = filter_input(INPUT_GET, "account_id"); $token = filter_input(INPUT_GET, "token"); try { if ($account_id == 0 || $account_id == "" || $account_id == NULL || $token == "" || $token == NULL) { $response->status = "ERROR"; $response->message = "La sesión no es válida. Por favor autentíquese nuevamente"; } else { $session = da_session::GetAndValidateSession($account_id, $token); if ($session->account_id == $account_id && $session->token == $token) { $response->status = "OK"; $response->message = "Sesión válida"; } else { $response->status = "ERROR"; $response->message = "La sesión no es válida. Por favor autentíquese nuevamente"; } } } catch (Exception $ex) { $response->status = "EXCEPTION"; $response->message = $ex->getMessage(); } return $response; } include './inc/incJSONHeaders.php'; echo json_encode(validate());
{ "content_hash": "9bb56514226511fca0bd553a54e18a95", "timestamp": "", "source": "github", "line_count": 56, "max_line_length": 135, "avg_line_length": 29.625, "alnum_prop": 0.5877034358047016, "repo_name": "gsmahajan/pvcloud", "id": "c706be767888c9eaebc7bd5575198f23ea3c136c", "size": "1667", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "app/backend/session_validate.php", "mode": "33188", "license": "mit", "language": [ { "name": "ApacheConf", "bytes": "24139" }, { "name": "Arduino", "bytes": "154" }, { "name": "CSS", "bytes": "1056182" }, { "name": "HTML", "bytes": "75188" }, { "name": "JavaScript", "bytes": "94595" }, { "name": "PHP", "bytes": "162807" } ] }
""" signals are sent for each event Stripe sends to the app Stripe docs for Webhooks: https://stripe.com/docs/webhooks """ from django.dispatch import Signal webhook_processing_error = Signal(providing_args=["data", "exception"]) # A signal for each Event type. See https://stripe.com/docs/api/events/types WEBHOOK_SIGNALS = dict( [ (hook, Signal(providing_args=["event"])) for hook in [ # Update this by copy-pasting the "enabled_events" enum values from # https://raw.githubusercontent.com/stripe/openapi/master/openapi/spec3.json "account.application.authorized", "account.application.deauthorized", "account.external_account.created", "account.external_account.deleted", "account.external_account.updated", "account.updated", "application_fee.created", "application_fee.refund.updated", "application_fee.refunded", "balance.available", "capability.updated", "charge.captured", "charge.dispute.closed", "charge.dispute.created", "charge.dispute.funds_reinstated", "charge.dispute.funds_withdrawn", "charge.dispute.updated", "charge.expired", "charge.failed", "charge.pending", "charge.refund.updated", "charge.refunded", "charge.succeeded", "charge.updated", "checkout.session.async_payment_failed", "checkout.session.async_payment_succeeded", "checkout.session.completed", "coupon.created", "coupon.deleted", "coupon.updated", "credit_note.created", "credit_note.updated", "credit_note.voided", "customer.created", "customer.deleted", "customer.discount.created", "customer.discount.deleted", "customer.discount.updated", "customer.source.created", "customer.source.deleted", "customer.source.expiring", "customer.source.updated", "customer.subscription.created", "customer.subscription.deleted", "customer.subscription.pending_update_applied", "customer.subscription.pending_update_expired", "customer.subscription.trial_will_end", "customer.subscription.updated", "customer.tax_id.created", "customer.tax_id.deleted", "customer.tax_id.updated", "customer.updated", "file.created", 
"invoice.created", "invoice.deleted", "invoice.finalization_failed", "invoice.finalized", "invoice.marked_uncollectible", "invoice.paid", "invoice.payment_action_required", "invoice.payment_failed", "invoice.payment_succeeded", "invoice.sent", "invoice.upcoming", "invoice.updated", "invoice.voided", "invoiceitem.created", "invoiceitem.deleted", "invoiceitem.updated", "issuing_authorization.created", "issuing_authorization.request", "issuing_authorization.updated", "issuing_card.created", "issuing_card.updated", "issuing_cardholder.created", "issuing_cardholder.updated", "issuing_dispute.closed", "issuing_dispute.created", "issuing_dispute.funds_reinstated", "issuing_dispute.submitted", "issuing_dispute.updated", "issuing_transaction.created", "issuing_transaction.updated", "mandate.updated", "order.created", "order.payment_failed", "order.payment_succeeded", "order.updated", "order_return.created", "payment_intent.amount_capturable_updated", "payment_intent.canceled", "payment_intent.created", "payment_intent.payment_failed", "payment_intent.processing", "payment_intent.requires_action", "payment_intent.succeeded", "payment_method.attached", "payment_method.automatically_updated", "payment_method.detached", "payment_method.updated", "payout.canceled", "payout.created", "payout.failed", "payout.paid", "payout.updated", "person.created", "person.deleted", "person.updated", "plan.created", "plan.deleted", "plan.updated", "price.created", "price.deleted", "price.updated", "product.created", "product.deleted", "product.updated", "promotion_code.created", "promotion_code.updated", "radar.early_fraud_warning.created", "radar.early_fraud_warning.updated", "recipient.created", "recipient.deleted", "recipient.updated", "reporting.report_run.failed", "reporting.report_run.succeeded", "reporting.report_type.updated", "review.closed", "review.opened", "setup_intent.canceled", "setup_intent.created", "setup_intent.requires_action", "setup_intent.setup_failed", 
"setup_intent.succeeded", "sigma.scheduled_query_run.created", "sku.created", "sku.deleted", "sku.updated", "source.canceled", "source.chargeable", "source.failed", "source.mandate_notification", "source.refund_attributes_required", "source.transaction.created", "source.transaction.updated", "subscription_schedule.aborted", "subscription_schedule.canceled", "subscription_schedule.completed", "subscription_schedule.created", "subscription_schedule.expiring", "subscription_schedule.released", "subscription_schedule.updated", "tax_rate.created", "tax_rate.updated", "topup.canceled", "topup.created", "topup.failed", "topup.reversed", "topup.succeeded", "transfer.created", "transfer.failed", "transfer.paid", "transfer.reversed", "transfer.updated", # deprecated (no longer in events_types list) - TODO can be deleted? "checkout_beta.session_succeeded", "issuer_fraud_record.created", "payment_intent.requires_capture", "payment_method.card_automatically_updated", "issuing_dispute.created", "issuing_dispute.updated", "issuing_settlement.created", "issuing_settlement.updated", # special case? - TODO can be deleted? "ping", ] ] )
{ "content_hash": "b5f476d35ca8be9407a81f6611cdc01b", "timestamp": "", "source": "github", "line_count": 195, "max_line_length": 88, "avg_line_length": 37.251282051282054, "alnum_prop": 0.5410242290748899, "repo_name": "pydanny/dj-stripe", "id": "1a64040d3bc1e6a1fc7bd9fd1eba551e1c100e37", "size": "7264", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "djstripe/signals.py", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "HTML", "bytes": "21431" }, { "name": "Python", "bytes": "322111" } ] }
// Permission is hereby granted, free of charge, to any person obtaining a copy of
// this software and associated documentation files (the "Software"), to deal in
// the Software without restriction, including without limitation the rights to
// use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
// of the Software, and to permit persons to whom the Software is furnished to do
// so, subject to the following conditions:

// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.

// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.

namespace YamlDotNet.Core.Events
{
    /// <summary>
    /// Represents a mapping end event.
    /// </summary>
    public class MappingEnd : ParsingEvent
    {
        /// <summary>
        /// Initializes a new instance of the <see cref="MappingEnd"/> class
        /// with explicit marks.
        /// </summary>
        /// <param name="start">The start position of the event.</param>
        /// <param name="end">The end position of the event.</param>
        public MappingEnd(Mark start, Mark end)
            : base(start, end)
        {
        }

        /// <summary>
        /// Initializes a new instance of the <see cref="MappingEnd"/> class
        /// with empty marks.
        /// </summary>
        public MappingEnd()
            : this(Mark.Empty, Mark.Empty)
        {
        }

        /// <summary>
        /// Gets a value indicating the variation of depth caused by this event.
        /// The value can be either -1, 0 or 1. For start events, it will be 1,
        /// for end events, it will be -1, and for the remaining events, it will be 0.
        /// A mapping end always closes one level, hence -1.
        /// </summary>
        public override int NestingIncrease
        {
            get { return -1; }
        }

        /// <summary>
        /// Gets the event type, which allows for simpler type comparisons.
        /// </summary>
        internal override EventType Type
        {
            get { return EventType.MappingEnd; }
        }

        /// <summary>
        /// Returns a <see cref="T:System.String"/> that represents the current <see cref="T:System.Object"/>.
        /// </summary>
        /// <returns>
        /// A <see cref="T:System.String"/> that represents the current <see cref="T:System.Object"/>.
        /// </returns>
        public override string ToString()
        {
            return "Mapping end";
        }

        /// <summary>
        /// Invokes run-time type specific Visit() method of the specified visitor.
        /// </summary>
        /// <param name="visitor">visitor, may not be null.</param>
        public override void Accept(IParsingEventVisitor visitor)
        {
            visitor.Visit(this);
        }
    }
}
{ "content_hash": "c9aa63379019b45607843b95940222ab", "timestamp": "", "source": "github", "line_count": 85, "max_line_length": 110, "avg_line_length": 38.01176470588236, "alnum_prop": 0.602909316001238, "repo_name": "bi3mer/SeniorProject", "id": "65e070882d8949b2231ebea07fcdeeb563f8ceb0", "size": "3343", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "Assets/Plugins/YamlDotNet/Core/Events/MappingEnd.cs", "mode": "33188", "license": "mit", "language": [ { "name": "C#", "bytes": "5530081" }, { "name": "C++", "bytes": "946" }, { "name": "GLSL", "bytes": "21345" }, { "name": "HLSL", "bytes": "204971" }, { "name": "Mask", "bytes": "33634" }, { "name": "Objective-C", "bytes": "446" }, { "name": "Objective-C++", "bytes": "5963" }, { "name": "ShaderLab", "bytes": "611878" }, { "name": "Smalltalk", "bytes": "106" } ] }
FROM balenalib/orangepi-plus2-ubuntu:xenial-run

# A few reasons for installing distribution-provided OpenJDK:
#
#  1. Oracle.  Licensing prevents us from redistributing the official JDK.
#
#  2. Compiling OpenJDK also requires the JDK to be installed, and it gets
#     really hairy.
#
#     For some sample build times, see Debian's buildd logs:
#       https://buildd.debian.org/status/logs.php?pkg=openjdk-7

# Default to UTF-8 file.encoding
ENV LANG C.UTF-8

# add a simple script that can auto-detect the appropriate JAVA_HOME value
# based on whether the JDK or only the JRE is installed
RUN { \
		echo '#!/bin/sh'; \
		echo 'set -e'; \
		echo; \
		echo 'dirname "$(dirname "$(readlink -f "$(which javac || which java)")")"'; \
	} > /usr/local/bin/docker-java-home \
	&& chmod +x /usr/local/bin/docker-java-home

# do some fancy footwork to create a JAVA_HOME that's cross-architecture-safe
RUN ln -svT "/usr/lib/jvm/java-11-openjdk-$(dpkg --print-architecture)" /docker-java-home
ENV JAVA_HOME /docker-java-home

# Install OpenJDK 11 from the openjdk-r PPA (not in xenial's stock archive).
RUN set -ex; \
	\
# deal with slim variants not having man page directories (which causes "update-alternatives" to fail)
	if [ ! -d /usr/share/man/man1 ]; then \
		mkdir -p /usr/share/man/man1; \
	fi; \
	\
	apt-get update; \
	apt-get install -y --no-install-recommends \
		software-properties-common \
	; \
	add-apt-repository ppa:openjdk-r/ppa; \
	apt-get update; \
	apt-get install -y --no-install-recommends \
		openjdk-11-jdk \
	; \
	rm -rf /var/lib/apt/lists/*; \
	\
# verify that "docker-java-home" returns what we expect
	[ "$(readlink -f "$JAVA_HOME")" = "$(docker-java-home)" ]; \
	\
# update-alternatives so that future installs of other OpenJDK versions don't change /usr/bin/java
	update-alternatives --get-selections | awk -v home="$(readlink -f "$JAVA_HOME")" 'index($3, home) == 1 { $2 = "manual"; print | "update-alternatives --set-selections" }'; \
# ... and verify that it actually worked for one of the alternatives we care about
	update-alternatives --query java | grep -q 'Status: manual'

# Placeholder CMD: points the user at the docs instead of failing silently.
CMD ["echo","'No CMD command was set in Dockerfile! Details about CMD command could be found in Dockerfile Guide section in our Docs. Here's the link: https://balena.io/docs"]

# Record image details so `balena-info` can display them at runtime.
RUN [ ! -d /.balena/messages ] && mkdir -p /.balena/messages; echo 'Here are a few details about this Docker image (For more information please visit https://www.balena.io/docs/reference/base-images/base-images/): \nArchitecture: ARM v7 \nOS: Ubuntu xenial \nVariant: run variant \nDefault variable(s): UDEV=off \nThe following software stack is preinstalled: \nOpenJDK v11-jdk \nExtra features: \n- Easy way to install packages with `install_packages <package-name>` command \n- Run anywhere with cross-build feature (for ARM only) \n- Keep the container idling with `balena-idle` command \n- Show base image details with `balena-info` command' > /.balena/messages/image-info

# Shim /bin/sh so the first shell invocation prints the image info banner,
# then restores the real shell and re-executes the requested command.
RUN echo '#!/bin/sh.real\nbalena-info\nrm -f /bin/sh\ncp /bin/sh.real /bin/sh\n/bin/sh "$@"' > /bin/sh-shim \
	&& chmod +x /bin/sh-shim \
	&& cp /bin/sh /bin/sh.real \
	&& mv /bin/sh-shim /bin/sh
{ "content_hash": "20009c3ddb6d7f3706e027176ad324e7", "timestamp": "", "source": "github", "line_count": 63, "max_line_length": 677, "avg_line_length": 48.93650793650794, "alnum_prop": 0.7025624391826143, "repo_name": "nghiant2710/base-images", "id": "5f6fa66f86134ea297dbbac964766a5b6a7dd264", "size": "3104", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "balena-base-images/openjdk/orangepi-plus2/ubuntu/xenial/11-jdk/run/Dockerfile", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Dockerfile", "bytes": "144558581" }, { "name": "JavaScript", "bytes": "16316" }, { "name": "Shell", "bytes": "368690" } ] }
/* +----------------------------------------------------------------------+ | PHP Version 5 | +----------------------------------------------------------------------+ | This source file is subject to version 3.01 of the PHP license, | | that is bundled with this package in the file LICENSE, and is | | available through the world-wide-web at the following url: | | http://www.php.net/license/3_01.txt | | If you did not receive a copy of the PHP license and are unable to | | obtain it through the world-wide-web, please send a note to | | license@php.net so we can mail you a copy immediately. | +----------------------------------------------------------------------+ | Authors: Scott MacVicar <scottmac@php.net> | +----------------------------------------------------------------------+ */ #ifdef HAVE_CONFIG_H #include "config.h" #endif #include "spoofchecker_class.h" #include "spoofchecker.h" #include <unicode/uspoof.h> /* {{{ spoofchecker_register_constants * Register constants */ void spoofchecker_register_constants(INIT_FUNC_ARGS) { if (!Spoofchecker_ce_ptr) { zend_error(E_ERROR, "Spoofchecker class not defined"); return; } #define SPOOFCHECKER_EXPOSE_CLASS_CONST(x) zend_declare_class_constant_long(Spoofchecker_ce_ptr, ZEND_STRS( #x ) - 1, USPOOF_##x TSRMLS_CC); SPOOFCHECKER_EXPOSE_CLASS_CONST(SINGLE_SCRIPT_CONFUSABLE) SPOOFCHECKER_EXPOSE_CLASS_CONST(MIXED_SCRIPT_CONFUSABLE) SPOOFCHECKER_EXPOSE_CLASS_CONST(WHOLE_SCRIPT_CONFUSABLE) SPOOFCHECKER_EXPOSE_CLASS_CONST(ANY_CASE) SPOOFCHECKER_EXPOSE_CLASS_CONST(SINGLE_SCRIPT) SPOOFCHECKER_EXPOSE_CLASS_CONST(INVISIBLE) SPOOFCHECKER_EXPOSE_CLASS_CONST(CHAR_LIMIT) #undef SPOOFCHECKER_EXPOSE_CLASS_CONST } /* }}} */ /* * Local variables: * tab-width: 4 * c-basic-offset: 4 * End: * vim600: noet sw=4 ts=4 fdm=marker * vim<600: noet sw=4 ts=4 */
{ "content_hash": "4f1568cc0fdd94d7bb1dbb9d7e3d4f82", "timestamp": "", "source": "github", "line_count": 60, "max_line_length": 141, "avg_line_length": 34.06666666666667, "alnum_prop": 0.5547945205479452, "repo_name": "lunaczp/learning", "id": "42a014a90e9d24019ed7ccd7ce5a01e6a4b9c17b", "size": "2044", "binary": false, "copies": "8", "ref": "refs/heads/master", "path": "language/c/testPhpSrc/php-5.6.17/ext/intl/spoofchecker/spoofchecker.c", "mode": "33188", "license": "mit", "language": [ { "name": "ASP", "bytes": "4526" }, { "name": "Assembly", "bytes": "14500403" }, { "name": "Awk", "bytes": "21252" }, { "name": "Batchfile", "bytes": "2526" }, { "name": "C", "bytes": "381839655" }, { "name": "C++", "bytes": "10162228" }, { "name": "CMake", "bytes": "68196" }, { "name": "CSS", "bytes": "3943" }, { "name": "D", "bytes": "1022" }, { "name": "DTrace", "bytes": "4528" }, { "name": "Fortran", "bytes": "1834" }, { "name": "GAP", "bytes": "4344" }, { "name": "GDB", "bytes": "31864" }, { "name": "Gnuplot", "bytes": "148" }, { "name": "Go", "bytes": "732" }, { "name": "HTML", "bytes": "86756" }, { "name": "Java", "bytes": "8286" }, { "name": "JavaScript", "bytes": "238365" }, { "name": "Lex", "bytes": "121233" }, { "name": "Limbo", "bytes": "1609" }, { "name": "Lua", "bytes": "96" }, { "name": "M4", "bytes": "483288" }, { "name": "Makefile", "bytes": "1915601" }, { "name": "Nix", "bytes": "180099" }, { "name": "Objective-C", "bytes": "1742504" }, { "name": "OpenEdge ABL", "bytes": "4238" }, { "name": "PHP", "bytes": "27984629" }, { "name": "Pascal", "bytes": "74868" }, { "name": "Perl", "bytes": "317465" }, { "name": "Perl 6", "bytes": "6916" }, { "name": "Python", "bytes": "21547" }, { "name": "R", "bytes": "1112" }, { "name": "Roff", "bytes": "435717" }, { "name": "Scilab", "bytes": "22980" }, { "name": "Shell", "bytes": "468206" }, { "name": "UnrealScript", "bytes": "20840" }, { "name": "Vue", "bytes": "563" }, { "name": "XSLT", "bytes": "7946" }, { "name": "Yacc", 
"bytes": "172805" }, { "name": "sed", "bytes": "2073" } ] }
<?php namespace v310\AddCustomNavElements; class CreateCustomNavElementToTextTable extends \AbstractMigration { function up($hesk_settings) { $this->executeQuery("CREATE TABLE `" . hesk_dbEscape($hesk_settings['db_pfix']) . "custom_nav_element_to_text` (id INT NOT NULL AUTO_INCREMENT PRIMARY KEY, nav_element_id INT NOT NULL, language VARCHAR(200) NOT NULL, text VARCHAR(200) NOT NULL, subtext VARCHAR(200))"); } function down($hesk_settings) { $this->executeQuery("DROP TABLE `" . hesk_dbEscape($hesk_settings['db_pfix']) . "custom_nav_element_to_text`"); } }
{ "content_hash": "edbafbf6f6606d24e226150eb5316989", "timestamp": "", "source": "github", "line_count": 20, "max_line_length": 119, "avg_line_length": 33.1, "alnum_prop": 0.6344410876132931, "repo_name": "mkoch227/Mods-for-HESK", "id": "87eff45ed8688e6a4749d5cc8fc9178e43ac682d", "size": "662", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "install/migrations/v310/AddCustomNavElements/CreateCustomNavElementToTextTable.php", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "55466" }, { "name": "HTML", "bytes": "34223" }, { "name": "JavaScript", "bytes": "121138" }, { "name": "PHP", "bytes": "1966630" } ] }
package org.hoteia.qalingo.core.solr.bean; import java.util.Date; import org.apache.solr.client.solrj.beans.Field; public class CustomerSolr { @Field private Long id; @Field private String lastname; @Field private String firstname; @Field private String email; @Field private String gender ; @Field private String title; @Field private String address; @Field private String postalCode; @Field private String city; @Field private String countryCode; @Field private String addressUniqueKey; @Field private String random; @Field("datecreate") private Date dateCreate; @Field("dateupdate") private Date dateUpdate; public String getEmail() { return email; } public void setEmail(String email) { this.email = email; } public String getGender() { return gender; } public void setGender(String gender) { this.gender = gender; } public String getTitle() { return title; } public void setTitle(String title) { this.title = title; } public Long getId() { return id; } public void setId(Long id) { this.id = id; } public String getLastname() { return lastname; } public void setLastname(String lastname) { this.lastname = lastname; } public String getFirstname() { return firstname; } public void setFirstname(String firstname) { this.firstname = firstname; } public String getAddress() { return address; } public void setAddress(String address) { this.address = address; } public String getPostalCode() { return postalCode; } public void setPostalCode(String postalCode) { this.postalCode = postalCode; } public String getCity() { return city; } public void setCity(String city) { this.city = city; } public String getCountryCode() { return countryCode; } public void setCountryCode(String countryCode) { this.countryCode = countryCode; } public String getAddressUniqueKey() { return addressUniqueKey; } public void setAddressUniqueKey(String addressUniqueKey) { this.addressUniqueKey = addressUniqueKey; } public String getRandom() { return random; } public void setRandom(String random) { this.random = 
random; } public Date getDateCreate() { return dateCreate; } public void setDateCreate(Date dateCreate) { this.dateCreate = dateCreate; } public Date getDateUpdate() { return dateUpdate; } public void setDateUpdate(Date dateUpdate) { this.dateUpdate = dateUpdate; } @Override public int hashCode() { final int prime = 31; int result = 1; result = prime * result + ((id == null) ? 0 : id.hashCode()); return result; } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (getClass() != obj.getClass()) return false; CustomerSolr other = (CustomerSolr) obj; if (id == null) { if (other.id != null) return false; } else if (!id.equals(other.id)) return false; return true; } @Override public String toString() { return "CustomerSolr [id=" + id + ", lastname=" + lastname + ", firstname=" + firstname + ", email=" + email + ", gender=" + gender + ", title=" + title + ", dateCreate=" + dateCreate + ", dateUpdate=" + dateUpdate + "]"; } }
{ "content_hash": "2c9fd5fdb0063a306bca429825bcd14c", "timestamp": "", "source": "github", "line_count": 195, "max_line_length": 191, "avg_line_length": 20.08205128205128, "alnum_prop": 0.5653728294177732, "repo_name": "qalingo/qalingo-engine", "id": "90a416b2679b4e3fadccb3efb63fbff99088d546", "size": "4260", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "apis/api-core/api-core-solr/src/main/java/org/hoteia/qalingo/core/solr/bean/CustomerSolr.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Java", "bytes": "5213430" } ] }
title: "Drupal, qu'on lui coupe la tête&nbsp;!" description: | Retour d'expérience sur la refonte du site internet et de la plateforme de contribution d'un leader dans la location de vacances en mobile-home : passage du CMS Drupal 7 à une architecture micro-services en NodeJS et React, avec migration vers Drupal 8 headless. speaker: - sylvain_etienne # reference correspondant à celle du fichier spearkers dans _data type: lt symbol: expe # icone date: 2017-05-19 11:50:00 +0200 # heure de début du talk lang: fr # langue du talk display-order: 6 # ordre d'affichage sur le site video: https://vimeo.com/224596352 slides: # url du support de présentation ---
{ "content_hash": "bf9d4bb7b53e40f8ea898e1bb89f60d7", "timestamp": "", "source": "github", "line_count": 13, "max_line_length": 265, "avg_line_length": 51.23076923076923, "alnum_prop": 0.7612612612612613, "repo_name": "sudweb/2017", "id": "11b2c8fcef98a1b6ee814c943719e77f43010b34", "size": "676", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "_talks/lightning-talks/micro-services-le-grand-saut.md", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "189278" }, { "name": "HTML", "bytes": "89877" }, { "name": "JSONiq", "bytes": "627" }, { "name": "JavaScript", "bytes": "910" }, { "name": "Ruby", "bytes": "832" }, { "name": "Shell", "bytes": "744" } ] }
namespace Envoy { namespace Server { StatsRenderTestBase::StatsRenderTestBase() : alloc_(symbol_table_), store_(alloc_) { store_.addSink(sink_); store_.initializeThreading(main_thread_dispatcher_, tls_); } StatsRenderTestBase::~StatsRenderTestBase() { tls_.shutdownGlobalThreading(); store_.shutdownThreading(); tls_.shutdownThread(); } Stats::ParentHistogram& StatsRenderTestBase::populateHistogram(const std::string& name, const std::vector<uint64_t>& vals) { Stats::Histogram& h = store_.histogramFromString(name, Stats::Histogram::Unit::Unspecified); for (uint64_t val : vals) { h.recordValue(val); } store_.mergeHistograms([]() -> void {}); return dynamic_cast<Stats::ParentHistogram&>(h); } } // namespace Server } // namespace Envoy
{ "content_hash": "0476e565db72b1dcb17e6b2fa6a0f2ab", "timestamp": "", "source": "github", "line_count": 26, "max_line_length": 99, "avg_line_length": 31.96153846153846, "alnum_prop": 0.6654632972322503, "repo_name": "envoyproxy/envoy", "id": "23923da1e00c6c554e9ac927b8a048b594291b89", "size": "886", "binary": false, "copies": "1", "ref": "refs/heads/main", "path": "test/server/admin/stats_render_test_base.cc", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Batchfile", "bytes": "439" }, { "name": "C", "bytes": "54172" }, { "name": "C++", "bytes": "36279350" }, { "name": "CSS", "bytes": "884" }, { "name": "Dockerfile", "bytes": "891" }, { "name": "Emacs Lisp", "bytes": "966" }, { "name": "Go", "bytes": "558" }, { "name": "HTML", "bytes": "582" }, { "name": "Java", "bytes": "1309139" }, { "name": "JavaScript", "bytes": "76" }, { "name": "Jinja", "bytes": "46306" }, { "name": "Kotlin", "bytes": "311319" }, { "name": "Makefile", "bytes": "303" }, { "name": "NASL", "bytes": "327095" }, { "name": "Objective-C", "bytes": "95941" }, { "name": "PureBasic", "bytes": "472" }, { "name": "Python", "bytes": "630897" }, { "name": "Ruby", "bytes": "47" }, { "name": "Rust", "bytes": "38041" }, { "name": "Shell", "bytes": "194810" }, { "name": "Smarty", "bytes": "3528" }, { "name": "Starlark", "bytes": "2229814" }, { "name": "Swift", "bytes": "307285" }, { "name": "Thrift", "bytes": "748" } ] }
using System; using System.Collections.Generic; using System.Linq; using System.Text; using System.Threading.Tasks; using System.Windows.Input; namespace ModernUI.ExtendedToolkit { /// <summary> /// Wizard Commands /// </summary> public static class WizardCommands { private static RoutedCommand _cancelCommand = new RoutedCommand(); public static RoutedCommand Cancel { get { return _cancelCommand; } } private static RoutedCommand _finishCommand = new RoutedCommand(); public static RoutedCommand Finish { get { return _finishCommand; } } private static RoutedCommand _helpCommand = new RoutedCommand(); public static RoutedCommand Help { get { return _helpCommand; } } private static RoutedCommand _nextPageCommand = new RoutedCommand(); public static RoutedCommand NextPage { get { return _nextPageCommand; } } private static RoutedCommand _previousPageCommand = new RoutedCommand(); public static RoutedCommand PreviousPage { get { return _previousPageCommand; } } private static RoutedCommand _selectPageCommand = new RoutedCommand(); public static RoutedCommand SelectPage { get { return _selectPageCommand; } } } }
{ "content_hash": "d5886dd7e0c3dd27db03b3dcb2a7860d", "timestamp": "", "source": "github", "line_count": 69, "max_line_length": 80, "avg_line_length": 25.028985507246375, "alnum_prop": 0.5136074116965836, "repo_name": "jkmchinese/ModernUI", "id": "ab3933f4866d3441611e130d4779c44a7009d677", "size": "1729", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "Genew.ModernUI/Genew.ModernUI/ExtendedToolkit/Wizard/WizardCommands.cs", "mode": "33188", "license": "mit", "language": [ { "name": "C#", "bytes": "1815472" } ] }
<?php header('Content-Type: application/json'); require('localVars.php'); require($root_dir . 'class/Autoloader.php'); require($root_dir . 'config/wpcc_config.php'); try { echo \Wpcc\Tests::launchPortailTest($phpwpcc_config['projectName'], $root_dir); } catch (\Exception $e) { echo json_encode(array('success' => false, 'content' => $e->getMessage())); }
{ "content_hash": "2f72fc6df8e0c8581522a6777072b5b7", "timestamp": "", "source": "github", "line_count": 10, "max_line_length": 87, "avg_line_length": 40, "alnum_prop": 0.6175, "repo_name": "ousamabenyounes/PHPWpcc", "id": "0c8a2284557f5e040788c7ef247734d1b696911d", "size": "400", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "ajax/launchPortailTest.php", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "76085" }, { "name": "HTML", "bytes": "46807" }, { "name": "JavaScript", "bytes": "151254" }, { "name": "PHP", "bytes": "67450" }, { "name": "Shell", "bytes": "1099" }, { "name": "Smarty", "bytes": "9409" } ] }
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.facebook.presto; import com.facebook.presto.connector.ConnectorId; import com.facebook.presto.metadata.SessionPropertyManager; import com.facebook.presto.spi.ConnectorSession; import com.facebook.presto.spi.PrestoException; import com.facebook.presto.spi.security.ConnectorIdentity; import com.facebook.presto.spi.type.TimeZoneKey; import com.google.common.collect.ImmutableMap; import java.util.Locale; import java.util.Map; import static com.facebook.presto.spi.StandardErrorCode.INVALID_SESSION_PROPERTY; import static com.google.common.base.MoreObjects.toStringHelper; import static java.lang.String.format; import static java.util.Objects.requireNonNull; public class FullConnectorSession implements ConnectorSession { private final String queryId; private final ConnectorIdentity identity; private final TimeZoneKey timeZoneKey; private final Locale locale; private final long startTime; private final Map<String, String> properties; private final ConnectorId connectorId; private final String catalog; private final SessionPropertyManager sessionPropertyManager; private final boolean isLegacyTimestamp; public FullConnectorSession( String queryId, ConnectorIdentity identity, TimeZoneKey timeZoneKey, Locale locale, long startTime, boolean isLegacyTimestamp) { this.queryId = requireNonNull(queryId, "queryId is null"); this.identity = requireNonNull(identity, "identity is null"); this.timeZoneKey = requireNonNull(timeZoneKey, 
"timeZoneKey is null"); this.locale = requireNonNull(locale, "locale is null"); this.startTime = startTime; this.properties = null; this.connectorId = null; this.catalog = null; this.sessionPropertyManager = null; this.isLegacyTimestamp = isLegacyTimestamp; } public FullConnectorSession( String queryId, ConnectorIdentity identity, TimeZoneKey timeZoneKey, Locale locale, long startTime, Map<String, String> properties, ConnectorId connectorId, String catalog, SessionPropertyManager sessionPropertyManager, boolean isLegacyTimestamp) { this.queryId = requireNonNull(queryId, "queryId is null"); this.identity = requireNonNull(identity, "identity is null"); this.timeZoneKey = requireNonNull(timeZoneKey, "timeZoneKey is null"); this.locale = requireNonNull(locale, "locale is null"); this.startTime = startTime; this.properties = ImmutableMap.copyOf(requireNonNull(properties, "properties is null")); this.connectorId = requireNonNull(connectorId, "connectorId is null"); this.catalog = requireNonNull(catalog, "catalog is null"); this.sessionPropertyManager = requireNonNull(sessionPropertyManager, "sessionPropertyManager is null"); this.isLegacyTimestamp = isLegacyTimestamp; } @Override public String getQueryId() { return queryId; } @Override public ConnectorIdentity getIdentity() { return identity; } @Override public TimeZoneKey getTimeZoneKey() { return timeZoneKey; } @Override public Locale getLocale() { return locale; } @Override public long getStartTime() { return startTime; } @Override public boolean isLegacyTimestamp() { return isLegacyTimestamp; } @Override public <T> T getProperty(String propertyName, Class<T> type) { if (properties == null) { throw new PrestoException(INVALID_SESSION_PROPERTY, format("Unknown session property: %s.%s", catalog, propertyName)); } return sessionPropertyManager.decodeCatalogPropertyValue(connectorId, catalog, propertyName, properties.get(propertyName), type); } @Override public String toString() { return toStringHelper(this) 
.omitNullValues() .add("queryId", queryId) .add("user", getUser()) .add("timeZoneKey", timeZoneKey) .add("locale", locale) .add("startTime", startTime) .add("properties", properties) .toString(); } }
{ "content_hash": "e12e917c9f69af1a82f8429e4d4d1f4b", "timestamp": "", "source": "github", "line_count": 151, "max_line_length": 137, "avg_line_length": 33.019867549668874, "alnum_prop": 0.6760930605695948, "repo_name": "troels/nz-presto", "id": "81d67d492fe603c2c7787d34e4c59de34d0b5c71", "size": "4986", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "presto-main/src/main/java/com/facebook/presto/FullConnectorSession.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "ANTLR", "bytes": "27894" }, { "name": "CSS", "bytes": "14164" }, { "name": "HTML", "bytes": "71989" }, { "name": "Java", "bytes": "25099069" }, { "name": "JavaScript", "bytes": "173258" }, { "name": "Makefile", "bytes": "8086" }, { "name": "PLSQL", "bytes": "14194" }, { "name": "Python", "bytes": "7726" }, { "name": "SQLPL", "bytes": "7737" }, { "name": "Shell", "bytes": "29775" } ] }
<!DOCTYPE html> <!--[if lt IE 7]> <html class="no-js lt-ie9 lt-ie8 lt-ie7"> <![endif]--> <!--[if IE 7]> <html class="no-js lt-ie9 lt-ie8"> <![endif]--> <!--[if IE 8]> <html class="no-js lt-ie9"> <![endif]--> <!--[if gt IE 8]><!--> <html class="no-js"> <!--<![endif]--> <head> <meta charset="utf-8"> <meta http-equiv="X-UA-Compatible" content="IE=edge"> <title></title> <meta name="description" content=""> <meta name="viewport" content="width=device-width, initial-scale=1"> <!-- Place favicon.ico and apple-touch-icon.png in the root directory --> <link rel="stylesheet" href="css/normalize.css"> <link rel="stylesheet" href="css/main.css"> <script src="js/vendor/modernizr-2.6.2.min.js"></script> </head> <body> <!--[if lt IE 7]> <p class="browsehappy">You are using an <strong>outdated</strong> browser. Please <a href="http://browsehappy.com/">upgrade your browser</a> to improve your experience.</p> <![endif]--> <!-- Add your site or application content here --> <p>This is official workplace of http://thenextbyte.net!</p> <h4><i>Hang ON... </i>Surprise is ahead.</h4> <p>ready to stsrt a new home page</p> <script src="//ajax.googleapis.com/ajax/libs/jquery/1.10.2/jquery.min.js"></script> <script>window.jQuery || document.write('<script src="js/vendor/jquery-1.10.2.min.js"><\/script>')</script> <script src="js/plugins.js"></script> <script src="js/main.js"></script> <!-- Google Analytics: change UA-XXXXX-X to be your site's ID. --> <script> (function(b,o,i,l,e,r){b.GoogleAnalyticsObject=l;b[l]||(b[l]= function(){(b[l].q=b[l].q||[]).push(arguments)});b[l].l=+new Date; e=o.createElement(i);r=o.getElementsByTagName(i)[0]; e.src='//www.google-analytics.com/analytics.js'; r.parentNode.insertBefore(e,r)}(window,document,'script','ga')); ga('create','UA-XXXXX-X');ga('send','pageview'); </script> </body> </html>
{ "content_hash": "e215a35ef2093b80f47ee51768cec71a", "timestamp": "", "source": "github", "line_count": 44, "max_line_length": 184, "avg_line_length": 48.45454545454545, "alnum_prop": 0.5651969981238274, "repo_name": "prat-hbk/thenextbyte", "id": "687353daedc1c870179ead44d47330fdd6d70192", "size": "2132", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "index.html", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "15330" }, { "name": "JavaScript", "bytes": "734" } ] }
#ifndef _LPXCB_DATA_H_ #define _LPXCB_DATA_H_ #include <xcb/xcb.h> #include <xcb/xproto.h> #include <xcb/damage.h> #include <xcb/xfixes.h> #include <xcb/xcb_aux.h> /* Holds information for a window */ typedef struct lpxcb_window_t { /* The connection associated with this window */ xcb_connection_t *conn; /* The id of this window */ xcb_drawable_t window; /* The id of this parent's window, if it has one. */ xcb_drawable_t parent; /* Rectangle used to manage damage */ xcb_rectangle_t damage_rect; /* Damage associated with this window */ xcb_damage_damage_t damage; /* Region of window marked as damage area */ xcb_xfixes_region_t region; xcb_xfixes_region_t repair; /* The next window with damage */ struct lpxcb_window_t *next_damaged; /* Should we have something for the pixmap? */ } lpxcb_window_t; /* Node to hold window in table */ typedef struct table_node_t { lpxcb_window_t *entry; struct table_node_t *prev; struct table_node_t *next; } table_node_t; /* Connection */ typedef struct lpxcb_connection_t { /* The XCB connection */ xcb_connection_t *conn; /* Damaged lpxcb_window_t */ lpxcb_window_t *damaged; } lpxcb_connection_t; /* Node for connection data structure */ typedef struct conn_node_t { lpxcb_connection_t *lpxcb_conn; struct conn_node_t *next; struct conn_node_t *prev; } conn_node_t; #endif /* _LPXCB_DATA_H_ */
{ "content_hash": "035ed103e843dc829c52c68aa6f7bb10", "timestamp": "", "source": "github", "line_count": 58, "max_line_length": 56, "avg_line_length": 25.103448275862068, "alnum_prop": 0.6600274725274725, "repo_name": "TeamEuclid/euclid", "id": "ba11a6f8cfdc177a7fea64def43d2ef0e8f41230", "size": "1518", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "experimental/jvanderw/lp-xcb/lpxcb_data.h", "mode": "33188", "license": "mit", "language": [ { "name": "C", "bytes": "148971" }, { "name": "Objective-C", "bytes": "61127" }, { "name": "Shell", "bytes": "900" } ] }
<!DOCTYPE html> <html> <head> <meta charset="utf-8"> <meta http-equiv="X-UA-Compatible" content="IE=edge,chrome=1"> <meta name="viewport" content="width=device-width"> <link rel="stylesheet" href="node_modules/bootstrap-css-only/css/bootstrap.min.css"> <link rel="stylesheet" href="node_modules/ng-table/bundles/ng-table.css"> <script src="node_modules/angular/angular.js"></script> <script src="node_modules/ng-table/bundles/ng-table.js"></script> <script src="index.js"></script> <script src="src/my-table.component.js"></script> </head> <body ng-app="demo-app" ng-strict-di> <h1>ng-table sample app (ES5)</h1> <my-table></my-table> </body> </html>
{ "content_hash": "351d474d8bd22d80ebef048700082cf3", "timestamp": "", "source": "github", "line_count": 25, "max_line_length": 88, "avg_line_length": 28.04, "alnum_prop": 0.6633380884450785, "repo_name": "nizamiftahul/ci_p1", "id": "71a2e152f03b2d79f117be2a7be0600617ee0937", "size": "701", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "bower_components/ng-table/demo-apps/es5/index.html", "mode": "33188", "license": "mit", "language": [ { "name": "ApacheConf", "bytes": "265" }, { "name": "CSS", "bytes": "903300" }, { "name": "HTML", "bytes": "11163728" }, { "name": "JavaScript", "bytes": "3032614" }, { "name": "PHP", "bytes": "2692351" }, { "name": "Python", "bytes": "32324" } ] }
package ch.epfl.data.squall.examples.imperative.debug; import java.util.Arrays; import java.util.List; import java.util.Map; import org.apache.log4j.Logger; import ch.epfl.data.squall.components.DataSourceComponent; import ch.epfl.data.squall.components.EquiJoinComponent; import ch.epfl.data.squall.components.OperatorComponent; import ch.epfl.data.squall.expressions.ColumnReference; import ch.epfl.data.squall.operators.AggregateCountOperator; import ch.epfl.data.squall.operators.AggregateSumOperator; import ch.epfl.data.squall.operators.ProjectOperator; import ch.epfl.data.squall.query_plans.QueryBuilder; import ch.epfl.data.squall.query_plans.QueryPlan; import ch.epfl.data.squall.types.IntegerType; public class HyracksL3BatchPlan extends QueryPlan { private static Logger LOG = Logger.getLogger(HyracksL3BatchPlan.class); private final QueryBuilder _queryBuilder = new QueryBuilder(); private static final IntegerType _ic = new IntegerType(); public HyracksL3BatchPlan(String dataPath, String extension, Map conf) { // ------------------------------------------------------------------------------------- // start of query plan filling final ProjectOperator projectionCustomer = new ProjectOperator( new int[] { 0, 6 }); final List<Integer> hashCustomer = Arrays.asList(0); final DataSourceComponent relationCustomer = new DataSourceComponent( "CUSTOMER", dataPath + "customer" + extension).add( projectionCustomer).setOutputPartKey(hashCustomer); _queryBuilder.add(relationCustomer); // ------------------------------------------------------------------------------------- final ProjectOperator projectionOrders = new ProjectOperator( new int[] { 1 }); final List<Integer> hashOrders = Arrays.asList(0); final DataSourceComponent relationOrders = new DataSourceComponent( "ORDERS", dataPath + "orders" + extension) .add(projectionOrders).setOutputPartKey(hashOrders); _queryBuilder.add(relationOrders); // 
------------------------------------------------------------------------------------- final AggregateCountOperator postAgg = new AggregateCountOperator(conf) .setGroupByColumns(Arrays.asList(1)); final List<Integer> hashIndexes = Arrays.asList(0); final EquiJoinComponent CUSTOMER_ORDERSjoin = new EquiJoinComponent( relationCustomer, relationOrders).add(postAgg) .setOutputPartKey(hashIndexes).setBatchOutputMillis(1000); _queryBuilder.add(CUSTOMER_ORDERSjoin); // ------------------------------------------------------------------------------------- final AggregateSumOperator agg = new AggregateSumOperator( new ColumnReference(_ic, 1), conf).setGroupByColumns(Arrays .asList(0)); OperatorComponent oc = new OperatorComponent(CUSTOMER_ORDERSjoin, "COUNTAGG").add(agg).setFullHashList( Arrays.asList("FURNITURE", "BUILDING", "MACHINERY", "HOUSEHOLD", "AUTOMOBILE")); _queryBuilder.add(oc); // ------------------------------------------------------------------------------------- } @Override public QueryBuilder getQueryPlan() { return _queryBuilder; } }
{ "content_hash": "c4f90c7b31f475d8e9489d138b9934e9", "timestamp": "", "source": "github", "line_count": 79, "max_line_length": 89, "avg_line_length": 39.924050632911396, "alnum_prop": 0.6540900443880786, "repo_name": "epfldata/squall", "id": "66e4eaa60ed801fc48636e905bfdfe649be5faf1", "size": "3871", "binary": false, "copies": "5", "ref": "refs/heads/master", "path": "squall-examples/squall-java-examples/src/ch/epfl/data/squall/examples/imperative/debug/HyracksL3BatchPlan.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Java", "bytes": "2501266" }, { "name": "Python", "bytes": "2868" }, { "name": "Scala", "bytes": "83580" }, { "name": "Shell", "bytes": "3299" } ] }
package com.zhaomeng.study.basic; import java.io.IOException; import java.util.ArrayList; import java.util.List; public class StringHelper { /** * split the string to tokens by splitter * @param content * @return * @throws IOException */ public static List<String> stringToList(String content, String splitter) { List<String> list = new ArrayList<String>(); String[] segs = content.split(splitter); for(String seg: segs){ list.add(seg.trim()); } return list; } public static String listToString(List<String> segs, String splitter) { StringBuilder sb = new StringBuilder(); for(int i = 0; i < segs.size() - 1; i ++){ sb.append(segs.get(i)); sb.append(splitter); } sb.append(segs.get(segs.size() - 1)); return sb.toString(); } }
{ "content_hash": "34e3321e6793cdd5f291fd3828d2d3c4", "timestamp": "", "source": "github", "line_count": 33, "max_line_length": 75, "avg_line_length": 23.515151515151516, "alnum_prop": 0.6765463917525774, "repo_name": "izhaomeng/spring-poc", "id": "1b65be3985f385d1873e18079df72c6b2b937bd8", "size": "776", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/main/java/com/zhaomeng/study/basic/StringHelper.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "HTML", "bytes": "51629" }, { "name": "Java", "bytes": "177746" }, { "name": "JavaScript", "bytes": "1335" }, { "name": "Python", "bytes": "10007" } ] }
A visual, syntax-driven, browser-based editor and generator for the [Geoserver dialect](http://docs.geoserver.org/stable/en/user/styling/sld/index.html) of the [Styled Layer Description](http://www.opengeospatial.org/standards/sld) (SLD) language. [Try it!](https://dr-jts.github.io/sled)
{ "content_hash": "5ffc8cc31202f0f6e9669f577b0cedfd", "timestamp": "", "source": "github", "line_count": 7, "max_line_length": 93, "avg_line_length": 42, "alnum_prop": 0.7585034013605442, "repo_name": "dr-jts/sled", "id": "da50b68e15ab5df633d53524c0b88b86ffe1f8d1", "size": "302", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "README.md", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "CSS", "bytes": "2097" }, { "name": "HTML", "bytes": "698" }, { "name": "JavaScript", "bytes": "23136" } ] }
importScripts("/resources/testharness.js"); importScripts("/html/canvas/resources/canvas-tests.js"); var t = async_test(""); var t_pass = t.done.bind(t); var t_fail = t.step_func(function(reason) { throw reason; }); t.step(function() { var offscreenCanvas = new OffscreenCanvas(100, 50); var ctx = offscreenCanvas.getContext('2d'); _assertSame(ctx.textAlign, 'start', "ctx.textAlign", "'start'"); t.done(); }); done();
{ "content_hash": "56c9bf449dc6f3024dd64d4de4a4548d", "timestamp": "", "source": "github", "line_count": 18, "max_line_length": 64, "avg_line_length": 23.72222222222222, "alnum_prop": 0.6861826697892272, "repo_name": "ric2b/Vivaldi-browser", "id": "d4fe61a26b60e46bbc8a8326b6df2698f542d24d", "size": "590", "binary": false, "copies": "5", "ref": "refs/heads/master", "path": "chromium/third_party/blink/web_tests/external/wpt/html/canvas/offscreen/text/2d.text.align.default.worker.js", "mode": "33188", "license": "bsd-3-clause", "language": [] }
package com.tngtech.jgiven.impl; import static org.assertj.core.api.Assertions.assertThat; import com.tngtech.jgiven.Stage; import com.tngtech.jgiven.exception.JGivenMissingGuaranteedScenarioStateException; import com.tngtech.jgiven.junit.SimpleScenarioTest; import com.tngtech.jgiven.report.model.ReportModel; import com.tngtech.jgiven.testframework.TestExecutionResult; import com.tngtech.jgiven.testframework.TestExecutor; import com.tngtech.jgiven.testframework.TestFramework; import com.tngtech.jgiven.tests.GuaranteedFieldRealTest; import com.tngtech.jgiven.tests.TestScenarioRepository; import org.junit.Test; public class GuaranteedStateTest extends SimpleScenarioTest<GuaranteedStateTest.SimpleTestStage> { @Test public void assure_before_method_of_second_test_is_executed_after_guaranteed_fields_validation() { given().a_Jgiven_test_with_a_guaranteed_null_state(); when().the_test_is_executed(); then().the_report_contains_$_exception(JGivenMissingGuaranteedScenarioStateException.class); } @Test public void assure_before_method_of_second_test_is_executed_if_guaranteed_initialized() { given().a_Jgiven_test_with_a_guaranteed_state(); when().the_test_is_executed(); then().the_report_contains_$_exception(ClassNotFoundException.class); } public static class SimpleTestStage extends Stage<SimpleTestStage> { TestScenarioRepository.TestScenario testScenario; private ReportModel testReport; public void a_Jgiven_test_with_a_guaranteed_null_state() { testScenario = new TestScenarioRepository.TestScenario(GuaranteedFieldRealTest.class, "a_sample_test"); } public void a_Jgiven_test_with_a_guaranteed_state() { testScenario = new TestScenarioRepository.TestScenario(GuaranteedFieldRealTest.class, "a_sample_initialized_test"); } public void the_test_is_executed() { TestExecutor testExecutor = TestExecutor.getExecutor(TestFramework.JUnit); TestExecutionResult testExecutionResult = testExecutor.execute(testScenario.testClass, testScenario.testMethod); testReport = 
testExecutionResult.getReportModel(); } public void the_report_contains_$_exception(Class<? extends Exception> givenException) { assertThat(testReport.getFailedScenarios()).isNotEmpty(); assertThat(testReport.getFailedScenarios().get(0) .getScenarioCases().get(0).getErrorMessage()).contains(givenException.getName()); } } }
{ "content_hash": "2fd7d2a01300544d1b490b78f93c818f", "timestamp": "", "source": "github", "line_count": 59, "max_line_length": 115, "avg_line_length": 45.932203389830505, "alnum_prop": 0.7033210332103321, "repo_name": "TNG/JGiven", "id": "fbd7176f435861b9bcbfedce853b617246e9dfa8", "size": "2710", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "jgiven-tests/src/test/java/com/tngtech/jgiven/impl/GuaranteedStateTest.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "151" }, { "name": "Groovy", "bytes": "11023" }, { "name": "HTML", "bytes": "389" }, { "name": "Java", "bytes": "1209844" }, { "name": "Kotlin", "bytes": "2496" }, { "name": "Scala", "bytes": "3669" }, { "name": "Shell", "bytes": "19848" } ] }
@interface MainWindowController : NSWindowController @end
{ "content_hash": "486074020071f9764c8ad6edd4c68b67", "timestamp": "", "source": "github", "line_count": 3, "max_line_length": 52, "avg_line_length": 19.666666666666668, "alnum_prop": 0.847457627118644, "repo_name": "zhaorui/ImageViewer", "id": "16a7f7fbdb68612b0d3b0c89cefe4651804c543b", "size": "225", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "ImageViewer/MainWindowController.h", "mode": "33188", "license": "mit", "language": [ { "name": "Objective-C", "bytes": "42021" } ] }
package urchin.model.folder; import org.immutables.value.Value; @Value.Immutable public abstract class Passphrase { public static final int ECRYPTFS_MAX_PASSPHRASE_LENGTH = 64; @Value.Parameter public abstract String getValue(); @Value.Check void validateLenght() { if (getValue().length() < ECRYPTFS_MAX_PASSPHRASE_LENGTH) { throw new IllegalArgumentException(String.format("passphrase must be at least %d characters but was %d", ECRYPTFS_MAX_PASSPHRASE_LENGTH, getValue().length())); } } }
{ "content_hash": "fdfdf5484518e0f209b251ad5ba02db8", "timestamp": "", "source": "github", "line_count": 19, "max_line_length": 171, "avg_line_length": 28.894736842105264, "alnum_prop": 0.6994535519125683, "repo_name": "anhem/urchin", "id": "0452c7347e6033a65881f49eca36a41a139a2dfa", "size": "549", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/main/java/urchin/model/folder/Passphrase.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Dockerfile", "bytes": "1199" }, { "name": "HTML", "bytes": "312" }, { "name": "Java", "bytes": "333784" }, { "name": "JavaScript", "bytes": "130573" }, { "name": "SCSS", "bytes": "1521" }, { "name": "Shell", "bytes": "1023" } ] }
class PrefService;

namespace feed {

// Tracks the raw counts of the info cards in order to determine their
// acknowledgement states. Counts are persisted via the PrefService handed to
// the constructor (storage details live in the .cc file).
class InfoCardTracker {
 public:
  // `profile_prefs` must outlive this tracker; held as a raw_ptr below.
  explicit InfoCardTracker(PrefService* profile_prefs);
  ~InfoCardTracker();

  // Non-copyable: this object owns mutable tracking state tied to one pref
  // store.
  InfoCardTracker(const InfoCardTracker&) = delete;
  InfoCardTracker& operator=(const InfoCardTracker&) = delete;

  // Returns the list of states of all tracked info cards. The returned view
  // timestamps will be adjusted to be based on server's clock. The adjustment
  // is computed based on `server_timestamp` and `client_timestamp`.
  // `server_timestamp` is the server timestamp, in milliseconds from Epoch,
  // when the response is produced.
  // `client_timestamp` is the client timestamp, in milliseconds from Epoch,
  // when the response is received.
  std::vector<feedwire::InfoCardTrackingState> GetAllStates(
      int64_t server_timestamp,
      int64_t client_timestamp) const;

  // Called when the info card is fully visible.
  void OnViewed(int info_card_type, int minimum_view_interval_seconds);

  // Called when the info card is tapped.
  void OnClicked(int info_card_type);

  // Called when the info card is dismissed explicitly.
  void OnDismissed(int info_card_type);

  // Reset the state of the info card.
  void ResetState(int info_card_type);

 private:
  // Read/write the persisted tracking state for one card type — presumably
  // backed by `profile_prefs_`; confirm in the .cc file.
  feedwire::InfoCardTrackingState GetState(int info_card_type) const;
  void SetState(int info_card_type,
                const feedwire::InfoCardTrackingState& state);

  // Not owned; outlives this object (see constructor comment).
  raw_ptr<PrefService> profile_prefs_;
};

}  // namespace feed

#endif  // COMPONENTS_FEED_CORE_V2_STREAM_INFO_CARD_TRACKER_H_
{ "content_hash": "ea1ace4bd187432c163ff62e9846da32", "timestamp": "", "source": "github", "line_count": 48, "max_line_length": 78, "avg_line_length": 34.0625, "alnum_prop": 0.7382262996941896, "repo_name": "nwjs/chromium.src", "id": "d0de816b846cef4ce9c6756363692524483db2ab", "size": "2156", "binary": false, "copies": "1", "ref": "refs/heads/nw70", "path": "components/feed/core/v2/stream/info_card_tracker.h", "mode": "33188", "license": "bsd-3-clause", "language": [] }
package playn.robovm; import org.robovm.apple.avfoundation.AVAudioPlayer; import org.robovm.apple.avfoundation.AVAudioPlayerDelegate; import org.robovm.apple.foundation.NSError; import playn.core.AbstractSound; /** * An implementation of Sound using the AVAudioPlayer. This is used for compressed audio, * especially lengthy music tracks. */ public class RoboSoundAVAP extends AbstractSound<AVAudioPlayer> { private AVAudioPlayerDelegate delegate = new AVAudioPlayerDelegate() { public void didFinishPlaying(AVAudioPlayer player, boolean flag) {} public void decodeErrorDidOccur(AVAudioPlayer player, NSError error) {} public void beginInterruption(AVAudioPlayer player) {} public void endInterruptionWithOptions(AVAudioPlayer player, long flags) {} public void endInterruptionWithFlags(AVAudioPlayer player, long flags) {} public void endInterruption(AVAudioPlayer player) { impl.setCurrentTime(0); impl.prepareToPlay(); impl.play(); } }; @Override public void onLoaded(AVAudioPlayer impl) { super.onLoaded(impl); } @Override protected boolean prepareImpl() { return impl.prepareToPlay(); } @Override protected boolean playingImpl() { return impl.isPlaying(); } @Override protected boolean playImpl() { impl.setCurrentTime(0); return impl.play(); } @Override protected void stopImpl() { // TODO: disable interruption handler? impl.stop(); impl.setCurrentTime(0); } @Override protected void setLoopingImpl(boolean looping) { impl.setNumberOfLoops(looping ? -1 : 0); } @Override protected void setVolumeImpl(float volume) { impl.setVolume(volume); } @Override protected void releaseImpl() { impl.dispose(); } }
{ "content_hash": "bb21b5aa5d96b8b26482b87fec9a9664", "timestamp": "", "source": "github", "line_count": 71, "max_line_length": 89, "avg_line_length": 24.901408450704224, "alnum_prop": 0.7251131221719457, "repo_name": "ruslansennov/playn", "id": "41198047c3dade063bc4a1711638ebf1af0af110", "size": "2365", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "robovm/src/playn/robovm/RoboSoundAVAP.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "C#", "bytes": "694" }, { "name": "C++", "bytes": "611" }, { "name": "Emacs Lisp", "bytes": "111" }, { "name": "Java", "bytes": "2026462" }, { "name": "Makefile", "bytes": "265" } ] }
package org.apache.ignite.internal.processors.cache;

import java.util.concurrent.Callable;
import java.util.concurrent.ConcurrentLinkedDeque;
import java.util.concurrent.atomic.AtomicInteger;
import org.apache.ignite.IgniteCache;
import org.apache.ignite.configuration.CacheConfiguration;
import org.apache.ignite.configuration.IgniteConfiguration;
import org.apache.ignite.lang.IgniteFuture;
import org.apache.ignite.lang.IgniteFutureTimeoutException;
import org.apache.ignite.spi.discovery.tcp.TcpDiscoverySpi;
import org.apache.ignite.spi.discovery.tcp.ipfinder.vm.TcpDiscoveryVmIpFinder;
import org.apache.ignite.testframework.GridTestUtils;
import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest;

import static org.apache.ignite.IgniteSystemProperties.IGNITE_MAX_COMPLETED_TX_COUNT;
import static org.apache.ignite.cache.CacheAtomicityMode.TRANSACTIONAL;
import static org.apache.ignite.cache.CacheMode.PARTITIONED;
import static org.apache.ignite.cache.CacheWriteSynchronizationMode.FULL_SYNC;

/**
 * Self-test that shrinks the completed-transaction history
 * ({@code IGNITE_MAX_COMPLETED_TX_COUNT} is set to 5 in
 * {@link #getConfiguration()}) to provoke a "missing commit version" failure
 * under concurrent puts, then verifies the affected keys can still be updated.
 */
public class GridCacheMissingCommitVersionSelfTest extends GridCommonAbstractTest {
    /** Set to {@code true} by any put thread whose put throws; read by the others to stop early. */
    private volatile boolean putFailed;

    /** Original value of {@code IGNITE_MAX_COMPLETED_TX_COUNT}, restored in {@link #afterTest()}. */
    private String maxCompletedTxCnt;

    /** Constructor; {@code super(true)} — presumably "start grid", confirm against GridCommonAbstractTest. */
    public GridCacheMissingCommitVersionSelfTest() {
        super(true);
    }

    /** {@inheritDoc} */
    @Override protected IgniteConfiguration getConfiguration() throws Exception {
        // Save the current property value so afterTest() can restore it, then
        // shrink the completed-tx history to 5 to make the error easy to hit.
        maxCompletedTxCnt = System.getProperty(IGNITE_MAX_COMPLETED_TX_COUNT);

        System.setProperty(IGNITE_MAX_COMPLETED_TX_COUNT, String.valueOf(5));

        IgniteConfiguration cfg = super.getConfiguration();

        TcpDiscoverySpi discoSpi = new TcpDiscoverySpi();

        discoSpi.setIpFinder(new TcpDiscoveryVmIpFinder(true));

        cfg.setDiscoverySpi(discoSpi);

        // Transactional partitioned cache with full-sync writes.
        CacheConfiguration ccfg = new CacheConfiguration();

        ccfg.setCacheMode(PARTITIONED);
        ccfg.setAtomicityMode(TRANSACTIONAL);
        ccfg.setWriteSynchronizationMode(FULL_SYNC);

        cfg.setCacheConfiguration(ccfg);

        return cfg;
    }

    /** {@inheritDoc} */
    @Override protected void afterTest() throws Exception {
        // Restore the system property to its pre-test value ("" if it was unset,
        // since System.setProperty rejects null).
        System.setProperty(IGNITE_MAX_COMPLETED_TX_COUNT, maxCompletedTxCnt != null ? maxCompletedTxCnt : "");

        super.afterTest();
    }

    /**
     * Hammers the cache from 10 threads over disjoint key ranges until a put
     * fails, then asserts each failed key is still updatable within 5 seconds.
     *
     * @throws Exception If failed.
     */
    public void testMissingCommitVersion() throws Exception {
        final IgniteCache<Integer, Integer> cache = jcache();

        final int KEYS_PER_THREAD = 10_000;

        // Hands each thread its own disjoint [start, start + KEYS_PER_THREAD) range.
        final AtomicInteger keyStart = new AtomicInteger();

        // Keys whose put threw; re-checked after the parallel phase.
        final ConcurrentLinkedDeque<Integer> q = new ConcurrentLinkedDeque<>();

        GridTestUtils.runMultiThreaded(new Callable<Object>() {
            @Override public Object call() throws Exception {
                int start = keyStart.getAndAdd(KEYS_PER_THREAD);

                // Stop as soon as any thread has observed a failure.
                for (int i = 0; i < KEYS_PER_THREAD && !putFailed; i++) {
                    int key = start + i;

                    try {
                        cache.put(key, 1);
                    }
                    catch (Exception e) {
                        log.info("Put failed [err=" + e + ", i=" + i + ']');

                        putFailed = true;

                        q.add(key);
                    }
                }

                return null;
            }
        }, 10, "put-thread");

        assertTrue("Test failed to provoke 'missing commit version' error.", putFailed);

        // Every key that failed must still accept an update; a 5s timeout on the
        // async put is treated as a hang and fails the test.
        for (Integer key : q) {
            log.info("Trying to update " + key);

            IgniteCache<Integer, Integer> asyncCache = cache.withAsync();

            asyncCache.put(key, 2);

            IgniteFuture<?> fut = asyncCache.future();

            try {
                fut.get(5000);
            }
            catch (IgniteFutureTimeoutException ignore) {
                fail("Put failed to finish in 5s: " + key);
            }
        }
    }
}
{ "content_hash": "297ff6b0184cbc51e01bdfe4754481ac", "timestamp": "", "source": "github", "line_count": 125, "max_line_length": 110, "avg_line_length": 31.536, "alnum_prop": 0.6415525114155252, "repo_name": "vldpyatkov/ignite", "id": "ac56d18acc72f542ac3a3abb7cb01321fbbe204f", "size": "4744", "binary": false, "copies": "4", "ref": "refs/heads/master", "path": "modules/core/src/test/java/org/apache/ignite/internal/processors/cache/GridCacheMissingCommitVersionSelfTest.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Batchfile", "bytes": "41054" }, { "name": "C", "bytes": "7504" }, { "name": "C#", "bytes": "4484446" }, { "name": "C++", "bytes": "2354759" }, { "name": "CSS", "bytes": "110872" }, { "name": "Groovy", "bytes": "15092" }, { "name": "HTML", "bytes": "497146" }, { "name": "Java", "bytes": "25766360" }, { "name": "JavaScript", "bytes": "1075745" }, { "name": "M4", "bytes": "5568" }, { "name": "Makefile", "bytes": "102786" }, { "name": "Nginx", "bytes": "3468" }, { "name": "PHP", "bytes": "11079" }, { "name": "PowerShell", "bytes": "13480" }, { "name": "Scala", "bytes": "682934" }, { "name": "Shell", "bytes": "586345" }, { "name": "Smalltalk", "bytes": "1908" } ] }
class QTimer;
class QShowEvent;
class QHideEvent;
class LocalRepo;
class LocalReposListView;
class LocalReposListModel;

// Widget displaying the list of local repos. Overrides show/hide events and
// owns a QTimer — presumably to refresh the list only while visible via the
// refreshRepos() slot; confirm in the .cpp file.
class LocalView : public QWidget {
    Q_OBJECT
public:
    LocalView(QWidget *parent=0);

protected:
    void showEvent(QShowEvent *event);
    void hideEvent(QHideEvent *event);

private slots:
    void refreshRepos();

private:
    Q_DISABLE_COPY(LocalView)

    // View and its backing model for the repo list.
    LocalReposListView *repos_list_;
    LocalReposListModel *repos_model_;
    // Drives periodic refresh; see showEvent/hideEvent.
    QTimer *refresh_timer_;
    // Guard flag — presumably prevents overlapping refreshes; confirm in .cpp.
    bool in_refresh_;
};

#endif // SEAFILE_CLIENT_REPOS_VIEW_H
{ "content_hash": "481e693dbee96ae3c89556d462d9ef6d", "timestamp": "", "source": "github", "line_count": 34, "max_line_length": 38, "avg_line_length": 16.352941176470587, "alnum_prop": 0.7212230215827338, "repo_name": "gndy/seafile-client-3.0.4", "id": "4ba19ce2c3d67c668993db68bf710bb658f4fcd7", "size": "666", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "src/ui/local-view.h", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "C", "bytes": "4246" }, { "name": "C++", "bytes": "324763" }, { "name": "CMake", "bytes": "8104" }, { "name": "CSS", "bytes": "8233" }, { "name": "QMake", "bytes": "6847" }, { "name": "Shell", "bytes": "2767" } ] }
<?xml version="1.0" encoding="utf-8"?> <set xmlns:android="http://schemas.android.com/apk/res/android"> <translate android:fromXDelta="0" android:toXDelta="-50%p" android:duration="@android:integer/config_longAnimTime"/> <alpha android:fromAlpha="1.0" android:toAlpha="0.0" android:duration="@android:integer/config_longAnimTime" /> </set>
{ "content_hash": "5c333c65a372bb6be5debd59ae4f2cfa", "timestamp": "", "source": "github", "line_count": 7, "max_line_length": 66, "avg_line_length": 51.857142857142854, "alnum_prop": 0.699724517906336, "repo_name": "Dark-Keeper/ModsAndMaps", "id": "e53cbc9fb8ca6c2b713d4c5d486e245520be7468", "size": "363", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "app/src/main/res/anim/slide_out_left.xml", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Java", "bytes": "112306" } ] }
// Standard iOS entry point: hands control to UIKit's run loop, naming
// AppDelegate (resolved at runtime via NSStringFromClass) as the application
// delegate. The autorelease pool covers objects created before the run loop
// takes over.
int main(int argc, char * argv[]) {
    @autoreleasepool {
        return UIApplicationMain(argc, argv, nil, NSStringFromClass([AppDelegate class]));
    }
}
{ "content_hash": "cb45da1d13db45e3517bf04dfdc9b3b3", "timestamp": "", "source": "github", "line_count": 5, "max_line_length": 90, "avg_line_length": 31.6, "alnum_prop": 0.6582278481012658, "repo_name": "lonely-life/-", "id": "ef6ae4620ffe559a91eaf6cac0a531bc57130951", "size": "339", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "03、属性列表/03、属性列表/main.m", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Objective-C", "bytes": "45293" } ] }
<?xml version="1.0" encoding="utf-8"?> <resources> <!-- From: file:/C:/Users/Administrator/Desktop/ONOFFZONE0617/app/build/intermediates/exploded-aar/com.android.support/appcompat-v7/22.2.0/res/values-w480dp/values-w480dp.xml --> <eat-comment/> <bool name="abc_action_bar_embed_tabs_pre_jb">true</bool> <bool name="abc_config_allowActionMenuItemTextWithIcon">true</bool> </resources>
{ "content_hash": "b6a3ccba790bf775e9c358d084383ed2", "timestamp": "", "source": "github", "line_count": 7, "max_line_length": 182, "avg_line_length": 57, "alnum_prop": 0.7293233082706767, "repo_name": "v-process/Switch", "id": "a7a71096d0973cef6406843c0c1625e81ac1f7cc", "size": "399", "binary": false, "copies": "1", "ref": "refs/heads/gh-pages", "path": "SwitchSource/ONOFFZONE0617/app/build/intermediates/res/debug/values-w480dp-v13/values.xml", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "16453" }, { "name": "HTML", "bytes": "14579" }, { "name": "Java", "bytes": "4702584" }, { "name": "JavaScript", "bytes": "3558" } ] }
package sgl.buffer; import java.nio.*; /** * @author link */ public interface Buffer3D extends Buffer2D { /** * Gets the depth of this Buffer3D. * * @return the depth of this Buffer3D */ int getDepth(); /** * Sets the depth of this Buffer3D. * * @param depth the depth of this Buffer3D */ void setDepth(int depth); default void get(int x, int y, int z, int width, int height, int depth, ByteBuffer write) { get(x + y + z * height, width * height * depth, write); } default void set(int x, int y, int z, int width, int height, int depth, ByteBuffer read) { set(x + y + z * height, width * height * depth, read); } default void get(int x, int y, int z, int width, int height, int depth, ShortBuffer write) { get(x + y + z * height, width * height * depth, write); } default void set(int x, int y, int z, int width, int height, int depth, ShortBuffer read) { set(x + y + z * height, width * height * depth, read); } default void get(int x, int y, int z, int width, int height, int depth, IntBuffer write) { get(x + y + z * height, width * height * depth, write); } default void set(int x, int y, int z, int width, int height, int depth, IntBuffer read) { set(x + y + z * height, width * height * depth, read); } default void get(int x, int y, int z, int width, int height, int depth, LongBuffer write) { get(x + y + z * height, width * height * depth, write); } default void set(int x, int y, int z, int width, int height, int depth, LongBuffer read) { set(x + y + z * height, width * height * depth, read); } default void get(int x, int y, int z, int width, int height, int depth, FloatBuffer write) { get(x + y + z * height, width * height * depth, write); } default void set(int x, int y, int z, int width, int height, int depth, FloatBuffer read) { set(x + y + z * height, width * height * depth, read); } default void get(int x, int y, int z, int width, int height, int depth, DoubleBuffer write) { get(x + y + z * height, width * height * depth, write); } default void set(int x, int y, int z, 
int width, int height, int depth, DoubleBuffer read) { set(x + y + z * height, width * height * depth, read); } }
{ "content_hash": "05cb09457dbe7ca71f77b3b2f4fe138d", "timestamp": "", "source": "github", "line_count": 75, "max_line_length": 94, "avg_line_length": 29.346666666666668, "alnum_prop": 0.6369831894593366, "repo_name": "LinkTheProgrammer/XUGL", "id": "0289e0ab80859dbcbc9b5832c08dba243a1040f8", "size": "3413", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/sgl/buffer/Buffer3D.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Java", "bytes": "78913" } ] }
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Diagnostics;

namespace Plywood.Tests.Functional
{
    /// <summary>
    /// Functional tests for the Logs controller: add/read round-trips,
    /// newest-first ordering of the entry page, and marker-based paging.
    /// Uses Debug.Assert, so checks are active only in debug builds.
    /// </summary>
    static class LogTests
    {
        public static void Run(ControllerConfiguration context, Guid instanceKey)
        {
            var logsController = new Logs(context);

            // An instance with no entries yet must return an empty page.
            var searchEmpty = logsController.GetLogEntryPage(instanceKey);
            Debug.Assert(searchEmpty.LogEntries.Count() == 0);
            Debug.Assert(searchEmpty.InstanceKey == instanceKey);

            // First entry: default status (asserted below to be LogStatus.Ok).
            var logEntry1 = new LogEntry()
            {
                InstanceKey = instanceKey,
                LogContent = "Now this is the story all about how\r\nMy life got flipped, turned upside down\r\nAnd I'd like to take a minute just sit right there\r\nI'll tell you how I became the prince of a town called Bel-air"
            };
            logsController.AddLogEntry(logEntry1);

            // Round-trip: the stored entry must match what was added, field by field.
            var createdLogEntry1 = logsController.GetLogEntry(instanceKey, logEntry1.Timestamp, logEntry1.Status);
            Debug.Assert(createdLogEntry1.Timestamp == logEntry1.Timestamp);
            Debug.Assert(createdLogEntry1.InstanceKey == logEntry1.InstanceKey);
            Debug.Assert(createdLogEntry1.Status == logEntry1.Status);
            Debug.Assert(createdLogEntry1.LogContent == logEntry1.LogContent);

            var searchSingle = logsController.GetLogEntryPage(instanceKey);
            Debug.Assert(searchSingle.LogEntries.Count() == 1);
            Debug.Assert(searchSingle.LogEntries.ElementAt(0).Timestamp == logEntry1.Timestamp);
            Debug.Assert(searchSingle.LogEntries.ElementAt(0).Status == LogStatus.Ok);

            // Second entry with an explicit Warning status.
            var logEntry2 = new LogEntry()
            {
                InstanceKey = instanceKey,
                Status = LogStatus.Warning,
                LogContent = "In west Philadelphia born and raised\r\nOn the playground where I spent most of my days\r\nChilling out, maxing, relaxing all cool\r\nAnd all shooting some b-ball outside of the school\r\nWhen a couple of guys, they were up to no good\r\nStarted making trouble in my neighbourhood\r\nI got in one little fight and my mom got scared\r\nAnd said \"You're moving with your auntie and uncle in Bel-air\""
            };
            logsController.AddLogEntry(logEntry2);

            var createdLogEntry2 = logsController.GetLogEntry(instanceKey, logEntry2.Timestamp, logEntry2.Status);
            Debug.Assert(createdLogEntry2.Timestamp == logEntry2.Timestamp);
            Debug.Assert(createdLogEntry2.InstanceKey == logEntry2.InstanceKey);
            Debug.Assert(createdLogEntry2.Status == logEntry2.Status);
            Debug.Assert(createdLogEntry2.LogContent == logEntry2.LogContent);

            // Page ordering: the newer entry (logEntry2) is first.
            var search2 = logsController.GetLogEntryPage(instanceKey);
            Debug.Assert(search2.LogEntries.Count() == 2);
            Debug.Assert(search2.LogEntries.ElementAt(0).Timestamp == logEntry2.Timestamp);
            Debug.Assert(search2.LogEntries.ElementAt(0).Status == LogStatus.Warning);
            Debug.Assert(search2.LogEntries.ElementAt(1).Timestamp == logEntry1.Timestamp);

            // Paging: page size 1 returns only the newest entry...
            var searchFirst = logsController.GetLogEntryPage(instanceKey, pageSize: 1);
            Debug.Assert(searchFirst.PageSize == 1);
            Debug.Assert(searchFirst.LogEntries.Count() == 1);
            Debug.Assert(searchFirst.LogEntries.ElementAt(0).Timestamp == logEntry2.Timestamp);

            // ...and its NextMarker resumes at the older one.
            var searchNext = logsController.GetLogEntryPage(instanceKey, searchFirst.NextMarker);
            Debug.Assert(searchNext.StartMarker == searchFirst.NextMarker);
            Debug.Assert(searchNext.LogEntries.Count() == 1);
            Debug.Assert(searchNext.LogEntries.ElementAt(0).Timestamp == logEntry1.Timestamp);
        }
    }
}
{ "content_hash": "9f9f85143594d31e6c3fb17068827a23", "timestamp": "", "source": "github", "line_count": 60, "max_line_length": 517, "avg_line_length": 61.166666666666664, "alnum_prop": 0.7024523160762943, "repo_name": "danielrbradley/Plywood", "id": "3b45d05e1d329b9021aa398e3b88a8d53a0e1501", "size": "3672", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "PlywoodFunctionalTesting/LogTests.cs", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "ASP", "bytes": "106" }, { "name": "C#", "bytes": "411043" }, { "name": "JavaScript", "bytes": "19831" }, { "name": "Shell", "bytes": "1901" } ] }
""" This module defines a Drone to assimilate vasp data and insert it into a Mongo database. """ from __future__ import division __author__ = "Shyue Ping Ong" __copyright__ = "Copyright 2012, The Materials Project" __version__ = "2.0.0" __maintainer__ = "Shyue Ping Ong" __email__ = "shyue@mit.edu" __date__ = "Mar 18, 2012" import os import re import glob import logging import datetime import string import json import socket import numpy as np import zlib import six from fnmatch import fnmatch from collections import OrderedDict from pymongo import MongoClient import gridfs from pymatgen.apps.borg.hive import AbstractDrone from pymatgen.analysis.structure_analyzer import VoronoiCoordFinder from pymatgen.core.structure import Structure from pymatgen.core.composition import Composition from pymatgen.io.vasp import Vasprun, Incar, Kpoints, Potcar, Poscar, \ Outcar, Oszicar from pymatgen.io.cif import CifWriter from pymatgen.symmetry.analyzer import SpacegroupAnalyzer from pymatgen.analysis.bond_valence import BVAnalyzer from monty.io import zopen from pymatgen.matproj.rest import MPRester from pymatgen.entries.computed_entries import ComputedEntry from monty.json import MontyEncoder logger = logging.getLogger(__name__) class VaspToDbTaskDrone(AbstractDrone): """ VaspToDbTaskDrone assimilates directories containing vasp input to inserted db tasks. This drone is meant ot be used with pymatgen's BorgQueen to assimilate entire directory structures and insert them into a database using Python's multiprocessing. The current format assumes standard VASP relaxation runs. If you have other kinds of runs, you may design your own Drone class based on this one. There are some restrictions on the valid directory structures: 1. There can be only one vasp run in each directory. Nested directories are fine. 2. Directories designated "relax1", "relax2" are considered to be 2 parts of an aflow style run. 3. 
Directories containing vasp output with ".relax1" and ".relax2" are also considered as 2 parts of an aflow style run. """ # Version of this db creator document. __version__ = "2.0.0" def __init__(self, host="127.0.0.1", port=27017, database="vasp", user=None, password=None, collection="tasks", parse_dos=False, compress_dos=False, simulate_mode=False, additional_fields=None, update_duplicates=True, mapi_key=None, use_full_uri=True, runs=None): """Constructor. Args: host: Hostname of database machine. Defaults to 127.0.0.1 or localhost. port: Port for db access. Defaults to mongo's default of 27017. database: Actual database to access. Defaults to "vasp". user: User for db access. Requires write access. Defaults to None, which means no authentication. password: Password for db access. Requires write access. Defaults to None, which means no authentication. collection: Collection to query. Defaults to "tasks". parse_dos: Whether to parse the DOS data. Options are True, False, and 'final' Defaults to False. If True, all dos will be inserted into a gridfs collection called dos_fs. If 'final', only the last calculation will be parsed. compress_dos: Whether to compress the DOS data. Valid options are integers 1-9, corresponding to zlib compression level. 1 is usually adequate. simulate_mode: Allows one to simulate db insertion without actually performing the insertion. additional_fields: Dict specifying additional fields to append to each doc inserted into the collection. For example, allows one to add an author or tags to a whole set of runs for example. update_duplicates: If True, if a duplicate path exists in the collection, the entire doc is updated. Else, duplicates are skipped. mapi_key: A Materials API key. If this key is supplied, the insertion code will attempt to use the Materials REST API to calculate stability data for inserted calculations. Stability assessment requires a large quantity of materials data. 
E.g., to compute the stability of a new LixFeyOz calculation, you need to the energies of all known phases in the Li-Fe-O chemical system. Using the Materials API, we can obtain the pre-calculated data from the Materials Project. Go to www.materialsproject.org/profile to generate or obtain your API key. use_full_uri: Whether to use full uri path (including hostname) for the directory name. Defaults to True. If False, only the abs path will be used. runs: Ordered list of runs to look for e.g. ["relax1", "relax2"]. Automatically detects whether the runs are stored in the subfolder or file extension schema. """ self.host = host self.database = database self.user = user self.password = password self.collection = collection self.port = port self.simulate = simulate_mode if isinstance(parse_dos, six.string_types) and parse_dos != 'final': raise ValueError('Invalid value for parse_dos') self.parse_dos = parse_dos self.compress_dos = compress_dos self.additional_fields = additional_fields or {} self.update_duplicates = update_duplicates self.mapi_key = mapi_key self.use_full_uri = use_full_uri self.runs = runs or ["relax1", "relax2"] if not simulate_mode: conn = MongoClient(self.host, self.port, j=True) db = conn[self.database] if self.user: db.authenticate(self.user, self.password) if db.counter.find({"_id": "taskid"}).count() == 0: db.counter.insert({"_id": "taskid", "c": 1}) def assimilate(self, path): """ Parses vasp runs. Then insert the result into the db. and return the task_id or doc of the insertion. Returns: If in simulate_mode, the entire doc is returned for debugging purposes. Else, only the task_id of the inserted doc is returned. 
""" try: d = self.get_task_doc(path) if self.mapi_key is not None and d["state"] == "successful": self.calculate_stability(d) tid = self._insert_doc(d) return tid except Exception as ex: import traceback logger.error(traceback.format_exc()) return False def calculate_stability(self, d): m = MPRester(self.mapi_key) functional = d["pseudo_potential"]["functional"] syms = ["{} {}".format(functional, l) for l in d["pseudo_potential"]["labels"]] entry = ComputedEntry(Composition(d["unit_cell_formula"]), d["output"]["final_energy"], parameters={"hubbards": d["hubbards"], "potcar_symbols": syms}) data = m.get_stability([entry])[0] for k in ("e_above_hull", "decomposes_to"): d["analysis"][k] = data[k] def get_task_doc(self, path): """ Get the entire task doc for a path, including any post-processing. """ logger.info("Getting task doc for base dir :{}".format(path)) files = os.listdir(path) vasprun_files = OrderedDict() if "STOPCAR" in files: #Stopped runs. Try to parse as much as possible. logger.info(path + " contains stopped run") for r in self.runs: if r in files: #try subfolder schema for f in os.listdir(os.path.join(path, r)): if fnmatch(f, "vasprun.xml*"): vasprun_files[r] = os.path.join(r, f) else: #try extension schema for f in files: if fnmatch(f, "vasprun.xml.{}*".format(r)): vasprun_files[r] = f if len(vasprun_files) == 0: for f in files: #get any vasprun from the folder if fnmatch(f, "vasprun.xml*") and \ f not in vasprun_files.values(): vasprun_files['standard'] = f if len(vasprun_files) > 0: d = self.generate_doc(path, vasprun_files) if not d: d = self.process_killed_run(path) self.post_process(path, d) elif (not (path.endswith("relax1") or path.endswith("relax2"))) and contains_vasp_input(path): #If not Materials Project style, process as a killed run. 
logger.warning(path + " contains killed run") d = self.process_killed_run(path) self.post_process(path, d) else: raise ValueError("No VASP files found!") return d def _insert_doc(self, d): if not self.simulate: # Perform actual insertion into db. Because db connections cannot # be pickled, every insertion needs to create a new connection # to the db. conn = MongoClient(self.host, self.port) db = conn[self.database] if self.user: db.authenticate(self.user, self.password) coll = db[self.collection] # Insert dos data into gridfs and then remove it from the dict. # DOS data tends to be above the 4Mb limit for mongo docs. A ref # to the dos file is in the dos_fs_id. result = coll.find_one({"dir_name": d["dir_name"]}, ["dir_name", "task_id"]) if result is None or self.update_duplicates: if self.parse_dos and "calculations" in d: for calc in d["calculations"]: if "dos" in calc: dos = json.dumps(calc["dos"], cls=MontyEncoder) if self.compress_dos: dos = zlib.compress(dos, self.compress_dos) calc["dos_compression"] = "zlib" fs = gridfs.GridFS(db, "dos_fs") dosid = fs.put(dos) calc["dos_fs_id"] = dosid del calc["dos"] d["last_updated"] = datetime.datetime.today() if result is None: if ("task_id" not in d) or (not d["task_id"]): d["task_id"] = db.counter.find_and_modify( query={"_id": "taskid"}, update={"$inc": {"c": 1}} )["c"] logger.info("Inserting {} with taskid = {}" .format(d["dir_name"], d["task_id"])) elif self.update_duplicates: d["task_id"] = result["task_id"] logger.info("Updating {} with taskid = {}" .format(d["dir_name"], d["task_id"])) coll.update({"dir_name": d["dir_name"]}, {"$set": d}, upsert=True) return d["task_id"] else: logger.info("Skipping duplicate {}".format(d["dir_name"])) else: d["task_id"] = 0 logger.info("Simulated insert into database for {} with task_id {}" .format(d["dir_name"], d["task_id"])) return d def post_process(self, dir_name, d): """ Simple post-processing for various files other than the vasprun.xml. Called by generate_task_doc. 
Modify this if your runs have other kinds of processing requirements. Args: dir_name: The dir_name. d: Current doc generated. """ logger.info("Post-processing dir:{}".format(dir_name)) fullpath = os.path.abspath(dir_name) # VASP input generated by pymatgen's alchemy has a # transformations.json file that keeps track of the origin of a # particular structure. This is extremely useful for tracing back a # result. If such a file is found, it is inserted into the task doc # as d["transformations"] transformations = {} filenames = glob.glob(os.path.join(fullpath, "transformations.json*")) if len(filenames) >= 1: with zopen(filenames[0], "rt") as f: transformations = json.load(f) try: m = re.match("(\d+)-ICSD", transformations["history"][0]["source"]) if m: d["icsd_id"] = int(m.group(1)) except Exception as ex: logger.warning("Cannot parse ICSD from transformations " "file.") pass else: logger.warning("Transformations file does not exist.") other_parameters = transformations.get("other_parameters") new_tags = None if other_parameters: # We don't want to leave tags or authors in the # transformations file because they'd be copied into # every structure generated after this one. new_tags = other_parameters.pop("tags", None) new_author = other_parameters.pop("author", None) if new_author: d["author"] = new_author if not other_parameters: # if dict is now empty remove it transformations.pop("other_parameters") d["transformations"] = transformations # Calculations done using custodian has a custodian.json, # which tracks the jobs performed and any errors detected and fixed. # This is useful for tracking what has actually be done to get a # result. If such a file is found, it is inserted into the task doc # as d["custodian"] filenames = glob.glob(os.path.join(fullpath, "custodian.json*")) if len(filenames) >= 1: with zopen(filenames[0], "r") as f: d["custodian"] = json.load(f) # Parse OUTCAR for additional information and run stats that are # generally not in vasprun.xml. 
try: run_stats = {} for filename in glob.glob(os.path.join(fullpath, "OUTCAR*")): outcar = Outcar(filename) i = 1 if re.search("relax2", filename) else 0 taskname = "relax2" if re.search("relax2", filename) else \ "relax1" d["calculations"][i]["output"]["outcar"] = outcar.as_dict() run_stats[taskname] = outcar.run_stats except: logger.error("Bad OUTCAR for {}.".format(fullpath)) try: overall_run_stats = {} for key in ["Total CPU time used (sec)", "User time (sec)", "System time (sec)", "Elapsed time (sec)"]: overall_run_stats[key] = sum([v[key] for v in run_stats.values()]) run_stats["overall"] = overall_run_stats except: logger.error("Bad run stats for {}.".format(fullpath)) d["run_stats"] = run_stats #Convert to full uri path. if self.use_full_uri: d["dir_name"] = get_uri(dir_name) if new_tags: d["tags"] = new_tags logger.info("Post-processed " + fullpath) def process_killed_run(self, dir_name): """ Process a killed vasp run. """ fullpath = os.path.abspath(dir_name) logger.info("Processing Killed run " + fullpath) d = {"dir_name": fullpath, "state": "killed", "oszicar": {}} for f in os.listdir(dir_name): filename = os.path.join(dir_name, f) if fnmatch(f, "INCAR*"): try: incar = Incar.from_file(filename) d["incar"] = incar.as_dict() d["is_hubbard"] = incar.get("LDAU", False) if d["is_hubbard"]: us = np.array(incar.get("LDAUU", [])) js = np.array(incar.get("LDAUJ", [])) if sum(us - js) == 0: d["is_hubbard"] = False d["hubbards"] = {} else: d["hubbards"] = {} if d["is_hubbard"]: d["run_type"] = "GGA+U" elif incar.get("LHFCALC", False): d["run_type"] = "HF" else: d["run_type"] = "GGA" except Exception as ex: print(str(ex)) logger.error("Unable to parse INCAR for killed run {}." .format(dir_name)) elif fnmatch(f, "KPOINTS*"): try: kpoints = Kpoints.from_file(filename) d["kpoints"] = kpoints.as_dict() except: logger.error("Unable to parse KPOINTS for killed run {}." 
.format(dir_name)) elif fnmatch(f, "POSCAR*"): try: s = Poscar.from_file(filename).structure comp = s.composition el_amt = s.composition.get_el_amt_dict() d.update({"unit_cell_formula": comp.as_dict(), "reduced_cell_formula": comp.to_reduced_dict, "elements": list(el_amt.keys()), "nelements": len(el_amt), "pretty_formula": comp.reduced_formula, "anonymous_formula": comp.anonymized_formula, "nsites": comp.num_atoms, "chemsys": "-".join(sorted(el_amt.keys()))}) d["poscar"] = s.as_dict() except: logger.error("Unable to parse POSCAR for killed run {}." .format(dir_name)) elif fnmatch(f, "POTCAR*"): try: potcar = Potcar.from_file(filename) d["pseudo_potential"] = { "functional": potcar.functional.lower(), "pot_type": "paw", "labels": potcar.symbols} except: logger.error("Unable to parse POTCAR for killed run in {}." .format(dir_name)) elif fnmatch(f, "OSZICAR"): try: d["oszicar"]["root"] = \ Oszicar(os.path.join(dir_name, f)).as_dict() except: logger.error("Unable to parse OSZICAR for killed run in {}." .format(dir_name)) elif re.match("relax\d", f): if os.path.exists(os.path.join(dir_name, f, "OSZICAR")): try: d["oszicar"][f] = Oszicar( os.path.join(dir_name, f, "OSZICAR")).as_dict() except: logger.error("Unable to parse OSZICAR for killed " "run in {}.".format(dir_name)) return d def process_vasprun(self, dir_name, taskname, filename): """ Process a vasprun.xml file. 
""" vasprun_file = os.path.join(dir_name, filename) r = Vasprun(vasprun_file) d = r.as_dict() d["dir_name"] = os.path.abspath(dir_name) d["completed_at"] = \ str(datetime.datetime.fromtimestamp(os.path.getmtime( vasprun_file))) d["cif"] = str(CifWriter(r.final_structure)) d["density"] = r.final_structure.density if self.parse_dos and (self.parse_dos != 'final' \ or taskname == self.runs[-1]): try: d["dos"] = r.complete_dos.as_dict() except Exception: logger.warn("No valid dos data exist in {}.\n Skipping dos" .format(dir_name)) if taskname == "relax1" or taskname == "relax2": d["task"] = {"type": "aflow", "name": taskname} else: d["task"] = {"type": taskname, "name": taskname} return d def generate_doc(self, dir_name, vasprun_files): """ Process aflow style runs, where each run is actually a combination of two vasp runs. """ try: fullpath = os.path.abspath(dir_name) #Defensively copy the additional fields first. This is a MUST. #Otherwise, parallel updates will see the same object and inserts #will be overridden!! d = {k: v for k, v in self.additional_fields.items()} d["dir_name"] = fullpath d["schema_version"] = VaspToDbTaskDrone.__version__ d["calculations"] = [ self.process_vasprun(dir_name, taskname, filename) for taskname, filename in vasprun_files.items()] d1 = d["calculations"][0] d2 = d["calculations"][-1] #Now map some useful info to the root level. 
for root_key in ["completed_at", "nsites", "unit_cell_formula", "reduced_cell_formula", "pretty_formula", "elements", "nelements", "cif", "density", "is_hubbard", "hubbards", "run_type"]: d[root_key] = d2[root_key] d["chemsys"] = "-".join(sorted(d2["elements"])) #store any overrides to the exchange correlation functional xc = d2["input"]["incar"].get("GGA") if xc: xc = xc.upper() d["input"] = {"crystal": d1["input"]["crystal"], "is_lasph": d2["input"]["incar"].get("LASPH", False), "potcar_spec": d1["input"].get("potcar_spec"), "xc_override": xc} vals = sorted(d2["reduced_cell_formula"].values()) d["anonymous_formula"] = {string.ascii_uppercase[i]: float(vals[i]) for i in range(len(vals))} d["output"] = { "crystal": d2["output"]["crystal"], "final_energy": d2["output"]["final_energy"], "final_energy_per_atom": d2["output"]["final_energy_per_atom"]} d["name"] = "aflow" p = d2["input"]["potcar_type"][0].split("_") pot_type = p[0] functional = "lda" if len(pot_type) == 1 else "_".join(p[1:]) d["pseudo_potential"] = {"functional": functional.lower(), "pot_type": pot_type.lower(), "labels": d2["input"]["potcar"]} if len(d["calculations"]) == len(self.runs) or \ list(vasprun_files.keys())[0] != "relax1": d["state"] = "successful" if d2["has_vasp_completed"] \ else "unsuccessful" else: d["state"] = "stopped" d["analysis"] = get_basic_analysis_and_error_checks(d) sg = SpacegroupAnalyzer(Structure.from_dict(d["output"]["crystal"]), 0.1) d["spacegroup"] = {"symbol": sg.get_spacegroup_symbol(), "number": sg.get_spacegroup_number(), "point_group": sg.get_point_group(), "source": "spglib", "crystal_system": sg.get_crystal_system(), "hall": sg.get_hall()} d["last_updated"] = datetime.datetime.today() return d except Exception as ex: import traceback print(traceback.format_exc()) logger.error("Error in " + os.path.abspath(dir_name) + ".\n" + traceback.format_exc()) return None def get_valid_paths(self, path): """ There are some restrictions on the valid directory structures: 1. 
There can be only one vasp run in each directory. Nested directories are fine. 2. Directories designated "relax1", "relax2" are considered to be 2 parts of an aflow style run. 3. Directories containing vasp output with ".relax1" and ".relax2" are also considered as 2 parts of an aflow style run. """ (parent, subdirs, files) = path if set(self.runs).intersection(subdirs): return [parent] if not any([parent.endswith(os.sep + r) for r in self.runs]) and \ len(glob.glob(os.path.join(parent, "vasprun.xml*"))) > 0: return [parent] return [] def convert(self, d): return d def __str__(self): return "VaspToDbDictDrone" @classmethod def from_dict(cls, d): return cls(**d["init_args"]) def as_dict(self): init_args = {"host": self.host, "port": self.port, "database": self.database, "user": self.user, "password": self.password, "collection": self.collection, "parse_dos": self.parse_dos, "simulate_mode": self.simulate, "additional_fields": self.additional_fields, "update_duplicates": self.update_duplicates} output = {"name": self.__class__.__name__, "init_args": init_args, "version": __version__} return output def get_basic_analysis_and_error_checks(d, max_force_threshold=0.5, volume_change_threshold=0.2): initial_vol = d["input"]["crystal"]["lattice"]["volume"] final_vol = d["output"]["crystal"]["lattice"]["volume"] delta_vol = final_vol - initial_vol percent_delta_vol = delta_vol / initial_vol coord_num = get_coordination_numbers(d) calc = d["calculations"][-1] gap = calc["output"]["bandgap"] cbm = calc["output"]["cbm"] vbm = calc["output"]["vbm"] is_direct = calc["output"]["is_gap_direct"] warning_msgs = [] error_msgs = [] if abs(percent_delta_vol) > volume_change_threshold: warning_msgs.append("Volume change > {}%" .format(volume_change_threshold * 100)) bv_struct = Structure.from_dict(d["output"]["crystal"]) try: bva = BVAnalyzer() bv_struct = bva.get_oxi_state_decorated_structure(bv_struct) except ValueError as e: logger.error("Valence cannot be determined due to {e}." 
.format(e=e)) except Exception as ex: logger.error("BVAnalyzer error {e}.".format(e=str(ex))) max_force = None if d["state"] == "successful" and \ d["calculations"][0]["input"]["parameters"].get("NSW", 0) > 0: # handle the max force and max force error max_force = max([np.linalg.norm(a) for a in d["calculations"][-1]["output"] ["ionic_steps"][-1]["forces"]]) if max_force > max_force_threshold: error_msgs.append("Final max force exceeds {} eV" .format(max_force_threshold)) d["state"] = "error" s = Structure.from_dict(d["output"]["crystal"]) if not s.is_valid(): error_msgs.append("Bad structure (atoms are too close!)") d["state"] = "error" return {"delta_volume": delta_vol, "max_force": max_force, "percent_delta_volume": percent_delta_vol, "warnings": warning_msgs, "errors": error_msgs, "coordination_numbers": coord_num, "bandgap": gap, "cbm": cbm, "vbm": vbm, "is_gap_direct": is_direct, "bv_structure": bv_struct.as_dict()} def contains_vasp_input(dir_name): """ Checks if a directory contains valid VASP input. Args: dir_name: Directory name to check. Returns: True if directory contains all four VASP input files (INCAR, POSCAR, KPOINTS and POTCAR). """ for f in ["INCAR", "POSCAR", "POTCAR", "KPOINTS"]: if not os.path.exists(os.path.join(dir_name, f)) and \ not os.path.exists(os.path.join(dir_name, f + ".orig")): return False return True def get_coordination_numbers(d): """ Helper method to get the coordination number of all sites in the final structure from a run. Args: d: Run dict generated by VaspToDbTaskDrone. Returns: Coordination numbers as a list of dict of [{"site": site_dict, "coordination": number}, ...]. 
""" structure = Structure.from_dict(d["output"]["crystal"]) f = VoronoiCoordFinder(structure) cn = [] for i, s in enumerate(structure.sites): try: n = f.get_coordination_number(i) number = int(round(n)) cn.append({"site": s.as_dict(), "coordination": number}) except Exception: logger.error("Unable to parse coordination errors") return cn def get_uri(dir_name): """ Returns the URI path for a directory. This allows files hosted on different file servers to have distinct locations. Args: dir_name: A directory name. Returns: Full URI path, e.g., fileserver.host.com:/full/path/of/dir_name. """ fullpath = os.path.abspath(dir_name) try: hostname = socket.gethostbyaddr(socket.gethostname())[0] except: hostname = socket.gethostname() return "{}:{}".format(hostname, fullpath)
{ "content_hash": "f4a9ea45aa3570c028b9957d9d647eaf", "timestamp": "", "source": "github", "line_count": 735, "max_line_length": 84, "avg_line_length": 41.925170068027214, "alnum_prop": 0.5227324355021905, "repo_name": "migueldiascosta/pymatgen-db", "id": "df70fc6d4384af67e9cbb8fc55a2d357e1017298", "size": "30838", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "matgendb/creator.py", "mode": "33188", "license": "mit", "language": [ { "name": "ActionScript", "bytes": "31964" }, { "name": "CSS", "bytes": "212547" }, { "name": "HTML", "bytes": "2280625" }, { "name": "JavaScript", "bytes": "1312249" }, { "name": "PHP", "bytes": "9068" }, { "name": "Python", "bytes": "344019" }, { "name": "Shell", "bytes": "419" } ] }
package cmd import ( "archive/tar" "bytes" "errors" "fmt" "io" "io/ioutil" "os" "path" "path/filepath" "strings" "k8s.io/kubernetes/pkg/kubectl/cmd/templates" cmdutil "k8s.io/kubernetes/pkg/kubectl/cmd/util" "k8s.io/kubernetes/pkg/kubectl/util/i18n" "github.com/renstrom/dedent" "github.com/spf13/cobra" ) var ( cpExample = templates.Examples(i18n.T(` # !!!Important Note!!! # Requires that the 'tar' binary is present in your container # image. If 'tar' is not present, 'kubectl cp' will fail. # Copy /tmp/foo_dir local directory to /tmp/bar_dir in a remote pod in the default namespace kubectl cp /tmp/foo_dir <some-pod>:/tmp/bar_dir # Copy /tmp/foo local file to /tmp/bar in a remote pod in a specific container kubectl cp /tmp/foo <some-pod>:/tmp/bar -c <specific-container> # Copy /tmp/foo local file to /tmp/bar in a remote pod in namespace <some-namespace> kubectl cp /tmp/foo <some-namespace>/<some-pod>:/tmp/bar # Copy /tmp/foo from a remote pod to /tmp/bar locally kubectl cp <some-namespace>/<some-pod>:/tmp/foo /tmp/bar`)) cpUsageStr = dedent.Dedent(` expected 'cp <file-spec-src> <file-spec-dest> [-c container]'. <file-spec> is: [namespace/]pod-name:/file/path for a remote file /file/path for a local file`) ) // NewCmdCp creates a new Copy command. func NewCmdCp(f cmdutil.Factory, cmdOut, cmdErr io.Writer) *cobra.Command { cmd := &cobra.Command{ Use: "cp <file-spec-src> <file-spec-dest>", Short: i18n.T("Copy files and directories to and from containers."), Long: "Copy files and directories to and from containers.", Example: cpExample, Run: func(cmd *cobra.Command, args []string) { cmdutil.CheckErr(runCopy(f, cmd, cmdOut, cmdErr, args)) }, } cmd.Flags().StringP("container", "c", "", "Container name. 
If omitted, the first container in the pod will be chosen") return cmd } type fileSpec struct { PodNamespace string PodName string File string } var ( errFileSpecDoesntMatchFormat = errors.New("Filespec must match the canonical format: [[namespace/]pod:]file/path") errFileCannotBeEmpty = errors.New("Filepath can not be empty") ) func extractFileSpec(arg string) (fileSpec, error) { pieces := strings.Split(arg, ":") if len(pieces) == 1 { return fileSpec{File: arg}, nil } if len(pieces) != 2 { // FIXME Kubernetes can't copy files that contain a ':' // character. return fileSpec{}, errFileSpecDoesntMatchFormat } file := pieces[1] pieces = strings.Split(pieces[0], "/") if len(pieces) == 1 { return fileSpec{ PodName: pieces[0], File: file, }, nil } if len(pieces) == 2 { return fileSpec{ PodNamespace: pieces[0], PodName: pieces[1], File: file, }, nil } return fileSpec{}, errFileSpecDoesntMatchFormat } func runCopy(f cmdutil.Factory, cmd *cobra.Command, out, cmderr io.Writer, args []string) error { if len(args) != 2 { return cmdutil.UsageErrorf(cmd, cpUsageStr) } srcSpec, err := extractFileSpec(args[0]) if err != nil { return err } destSpec, err := extractFileSpec(args[1]) if err != nil { return err } if len(srcSpec.PodName) != 0 { return copyFromPod(f, cmd, cmderr, srcSpec, destSpec) } if len(destSpec.PodName) != 0 { return copyToPod(f, cmd, out, cmderr, srcSpec, destSpec) } return cmdutil.UsageErrorf(cmd, "One of src or dest must be a remote file specification") } // checkDestinationIsDir receives a destination fileSpec and // determines if the provided destination path exists on the // pod. If the destination path does not exist or is _not_ a // directory, an error is returned with the exit code received. 
func checkDestinationIsDir(dest fileSpec, f cmdutil.Factory, cmd *cobra.Command) error { options := &ExecOptions{ StreamOptions: StreamOptions{ Out: bytes.NewBuffer([]byte{}), Err: bytes.NewBuffer([]byte{}), Namespace: dest.PodNamespace, PodName: dest.PodName, }, Command: []string{"test", "-d", dest.File}, Executor: &DefaultRemoteExecutor{}, } return execute(f, cmd, options) } func copyToPod(f cmdutil.Factory, cmd *cobra.Command, stdout, stderr io.Writer, src, dest fileSpec) error { if len(src.File) == 0 { return errFileCannotBeEmpty } reader, writer := io.Pipe() // strip trailing slash (if any) if strings.HasSuffix(string(dest.File[len(dest.File)-1]), "/") { dest.File = dest.File[:len(dest.File)-1] } if err := checkDestinationIsDir(dest, f, cmd); err == nil { // If no error, dest.File was found to be a directory. // Copy specified src into it dest.File = dest.File + "/" + path.Base(src.File) } go func() { defer writer.Close() err := makeTar(src.File, dest.File, writer) cmdutil.CheckErr(err) }() // TODO: Improve error messages by first testing if 'tar' is present in the container? cmdArr := []string{"tar", "xf", "-"} destDir := path.Dir(dest.File) if len(destDir) > 0 { cmdArr = append(cmdArr, "-C", destDir) } options := &ExecOptions{ StreamOptions: StreamOptions{ In: reader, Out: stdout, Err: stderr, Stdin: true, Namespace: dest.PodNamespace, PodName: dest.PodName, }, Command: cmdArr, Executor: &DefaultRemoteExecutor{}, } return execute(f, cmd, options) } func copyFromPod(f cmdutil.Factory, cmd *cobra.Command, cmderr io.Writer, src, dest fileSpec) error { if len(src.File) == 0 { return errFileCannotBeEmpty } reader, outStream := io.Pipe() options := &ExecOptions{ StreamOptions: StreamOptions{ In: nil, Out: outStream, Err: cmderr, Namespace: src.PodNamespace, PodName: src.PodName, }, // TODO: Improve error messages by first testing if 'tar' is present in the container? 
Command: []string{"tar", "cf", "-", src.File}, Executor: &DefaultRemoteExecutor{}, } go func() { defer outStream.Close() execute(f, cmd, options) }() prefix := getPrefix(src.File) prefix = path.Clean(prefix) return untarAll(reader, dest.File, prefix) } func makeTar(srcPath, destPath string, writer io.Writer) error { // TODO: use compression here? tarWriter := tar.NewWriter(writer) defer tarWriter.Close() srcPath = path.Clean(srcPath) destPath = path.Clean(destPath) return recursiveTar(path.Dir(srcPath), path.Base(srcPath), path.Dir(destPath), path.Base(destPath), tarWriter) } func recursiveTar(srcBase, srcFile, destBase, destFile string, tw *tar.Writer) error { filepath := path.Join(srcBase, srcFile) stat, err := os.Lstat(filepath) if err != nil { return err } if stat.IsDir() { files, err := ioutil.ReadDir(filepath) if err != nil { return err } if len(files) == 0 { //case empty directory hdr, _ := tar.FileInfoHeader(stat, filepath) hdr.Name = destFile if err := tw.WriteHeader(hdr); err != nil { return err } } for _, f := range files { if err := recursiveTar(srcBase, path.Join(srcFile, f.Name()), destBase, path.Join(destFile, f.Name()), tw); err != nil { return err } } return nil } else if stat.Mode()&os.ModeSymlink != 0 { //case soft link hdr, _ := tar.FileInfoHeader(stat, filepath) target, err := os.Readlink(filepath) if err != nil { return err } hdr.Linkname = target hdr.Name = destFile if err := tw.WriteHeader(hdr); err != nil { return err } } else { //case regular file or other file type like pipe hdr, err := tar.FileInfoHeader(stat, filepath) if err != nil { return err } hdr.Name = destFile if err := tw.WriteHeader(hdr); err != nil { return err } f, err := os.Open(filepath) if err != nil { return err } defer f.Close() if _, err := io.Copy(tw, f); err != nil { return err } return f.Close() } return nil } // clean prevents path traversals by stripping them out. 
// This is adapted from https://golang.org/src/net/http/fs.go#L74 func clean(fileName string) string { return path.Clean(string(os.PathSeparator) + fileName) } func untarAll(reader io.Reader, destFile, prefix string) error { entrySeq := -1 // TODO: use compression here? tarReader := tar.NewReader(reader) for { header, err := tarReader.Next() if err != nil { if err != io.EOF { return err } break } entrySeq++ mode := header.FileInfo().Mode() outFileName := path.Join(destFile, clean(header.Name[len(prefix):])) baseName := path.Dir(outFileName) if err := os.MkdirAll(baseName, 0755); err != nil { return err } if header.FileInfo().IsDir() { if err := os.MkdirAll(outFileName, 0755); err != nil { return err } continue } // handle coping remote file into local directory if entrySeq == 0 && !header.FileInfo().IsDir() { exists, err := dirExists(outFileName) if err != nil { return err } if exists { outFileName = filepath.Join(outFileName, path.Base(clean(header.Name))) } } if mode&os.ModeSymlink != 0 { err := os.Symlink(header.Linkname, outFileName) if err != nil { return err } } else { outFile, err := os.Create(outFileName) if err != nil { return err } defer outFile.Close() if _, err := io.Copy(outFile, tarReader); err != nil { return err } if err := outFile.Close(); err != nil { return err } } } if entrySeq == -1 { //if no file was copied errInfo := fmt.Sprintf("error: %s no such file or directory", prefix) return errors.New(errInfo) } return nil } func getPrefix(file string) string { if file[0] == '/' { // tar strips the leading '/' if it's there, so we will too return file[1:] } return file } func execute(f cmdutil.Factory, cmd *cobra.Command, options *ExecOptions) error { if len(options.Namespace) == 0 { namespace, _, err := f.DefaultNamespace() if err != nil { return err } options.Namespace = namespace } container := cmdutil.GetFlagString(cmd, "container") if len(container) > 0 { options.ContainerName = container } config, err := f.ClientConfig() if err != nil { return 
err } options.Config = config clientset, err := f.ClientSet() if err != nil { return err } options.PodClient = clientset.Core() if err := options.Validate(); err != nil { return err } if err := options.Run(); err != nil { return err } return nil } // dirExists checks if a path exists and is a directory. func dirExists(path string) (bool, error) { fi, err := os.Stat(path) if err == nil && fi.IsDir() { return true, nil } if os.IsNotExist(err) { return false, nil } return false, err }
{ "content_hash": "f52df66e3c1384ce00d64f2aa4ba73cf", "timestamp": "", "source": "github", "line_count": 426, "max_line_length": 123, "avg_line_length": 24.544600938967136, "alnum_prop": 0.6608645753634277, "repo_name": "rthallisey/ansible-service-broker", "id": "752071edcf5705916d33797d2a0ccd079ac58952", "size": "11025", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "vendor/k8s.io/kubernetes/pkg/kubectl/cmd/cp.go", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Go", "bytes": "283324" }, { "name": "Makefile", "bytes": "4303" }, { "name": "Python", "bytes": "36768" }, { "name": "Shell", "bytes": "50212" } ] }
<?php namespace Sunspikes\Ratelimit\Throttle\Factory; use Sunspikes\Ratelimit\Cache\Adapter\CacheAdapterInterface; use Sunspikes\Ratelimit\Throttle\Entity\Data; use Sunspikes\Ratelimit\Throttle\Settings\ElasticWindowSettings; use Sunspikes\Ratelimit\Throttle\Settings\ThrottleSettingsInterface; use Sunspikes\Ratelimit\Throttle\Throttler\ElasticWindowThrottler; class ThrottlerFactory implements FactoryInterface { /** * @var CacheAdapterInterface */ protected $cacheAdapter; /** * @param CacheAdapterInterface $cacheAdapter */ public function __construct(CacheAdapterInterface $cacheAdapter) { $this->cacheAdapter = $cacheAdapter; } /** * @inheritdoc */ public function make(Data $data, ThrottleSettingsInterface $settings) { if (!$settings->isValid()) { throw new \InvalidArgumentException('Provided throttler settings not valid'); } return $this->createThrottler($data, $settings); } /** * @param Data $data * @param ThrottleSettingsInterface $settings * * @return ElasticWindowThrottler */ protected function createThrottler(Data $data, ThrottleSettingsInterface $settings) { if ($settings instanceof ElasticWindowSettings) { return new ElasticWindowThrottler( $this->cacheAdapter, $data->getKey(), $settings->getLimit(), $settings->getTime() ); } throw new \InvalidArgumentException( sprintf('Unable to create throttler for %s settings', get_class($settings)) ); } }
{ "content_hash": "a43f89590040c3df898602bbd3ea702b", "timestamp": "", "source": "github", "line_count": 60, "max_line_length": 89, "avg_line_length": 28.15, "alnum_prop": 0.6459443457667259, "repo_name": "sunspikes/php-ratelimiter", "id": "66187ba1d7ec33ba654db4bd09de1a0f8cec09ed", "size": "2860", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "src/Throttle/Factory/ThrottlerFactory.php", "mode": "33188", "license": "mit", "language": [ { "name": "PHP", "bytes": "139826" } ] }
(function(global, _) { var Dataset = global.Miso.Dataset; /** * Handles basic strict data format. Looks like: * * { * data : { * columns : [ * { name : colName, type : colType, data : [...] } * ] * } * } * * @constructor * @name Strict * @memberof Miso.Dataset.Parsers * @augments Miso.Dataset.Parsers * * @param {Object} [options] */ Dataset.Parsers.Strict = function( options ) { this.options = options || {}; }; _.extend( Dataset.Parsers.Strict.prototype, Dataset.Parsers.prototype, { parse : function( data ) { var columnData = {}, columnNames = []; _.each(data.columns, function(column) { if (columnNames.indexOf(column.name) !== -1) { throw new Error("You have more than one column named \"" + column.name + "\""); } else { columnNames.push( column.name ); columnData[ column.name ] = column.data; } }); return { columns : columnNames, data : columnData }; } }); }(this, _));
{ "content_hash": "f706b4d872cd4078323867aa1729ffae", "timestamp": "", "source": "github", "line_count": 49, "max_line_length": 89, "avg_line_length": 22.755102040816325, "alnum_prop": 0.5121076233183857, "repo_name": "misoproject/dataset", "id": "2c50d80eda7a1faeaa0000cdb1e73c6ff7919ced", "size": "1115", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "src/parsers/strict.js", "mode": "33188", "license": "mit", "language": [ { "name": "JavaScript", "bytes": "1510065" }, { "name": "Ruby", "bytes": "2211" } ] }
package com.coolweather1.android.gson; /** * Created by Administrator on 2017/4/4 0004. */ public class AQI { public AQIcity city; public class AQIcity{ public String aqi; public String pm25; } }
{ "content_hash": "485abaef4980313a9e07a232be88909b", "timestamp": "", "source": "github", "line_count": 13, "max_line_length": 45, "avg_line_length": 17.692307692307693, "alnum_prop": 0.6391304347826087, "repo_name": "xiaoyuan199/coolweather1", "id": "de559909d30bd3075ea31d8a503fb33937e0566a", "size": "230", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "app/src/main/java/com/coolweather1/android/gson/AQI.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Java", "bytes": "149994" } ] }
package stream.flarebot.flarebot.database; import io.github.binaryoverload.JSONConfig; import redis.clients.jedis.Jedis; import redis.clients.jedis.JedisMonitor; import redis.clients.jedis.JedisPool; import redis.clients.jedis.JedisPoolConfig; import stream.flarebot.flarebot.FlareBot; import java.io.IOException; import java.util.concurrent.BlockingQueue; import java.util.concurrent.LinkedBlockingQueue; import java.util.concurrent.TimeUnit; public class RedisController { private static JedisPool jedisPool; public static BlockingQueue<RedisSetData> setQueue = new LinkedBlockingQueue<>(); private RedisController() { } public RedisController(JSONConfig config) { jedisPool = new JedisPool( new JedisPoolConfig(), config.getString("redis.host").get(), Integer.parseInt(config.getString("redis.port").get()), 3000, config.getString("redis.password").get().isEmpty() ? null : config.getString("redis.password").get()); try (Jedis jedis = jedisPool.getResource()) { String response = jedis.ping(); if (!("PONG".equals(response))) throw new IOException("Ping to server failed!"); FlareBot.LOGGER.info("Redis started with a DB Size of {}", jedis.dbSize()); } catch (Exception e) { FlareBot.LOGGER.error("Could not connect to redis!", e); return; } new Thread(() -> { try (Jedis jedis = RedisController.getJedisPool().getResource()) { jedis.monitor(new JedisMonitor() { public void onCommand(String command) { String finalCommand = command; if (command.contains("AUTH")) finalCommand = "AUTH"; FlareBot.LOGGER.debug("Executing redis command: {}", command.substring(finalCommand.lastIndexOf("]") + 1).trim()); } }); } }, "Redis-Monitor").start(); Thread setThread = new Thread(() -> { while (!FlareBot.EXITING.get()) { try { RedisSetData data = RedisController.setQueue.poll(2, TimeUnit.SECONDS); if (data != null) { try (Jedis jedis = RedisController.getJedisPool().getResource()) { data.set(jedis); FlareBot.LOGGER.debug("Saving redis value with key: " + data.getKey()); } } } catch (Exception e) { 
FlareBot.LOGGER.error("Error in set thread!", e); } } }, "Redis-SetThread"); setThread.start(); } public static JedisPool getJedisPool() { return jedisPool; } /** * Expires a key after a certain amount of time * * @param key The key to set the expiry on * @param seconds The amount of seconds to expire after * @return {@code 0} if the key does not exist. <br> * {@code 1} if the expiry was successfully set. */ public static Long expire(String key, int seconds) { try (Jedis jedis = jedisPool.getResource()) { return jedis.expire(key, seconds); } } /** * Expires a key after a certain amount of milliseconds * * @param key The key to set the expiry on * @param millis The number of milliseconds to expire after * @return {@code 0} if the key does not exist. <br> * {@code 1} if the expiry was successfully set. */ public static Long pExpire(String key, long millis) { try (Jedis jedis = jedisPool.getResource()) { return jedis.pexpire(key, millis); } } /** * Expires a key after at a certain unix time in seconds. * <i>If the unix time is in the past the key is deleted</i> * * @param key The key to set the expiry on * @param unixTime The unix time in seconds to expire the key at * @return {@code 0} if the key does not exist. <br> * {@code 1} if the expiry was successfully set. */ public static Long expireAt(String key, long unixTime) { try (Jedis jedis = jedisPool.getResource()) { return jedis.expireAt(key, unixTime); } } /** * Expires a key after at a certain unix time in milliseconds. * <i>If the unix time is in the past the key is deleted</i> * * @param key The key to set the expiry on * @param unixTime The unix time in milliseconds to expire the key at * @return {@code 0} if the key does not exist. <br> * {@code 1} if the expiry was successfully set. 
*/ public static Long pExpireAt(String key, long unixTime) { try (Jedis jedis = jedisPool.getResource()) { return jedis.pexpireAt(key, unixTime); } } /** * Gets the TTL in seconds for the specified key * * @param key The key to check for TTL * @return The TTL of the specified key in seconds. <br> * {@code -2} if the key does not exist. <br> * {@code -1} if the key exists but has no associated expire. */ public static Long ttl(String key) { try (Jedis jedis = jedisPool.getResource()) { return jedis.ttl(key); } } /** * Gets the TTL in milliseconds for the specified key * * @param key The key to check for TTL * @return The TTL of the specified key in milliseconds. <br> * {@code -2} if the key does not exist. <br> * {@code -1} if the key exists but has no associated expire. */ public static Long pttl(String key) { try (Jedis jedis = jedisPool.getResource()) { return jedis.pttl(key); } } /** * Sets a value with a specific key in the datebase * * @param key The key to set * @param value The value to set at the key */ public static void set(String key, String value) { setQueue.add(new RedisSetData(key, value)); } /** * Sets a value with a specific key in the datebase * * @param key The key to set * @param value The value to set at the key * @param nxxx {@code NX} to set the key only if doesn't exist <br> * {@code XX} to set the key only if it exists <br> * Otherwise use {@link RedisController#set(String, String)} */ public static void set(String key, String value, String nxxx) { setQueue.add(new RedisSetData(key, value, nxxx)); } /** * Sets a value with a specific key in the datebase * * @param key The key to set * @param value The value to set at the key * @param nxxx {@code NX} to set the key only if doesn't exist <br> * {@code XX} to set the key only if it exists <br> * Otherwise use empty value * @param pxex {@code PX} to set the expiry in milliseconds <br> * {@code EX} to set the expiry in seconds <br> * Otherwise use {@link RedisController#set(String, String, 
String)} * @param time The expiry time to set */ public static void set(String key, String value, String nxxx, String pxex, long time) { setQueue.add(new RedisSetData(key, value, nxxx, pxex, time)); } /** * Gets a value from the database * * @param key The key to get from redis * @return The value of the key or {@code null} if the key doesn't exist */ public static String get(String key) { try (Jedis jedis = jedisPool.getResource()) { return jedis.get(key); } } /** * Deletes one or more keys from Eedis * * @param keys The keys to remove * @return The amount of keys that were removed */ public static Long del(String... keys) { try (Jedis jedis = jedisPool.getResource()) { return jedis.del(keys); } } /** * Check whether a key exists * * @param key The key to check * @return Whether the specified key exists or not */ public static boolean exists(String key) { if (key == null) return false; try (Jedis jedis = jedisPool.getResource()) { return jedis.exists(key); } } }
{ "content_hash": "7fdc3e86d8e39ac5671751382bd464e2", "timestamp": "", "source": "github", "line_count": 237, "max_line_length": 138, "avg_line_length": 35.40084388185654, "alnum_prop": 0.5783075089392133, "repo_name": "binaryoverload/FlareBot", "id": "a3176210baa3f47844c4c1849e519fd043c53076", "size": "8390", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "src/main/java/stream/flarebot/flarebot/database/RedisController.java", "mode": "33188", "license": "mit", "language": [ { "name": "Java", "bytes": "858401" } ] }
package parquet.io;

import java.util.ArrayDeque;
import java.util.Deque;

import parquet.Log;
import parquet.schema.MessageType;
import parquet.schema.Type;
import parquet.schema.PrimitiveType.PrimitiveTypeName;
import parquet.schema.Type.Repetition;

/**
 * Wraps a record consumer.
 * Validates the record written against the schema and passes the events down to the wrapped consumer.
 *
 * @author Julien Le Dem
 *
 */
public class ValidatingRecordConsumer extends RecordConsumer {
  private static final Log LOG = Log.getLog(ValidatingRecordConsumer.class);
  private static final boolean DEBUG = Log.DEBUG;

  private final RecordConsumer delegate;

  // Stack of the group types enclosing the current write position; the message
  // type is pushed at construction time and stays at the bottom.
  private Deque<Type> types = new ArrayDeque<Type>();
  // Stack of started-but-not-yet-ended field indexes (one entry per open field).
  private Deque<Integer> fields = new ArrayDeque<Integer>();
  // Exactly one entry per open nesting level: the index of the last field
  // *ended* at that level, or -1 if no field has been ended there yet.
  private Deque<Integer> previousField = new ArrayDeque<Integer>();
  // One entry per open field: how many values have been written for it so far.
  private Deque<Integer> fieldValueCount = new ArrayDeque<Integer>();

  /**
   *
   * @param delegate the consumer to pass down the event to
   * @param schema the schema to validate against
   */
  public ValidatingRecordConsumer(RecordConsumer delegate, MessageType schema) {
    this.delegate = delegate;
    this.types.push(schema);
  }

  /**
   * {@inheritDoc}
   */
  public void startMessage() {
    previousField.push(-1);
    delegate.startMessage();
  }

  /**
   * {@inheritDoc}
   */
  public void endMessage() {
    delegate.endMessage();
    validateMissingFields(types.peek().asGroupType().getFieldCount());
    previousField.pop();
  }

  /**
   * {@inheritDoc}
   */
  public void startField(String field, int index) {
    if (index <= previousField.peek()) {
      throw new InvalidRecordException("fields must be added in order " + field + " index " + index + " is before previous field " + previousField.peek());
    }
    validateMissingFields(index);
    fields.push(index);
    fieldValueCount.push(0);
    delegate.startField(field, index);
  }

  // Checks that no REQUIRED field was skipped between the last ended field at
  // the current level and the given index; used both when jumping ahead to a
  // later field and when closing a group/message.
  private void validateMissingFields(int index) {
    for (int i = previousField.peek() + 1; i < index; i++) {
      Type type = types.peek().asGroupType().getType(i);
      if (type.getRepetition() == Repetition.REQUIRED) {
        throw new InvalidRecordException("required field is missing " + type);
      }
    }
  }

  /**
   * {@inheritDoc}
   */
  public void endField(String field, int index) {
    delegate.endField(field, index);
    fieldValueCount.pop();
    // BUG FIX: replace this level's previous-field marker instead of pushing a
    // new entry on top of it. The previous code pushed without popping, so
    // previousField grew by one entry per field written while endMessage/
    // endGroup pop only a single entry — an unbounded memory leak over many
    // records. Popping first keeps exactly one entry per open nesting level
    // and leaves peek() values (the only reads) unchanged at every point.
    previousField.pop();
    previousField.push(fields.pop());
  }

  /**
   * {@inheritDoc}
   */
  public void startGroup() {
    previousField.push(-1);
    types.push(types.peek().asGroupType().getType(fields.peek()));
    delegate.startGroup();
  }

  /**
   * {@inheritDoc}
   */
  public void endGroup() {
    delegate.endGroup();
    validateMissingFields(types.peek().asGroupType().getFieldCount());
    types.pop();
    previousField.pop();
  }

  // Validates that one more value of primitive type p may be written for the
  // currently open field: the declared type must match, and a non-REPEATED
  // field may only receive a single value.
  private void validate(PrimitiveTypeName p) {
    Type currentType = types.peek().asGroupType().getType(fields.peek());
    int c = fieldValueCount.pop() + 1;
    fieldValueCount.push(c);
    if (DEBUG) LOG.debug("validate " + p + " for " + currentType.getName());
    switch (currentType.getRepetition()) {
      case OPTIONAL:
      case REQUIRED:
        if (c > 1) {
          throw new InvalidRecordException("repeated value when the type is not repeated in " + currentType);
        }
        break;
      case REPEATED:
        break;
      default:
        throw new InvalidRecordException("unknown repetition " + currentType.getRepetition() + " in " + currentType);
    }
    if (!currentType.isPrimitive() || currentType.asPrimitiveType().getPrimitiveTypeName() != p) {
      throw new InvalidRecordException("expected type " + currentType + " but got "+ p);
    }
  }

  /**
   * {@inheritDoc}
   */
  public void addInteger(int value) {
    validate(PrimitiveTypeName.INT32);
    delegate.addInteger(value);
  }

  /**
   * {@inheritDoc}
   */
  public void addLong(long value) {
    validate(PrimitiveTypeName.INT64);
    delegate.addLong(value);
  }

  /**
   * {@inheritDoc}
   */
  public void addBoolean(boolean value) {
    validate(PrimitiveTypeName.BOOLEAN);
    delegate.addBoolean(value);
  }

  /**
   * {@inheritDoc}
   */
  public void addBinary(Binary value) {
    validate(PrimitiveTypeName.BINARY);
    delegate.addBinary(value);
  }

  /**
   * {@inheritDoc}
   */
  public void addFloat(float value) {
    validate(PrimitiveTypeName.FLOAT);
    delegate.addFloat(value);
  }

  /**
   * {@inheritDoc}
   */
  public void addDouble(double value) {
    validate(PrimitiveTypeName.DOUBLE);
    delegate.addDouble(value);
  }
}
{ "content_hash": "ffe565bfc9733a21ab09ef200d3abf65", "timestamp": "", "source": "github", "line_count": 179, "max_line_length": 155, "avg_line_length": 25.48603351955307, "alnum_prop": 0.6600175361683472, "repo_name": "julienledem/redelm", "id": "d80ab6939176b5c3c5b898129fa72a7c58ec46ef", "size": "5155", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "parquet-column/src/main/java/parquet/io/ValidatingRecordConsumer.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Java", "bytes": "718696" } ] }
package org.eft.evol.model;

import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Set;

import org.eft.evol.stats.UnitAction;

import cern.jet.math.Arithmetic;
import cern.jet.random.Exponential;
import cern.jet.random.Uniform;

/**
 * Placeholder for the sigmoid-based trading unit.
 *
 * The former implementation (an AbstractUnit/Unit subclass that chose
 * buy/sell/hold actions by passing the unit's character and preference values
 * through a sigmoid) was entirely disabled by commenting it out; the dead
 * commented-out copy has been removed from this file — recover it from version
 * control if it is ever to be revived.
 *
 * NOTE(review): the imports above were only used by the removed implementation;
 * they are left in place because only part of the file is visible here.
 */
public class UnitSigmoidImpl {
}
{ "content_hash": "8149d181bc402049aa0ef9342c2d430e", "timestamp": "", "source": "github", "line_count": 239, "max_line_length": 78, "avg_line_length": 25.355648535564853, "alnum_prop": 0.666006600660066, "repo_name": "xSakix/etf_expert", "id": "c2ac641945863db663e78bde0c8df31d8701ab66", "size": "6060", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "ETF_EVOL_MODEL/src/org/eft/evol/model/UnitSigmoidImpl.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "C", "bytes": "35109" }, { "name": "Java", "bytes": "559679" }, { "name": "Python", "bytes": "70674" } ] }
/*
Navicat MySQL Data Transfer

Source Server         : localhost
Source Server Version : 50540
Source Host           : localhost:3306
Source Database       : rps

Target Server Type    : MYSQL
Target Server Version : 50540
File Encoding         : 65001

Date: 2017-07-20 18:33:21
*/

-- Disable FK checks so tables can be dropped/recreated in any order during restore.
SET FOREIGN_KEY_CHECKS=0;

-- ----------------------------
-- Table structure for `t_base_user`
-- ----------------------------
DROP TABLE IF EXISTS `t_base_user`;
CREATE TABLE `t_base_user` (
  `id` bigint(20) NOT NULL AUTO_INCREMENT,
  `username` varchar(16) DEFAULT NULL,
  `password` varchar(32) DEFAULT NULL,
  `name` varchar(16) DEFAULT NULL,
  `email` varchar(32) DEFAULT NULL,
  `level` int(2) DEFAULT NULL,
  `score` int(10) DEFAULT NULL,
  PRIMARY KEY (`id`)
) ENGINE=InnoDB AUTO_INCREMENT=6 DEFAULT CHARSET=utf8;

-- ----------------------------
-- Records of t_base_user
-- NOTE(review): passwords below are stored in plaintext — hash them
-- (e.g. bcrypt) before any production use.
-- ----------------------------
INSERT INTO `t_base_user` VALUES ('2', 'admin', 'admin', '系统管理员', 'admin@wallstreetcn.com', '1', '0');
INSERT INTO `t_base_user` VALUES ('3', 'ad', 'asd', 'asd', 'asd', '1', '0');
INSERT INTO `t_base_user` VALUES ('4', 'qwe', 'qwe', 'qwe', 'qwe', '1', '0');
INSERT INTO `t_base_user` VALUES ('5', 'asd', 'asd', 'asd', 'asd', '1', '0');

-- ----------------------------
-- Table structure for `t_core_project`
-- NOTE: `describe` is a MySQL reserved word — the backquotes are required.
-- ----------------------------
DROP TABLE IF EXISTS `t_core_project`;
CREATE TABLE `t_core_project` (
  `id` bigint(20) NOT NULL AUTO_INCREMENT,
  `name` varchar(128) DEFAULT NULL,
  `sn` varchar(32) DEFAULT NULL,
  `describe` varchar(255) DEFAULT NULL,
  `mngr` bigint(20) DEFAULT NULL,
  `start` date DEFAULT NULL,
  `end` date DEFAULT NULL,
  PRIMARY KEY (`id`)
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8;

-- ----------------------------
-- Records of t_core_project
-- ----------------------------
INSERT INTO `t_core_project` VALUES ('1', '安全生产信息化管理系统', ' spms', 'dasdsadasdaaaaaaaaaaadsad', '1', '2017-04-01', '2017-07-30');

-- ----------------------------
-- Table structure for `t_core_task`
-- ----------------------------
DROP TABLE IF EXISTS `t_core_task`;
CREATE TABLE `t_core_task` (
  `id` bigint(20) NOT NULL AUTO_INCREMENT,
  `project` bigint(20) DEFAULT NULL,
  `label` varchar(64) DEFAULT NULL,
  `pusher` bigint(20) DEFAULT NULL,
  `exp` int(11) DEFAULT '5',
  `result` varchar(128) DEFAULT NULL,
  `describe` varchar(255) DEFAULT NULL,
  `classify` int(1) DEFAULT NULL,
  `process` int(3) DEFAULT NULL,
  `fettle` int(1) DEFAULT NULL,
  PRIMARY KEY (`id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;

-- ----------------------------
-- Records of t_core_task
-- ----------------------------
{ "content_hash": "6cbe9ad0df7b1c6b4528d184f60fc7c0", "timestamp": "", "source": "github", "line_count": 81, "max_line_length": 128, "avg_line_length": 33.44444444444444, "alnum_prop": 0.5537098560354374, "repo_name": "yunmel/rps", "id": "b62339cbff7bad5233502d5f68d36180aa4061fd", "size": "2741", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/main/resources/rps.sql", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "136348" }, { "name": "HTML", "bytes": "41881" }, { "name": "Java", "bytes": "157347" }, { "name": "JavaScript", "bytes": "6884" } ] }
package net.dirtydeeds.discordsoundboard.utils;

import java.util.List;

import net.dirtydeeds.discordsoundboard.service.SoundboardBot;
import net.dv8tion.jda.api.Permission;
import net.dv8tion.jda.api.entities.Guild;
import net.dv8tion.jda.api.entities.TextChannel;
import net.dv8tion.jda.api.requests.RestAction;
import net.dv8tion.jda.api.entities.MessageHistory;
import net.dv8tion.jda.api.entities.Message;

/**
 * Static helpers for scanning a Discord text channel's message history
 * (deleting the bot's own messages, caching word statistics) and for picking
 * a channel the bot can write to.
 */
public class ChatUtils {

  // Total number of past messages to scan per channel.
  public static final int NUM_MESSAGES_TO_GO_BACK = 1000;
  // Batch size per history request — presumably kept below JDA's per-request
  // retrieval cap of 100; TODO confirm against the JDA version in use.
  public static final int MAX_NUM_MESSAGES_TO_GO_BACK = 99;

  /**
   * Asynchronously deletes the bot's own messages among (roughly) the last
   * NUM_MESSAGES_TO_GO_BACK messages of the channel. Each batch is queued via
   * RestAction, so this method returns before any deletion completes.
   *
   * @param bot the bot whose messages should be removed
   * @param channel the channel to clean up
   */
  public static void clearBotMessagesInChannel(SoundboardBot bot, TextChannel channel) {
    MessageHistory history = new MessageHistory(channel);
    for (int i = 0; i < NUM_MESSAGES_TO_GO_BACK; i += MAX_NUM_MESSAGES_TO_GO_BACK) {
      RestAction<List<Message>> a = history.retrievePast(MAX_NUM_MESSAGES_TO_GO_BACK);
      a.queue(msgs -> {
        for (Message msg : msgs) {
          if (msg.getAuthor().equals(bot.getAPI().getSelfUser())) {
            try {
              msg.delete().queue();
            } catch (Exception e) {
              // Best-effort cleanup: a message that cannot be deleted
              // (permissions, already gone) is skipped silently.
              continue;
            }
          }
        }
      });
    }
  }

  /**
   * Asynchronously feeds the raw content of other users' recent messages into
   * the word cache (StringUtils.cacheWords). Same batching scheme as
   * {@link #clearBotMessagesInChannel}; returns before retrieval completes.
   *
   * @param bot the bot (used only to skip its own messages)
   * @param channel the channel whose history is cached
   */
  public static void cacheMessagesInChannel(SoundboardBot bot, TextChannel channel) {
    MessageHistory history = new MessageHistory(channel);
    for (int i = 0; i < NUM_MESSAGES_TO_GO_BACK; i += MAX_NUM_MESSAGES_TO_GO_BACK) {
      RestAction<List<Message>> a = history.retrievePast(MAX_NUM_MESSAGES_TO_GO_BACK);
      a.queue(msgs -> {
        for (Message msg : msgs) {
          if (!msg.getAuthor().equals(bot.getAPI().getSelfUser()))
            StringUtils.cacheWords(msg.getContentRaw());
        }
      });
    }
  }

  /**
   * Picks a channel for general bot chatter: the guild's default channel if
   * one exists, otherwise the first text channel the bot may write in.
   * NOTE(review): when a default channel exists it is returned without a
   * write-permission check — confirm that is intended.
   *
   * @param bot the bot whose permissions are checked
   * @param guild the guild to search
   * @return a writable channel, or {@code null} if none is found
   */
  public static TextChannel getDiscussionChannel(SoundboardBot bot, Guild guild) {
    TextChannel channel = guild.getDefaultChannel();
    if (channel == null) {
      for (TextChannel c : guild.getTextChannels()) {
        if (bot.hasPermissionInChannel(c, Permission.MESSAGE_WRITE)) {
          channel = c;
          break;
        }
      }
    }
    return channel;
  }

}
{ "content_hash": "06905125dca5af5e19cd70366802895a", "timestamp": "", "source": "github", "line_count": 69, "max_line_length": 70, "avg_line_length": 30.82608695652174, "alnum_prop": 0.6398683591913493, "repo_name": "AlexSafatli/NootBot", "id": "0e7f49cc8beb634bac00408e5883ca09344df579", "size": "2127", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/main/java/net/dirtydeeds/discordsoundboard/utils/ChatUtils.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Batchfile", "bytes": "22" }, { "name": "Java", "bytes": "259990" } ] }
<!DOCTYPE html> <html lang="en"> <head> <meta charset="utf-8"> <meta name="viewport" content="width=device-width, initial-scale=1"> <title>metacoq: Not compatible 👼</title> <link rel="shortcut icon" type="image/png" href="../../../../../favicon.png" /> <link href="../../../../../bootstrap.min.css" rel="stylesheet"> <link href="../../../../../bootstrap-custom.css" rel="stylesheet"> <link href="//maxcdn.bootstrapcdn.com/font-awesome/4.2.0/css/font-awesome.min.css" rel="stylesheet"> <script src="../../../../../moment.min.js"></script> <!-- HTML5 Shim and Respond.js IE8 support of HTML5 elements and media queries --> <!-- WARNING: Respond.js doesn't work if you view the page via file:// --> <!--[if lt IE 9]> <script src="https://oss.maxcdn.com/html5shiv/3.7.2/html5shiv.min.js"></script> <script src="https://oss.maxcdn.com/respond/1.4.2/respond.min.js"></script> <![endif]--> </head> <body> <div class="container"> <div class="navbar navbar-default" role="navigation"> <div class="container-fluid"> <div class="navbar-header"> <a class="navbar-brand" href="../../../../.."><i class="fa fa-lg fa-flag-checkered"></i> Coq bench</a> </div> <div id="navbar" class="collapse navbar-collapse"> <ul class="nav navbar-nav"> <li><a href="../..">clean / released</a></li> <li class="active"><a href="">8.7.1+1 / metacoq - 1.0~alpha2+8.10</a></li> </ul> </div> </div> </div> <div class="article"> <div class="row"> <div class="col-md-12"> <a href="../..">« Up</a> <h1> metacoq <small> 1.0~alpha2+8.10 <span class="label label-info">Not compatible 👼</span> </small> </h1> <p>📅 <em><script>document.write(moment("2022-04-30 14:46:12 +0000", "YYYY-MM-DD HH:mm:ss Z").fromNow());</script> (2022-04-30 14:46:12 UTC)</em><p> <h2>Context</h2> <pre># Packages matching: installed # Name # Installed # Synopsis base-bigarray base base-threads base base-unix base camlp5 7.14 Preprocessor-pretty-printer of OCaml conf-findutils 1 Virtual package relying on findutils conf-perl 2 Virtual package relying on perl 
coq 8.7.1+1 Formal proof management system num 1.4 The legacy Num library for arbitrary-precision integer and rational arithmetic ocaml 4.08.1 The OCaml compiler (virtual package) ocaml-base-compiler 4.08.1 Official release 4.08.1 ocaml-config 1 OCaml Switch Configuration ocamlfind 1.9.3 A library manager for OCaml # opam file: opam-version: &quot;2.0&quot; maintainer: &quot;matthieu.sozeau@inria.fr&quot; homepage: &quot;https://metacoq.github.io/metacoq&quot; dev-repo: &quot;git+https://github.com/MetaCoq/metacoq.git#coq-8.10&quot; bug-reports: &quot;https://github.com/MetaCoq/metacoq/issues&quot; authors: [&quot;Abhishek Anand &lt;aa755@cs.cornell.edu&gt;&quot; &quot;Simon Boulier &lt;simon.boulier@inria.fr&gt;&quot; &quot;Cyril Cohen &lt;cyril.cohen@inria.fr&gt;&quot; &quot;Yannick Forster &lt;forster@ps.uni-saarland.de&gt;&quot; &quot;Fabian Kunze &lt;fkunze@fakusb.de&gt;&quot; &quot;Gregory Malecha &lt;gmalecha@gmail.com&gt;&quot; &quot;Matthieu Sozeau &lt;matthieu.sozeau@inria.fr&gt;&quot; &quot;Nicolas Tabareau &lt;nicolas.tabareau@inria.fr&gt;&quot; &quot;Théo Winterhalter &lt;theo.winterhalter@inria.fr&gt;&quot; ] license: &quot;MIT&quot; depends: [ &quot;ocaml&quot; {&gt; &quot;4.02.3&quot;} &quot;coq&quot; {&gt;= &quot;8.10&quot; &amp; &lt; &quot;8.11~&quot;} &quot;coq-metacoq-template&quot; {= version} &quot;coq-metacoq-checker&quot; {= version} &quot;coq-metacoq-pcuic&quot; {= version} &quot;coq-metacoq-safechecker&quot; {= version} &quot;coq-metacoq-erasure&quot; {= version} &quot;coq-metacoq-translations&quot; {= version} ] synopsis: &quot;A meta-programming framework for Coq&quot; description: &quot;&quot;&quot; MetaCoq is a meta-programming framework for Coq. The meta-package includes the template-coq library, unverified checker for Coq, PCUIC development including a verified translation from Coq to PCUIC, safe checker and erasure for PCUIC and example translations. See individual packages for more detailed descriptions. 
&quot;&quot;&quot; url { src: &quot;https://github.com/MetaCoq/metacoq/archive/v1.0-alpha2-8.10.tar.gz&quot; checksum: &quot;sha256=94156cb9397b44915c9217a435a812cabc9651684cd229d5069b34332d0792a2&quot; } </pre> <h2>Lint</h2> <dl class="dl-horizontal"> <dt>Command</dt> <dd><code>true</code></dd> <dt>Return code</dt> <dd>0</dd> </dl> <h2>Dry install 🏜️</h2> <p>Dry install with the current Coq version:</p> <dl class="dl-horizontal"> <dt>Command</dt> <dd><code>opam install -y --show-action coq-metacoq.1.0~alpha2+8.10 coq.8.7.1+1</code></dd> <dt>Return code</dt> <dd>5120</dd> <dt>Output</dt> <dd><pre>[NOTE] Package coq is already installed (current version is 8.7.1+1). The following dependencies couldn&#39;t be met: - coq-metacoq -&gt; coq &gt;= 8.10 Your request can&#39;t be satisfied: - No available version of coq satisfies the constraints No solution found, exiting </pre></dd> </dl> <p>Dry install without Coq/switch base, to test if the problem was incompatibility with the current Coq/OCaml version:</p> <dl class="dl-horizontal"> <dt>Command</dt> <dd><code>opam remove -y coq; opam install -y --show-action --unlock-base coq-metacoq.1.0~alpha2+8.10</code></dd> <dt>Return code</dt> <dd>0</dd> </dl> <h2>Install dependencies</h2> <dl class="dl-horizontal"> <dt>Command</dt> <dd><code>true</code></dd> <dt>Return code</dt> <dd>0</dd> <dt>Duration</dt> <dd>0 s</dd> </dl> <h2>Install 🚀</h2> <dl class="dl-horizontal"> <dt>Command</dt> <dd><code>true</code></dd> <dt>Return code</dt> <dd>0</dd> <dt>Duration</dt> <dd>0 s</dd> </dl> <h2>Installation size</h2> <p>No files were installed.</p> <h2>Uninstall 🧹</h2> <dl class="dl-horizontal"> <dt>Command</dt> <dd><code>true</code></dd> <dt>Return code</dt> <dd>0</dd> <dt>Missing removes</dt> <dd> none </dd> <dt>Wrong removes</dt> <dd> none </dd> </dl> </div> </div> </div> <hr/> <div class="footer"> <p class="text-center"> Sources are on <a href="https://github.com/coq-bench">GitHub</a> © Guillaume Claret 🐣 </p> </div> </div> <script 
src="https://ajax.googleapis.com/ajax/libs/jquery/1.11.1/jquery.min.js"></script> <script src="../../../../../bootstrap.min.js"></script> </body> </html>
{ "content_hash": "e580323508fdb4aa30a276f02d0cb445", "timestamp": "", "source": "github", "line_count": 179, "max_line_length": 159, "avg_line_length": 42.79329608938548, "alnum_prop": 0.558355091383812, "repo_name": "coq-bench/coq-bench.github.io", "id": "d62413be052f9f984b97b16a3ccbd3272926768f", "size": "7686", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "clean/Linux-x86_64-4.08.1-2.0.5/released/8.7.1+1/metacoq/1.0~alpha2+8.10.html", "mode": "33188", "license": "mit", "language": [] }
package org.scalatest.tools

import org.scalatest.SharedHelpers.EventRecordingReporter
import org.scalatest._
import org.scalatest.funspec.AnyFunSpec
import org.scalatest.funsuite.AnyFunSuite

/**
 * Verifies that SuiteRunner reports a SuiteAborted (rather than
 * SuiteCompleted) event when a before/after-style cleanup method throws.
 */
class SuiteRunnerSpec extends AnyFunSpec {

  /**
   * Runs the given suite through SuiteRunner with a recording reporter and
   * asserts exactly one SuiteStarting, zero SuiteCompleted and one
   * SuiteAborted event were fired. Shared by all tests below, which differed
   * only in the lifecycle trait that throws.
   */
  private def assertRunAborts(suite: Suite): Unit = {
    val rep = new EventRecordingReporter
    val runner = new SuiteRunner(suite, Args(rep), new ScalaTestStatefulStatus)
    runner.run()
    assert(rep.suiteStartingEventsReceived.length == 1)
    assert(rep.suiteCompletedEventsReceived.length == 0)
    assert(rep.suiteAbortedEventsReceived.length == 1)
  }

  describe("SuiteRunner") {

    it("should fire SuiteAborted event when after function in BeforeAndAfter throws RuntimeException") {
      class ExampleSuite extends AnyFunSuite with BeforeAndAfter {
        test("test 1") {}
        after {
          throw new RuntimeException("oops!")
        }
      }
      assertRunAborts(new ExampleSuite)
    }

    it("should fire SuiteAborted event when afterAll function in BeforeAndAfterAll throws RuntimeException") {
      class ExampleSuite extends AnyFunSuite with BeforeAndAfterAll {
        test("test 1") {}
        override protected def afterAll(): Unit = {
          throw new RuntimeException("oops!")
        }
      }
      assertRunAborts(new ExampleSuite)
    }

    it("should fire SuiteAborted event when afterAll function in BeforeAndAfterAllConfigMap throws RuntimeException") {
      class ExampleSuite extends AnyFunSuite with BeforeAndAfterAllConfigMap {
        test("test 1") {}
        override protected def afterAll(configMap: ConfigMap): Unit = {
          throw new RuntimeException("oops!")
        }
      }
      assertRunAborts(new ExampleSuite)
    }

    it("should fire SuiteAborted event when afterEach function in BeforeAndAfterEach throws RuntimeException") {
      class ExampleSuite extends AnyFunSuite with BeforeAndAfterEach {
        test("test 1") {}
        override protected def afterEach(): Unit = {
          throw new RuntimeException("oops!")
        }
      }
      assertRunAborts(new ExampleSuite)
    }

    it("should fire SuiteAborted event when afterEach function in BeforeAndAfterEachTestData throws RuntimeException") {
      class ExampleSuite extends AnyFunSuite with BeforeAndAfterEachTestData {
        test("test 1") {}
        override protected def afterEach(testData: TestData): Unit = {
          throw new RuntimeException("oops!")
        }
      }
      assertRunAborts(new ExampleSuite)
    }

  }
}
{ "content_hash": "b74ec9cbee62f5ccb29b3f0b7c01056e", "timestamp": "", "source": "github", "line_count": 120, "max_line_length": 120, "avg_line_length": 31.233333333333334, "alnum_prop": 0.6945037353255069, "repo_name": "scalatest/scalatest", "id": "08c61a5adadb5e7e7b19ae8c8ea8b79324d6b237", "size": "4348", "binary": false, "copies": "3", "ref": "refs/heads/main", "path": "jvm/scalatest-test/src/test/scala/org/scalatest/tools/SuiteRunnerSpec.scala", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "14110" }, { "name": "Java", "bytes": "48211" }, { "name": "JavaScript", "bytes": "19211" }, { "name": "Roff", "bytes": "518" }, { "name": "Scala", "bytes": "30174450" }, { "name": "Shell", "bytes": "11947" } ] }
namespace network {

// Snapshot of one player's controller input for a network tick; serialized
// with boost::serialization and exchanged between peers. Field semantics
// (units of mOrientation/mHeading, Trigger behavior) are defined elsewhere
// in the project — not asserted here.
class PlayerControllerMessage: public Message
{
    friend class ::boost::serialization::access;
public:
    DEFINE_MESSAGE_BASE( PlayerControllerMessage )
    int32_t mActorGUID;        // actor this controller state belongs to
    int32_t mOrientation;
    int32_t mHeading;
    bool mShoot;               // primary fire pressed
    bool mShootAlt;            // alternate fire pressed
    Trigger mUseNormalItem;
    Trigger mUseReload;
    bool mMoving;
    Trigger mActivate;
    // Zero-initializes all fields; Trigger members are default-constructed.
    PlayerControllerMessage()
        : mActorGUID( 0 )
        , mOrientation( 0 )
        , mHeading( 0 )
        , mShoot( false )
        , mShootAlt( false )
        , mUseNormalItem()
        , mUseReload()
        , mMoving( false )
        , mActivate()
    {
    }
    template<class Archive>
    void serialize( Archive& ar, const unsigned int version );
};

// Serializes the base Message plus every field, in declaration order.
// NOTE: the field order here is the wire format — keep it in sync with the
// member declarations above.
template<class Archive>
void PlayerControllerMessage::serialize( Archive& ar, const unsigned int version )
{
    ar& boost::serialization::base_object<Message>( *this );
    ar& mActorGUID;
    ar& mOrientation;
    ar& mHeading;
    ar& mShoot;
    ar& mShootAlt;
    ar& mUseNormalItem;
    ar& mUseReload;
    ar& mMoving;
    ar& mActivate;
}

// Receives PlayerControllerMessage instances and applies them (handler side).
class PlayerControllerMessageHandlerSubSystem: public MessageHandlerSubSystem
{
public:
    DEFINE_SUB_SYSTEM_BASE( PlayerControllerMessageHandlerSubSystem )
    PlayerControllerMessageHandlerSubSystem();
    virtual void Init();
    virtual void Execute( Message const& message );
};

// Periodically emits PlayerControllerMessage instances (sender side).
class PlayerControllerMessageSenderSystem: public MessageSenderSystem
{
public:
    DEFINE_SYSTEM_BASE( PlayerControllerMessageSenderSystem )
    PlayerControllerMessageSenderSystem();
    virtual void Init();
    virtual void Update( double DeltaTime );
};

} // namespace network

REAPING2_CLASS_EXPORT_KEY2( network__PlayerControllerMessage, network::PlayerControllerMessage, "player_c" );

// Closes the include guard opened near the top of the file (outside this view).
#endif//INCLUDED_NETWORK_PLAYER_CONTROLLER_MESSAGE_H
{ "content_hash": "44bc46413ed17ea8c3a967852beeb85b", "timestamp": "", "source": "github", "line_count": 69, "max_line_length": 109, "avg_line_length": 26.492753623188406, "alnum_prop": 0.699671772428884, "repo_name": "MrPepperoni/Reaping2-1", "id": "73a3c4f5a05ad8ae72f6a63551144638df93cdc0", "size": "2111", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/network/player_controller_message.h", "mode": "33188", "license": "mit", "language": [ { "name": "Batchfile", "bytes": "216" }, { "name": "C", "bytes": "1986" }, { "name": "C++", "bytes": "2094703" }, { "name": "CMake", "bytes": "33465" }, { "name": "GLSL", "bytes": "19662" } ] }
package com.facebook.infrastructure.net; /** * Author : Avinash Lakshman ( alakshman@facebook.com) & Prashant Malik ( pmalik@facebook.com ) */ public class MessagingConfig { // The expected time for one message round trip. It does not reflect message processing // time at the receiver. private static int expectedRoundTripTime_ = 400; private static int numberOfPorts_ = 2; private static int threadCount_ = 4; public static int getMessagingThreadCount() { return threadCount_; } public static void setMessagingThreadCount(int threadCount) { threadCount_ = threadCount; } public static void setExpectedRoundTripTime(int roundTripTimeMillis) { if(roundTripTimeMillis > 0 ) expectedRoundTripTime_ = roundTripTimeMillis; } public static int getExpectedRoundTripTime() { return expectedRoundTripTime_; } public static int getConnectionPoolInitialSize() { return ConnectionPoolConfiguration.initialSize_; } public static int getConnectionPoolGrowthFactor() { return ConnectionPoolConfiguration.growthFactor_; } public static int getConnectionPoolMaxSize() { return ConnectionPoolConfiguration.maxSize_; } public static int getConnectionPoolWaitTimeout() { return ConnectionPoolConfiguration.waitTimeout_; } public static int getConnectionPoolMonitorInterval() { return ConnectionPoolConfiguration.monitorInterval_; } public static void setNumberOfPorts(int n) { numberOfPorts_ = n; } public static int getNumberOfPorts() { return numberOfPorts_; } } class ConnectionPoolConfiguration { public static int initialSize_ = 1; public static int growthFactor_ = 1; public static int maxSize_ = 1; public static int waitTimeout_ = 10; public static int monitorInterval_ = 300; }
{ "content_hash": "46807bf2c0940eb2e360da2b3a153c92", "timestamp": "", "source": "github", "line_count": 80, "max_line_length": 95, "avg_line_length": 25.325, "alnum_prop": 0.6638696939782823, "repo_name": "leonhong/cassandra-dev", "id": "6e079ace49efad9cef72f358d19fccf0fcb6bf1a", "size": "2848", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "src/com/facebook/infrastructure/net/MessagingConfig.java", "mode": "33188", "license": "apache-2.0", "language": [] }
End of preview.

No dataset card yet

Downloads last month
5