Dataset Preview
Duplicate
The full dataset viewer is not available (click to read why). Only showing a preview of the rows.
The dataset generation failed
Error code:   DatasetGenerationError
Exception:    ArrowInvalid
Message:      JSON parse error: Missing a closing quotation mark in string. in row 236
Traceback:    Traceback (most recent call last):
                File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/packaged_modules/json/json.py", line 145, in _generate_tables
                  dataset = json.load(f)
                File "/usr/local/lib/python3.9/json/__init__.py", line 293, in load
                  return loads(fp.read(),
                File "/usr/local/lib/python3.9/json/__init__.py", line 346, in loads
                  return _default_decoder.decode(s)
                File "/usr/local/lib/python3.9/json/decoder.py", line 340, in decode
                  raise JSONDecodeError("Extra data", s, end)
              json.decoder.JSONDecodeError: Extra data: line 2 column 1 (char 2095)
              
              During handling of the above exception, another exception occurred:
              
              Traceback (most recent call last):
                File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/builder.py", line 1995, in _prepare_split_single
                  for _, table in generator:
                File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/packaged_modules/json/json.py", line 148, in _generate_tables
                  raise e
                File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/packaged_modules/json/json.py", line 122, in _generate_tables
                  pa_table = paj.read_json(
                File "pyarrow/_json.pyx", line 308, in pyarrow._json.read_json
                File "pyarrow/error.pxi", line 154, in pyarrow.lib.pyarrow_internal_check_status
                File "pyarrow/error.pxi", line 91, in pyarrow.lib.check_status
              pyarrow.lib.ArrowInvalid: JSON parse error: Missing a closing quotation mark in string. in row 236
              
              The above exception was the direct cause of the following exception:
              
              Traceback (most recent call last):
                File "/src/services/worker/src/worker/job_runners/config/parquet_and_info.py", line 1529, in compute_config_parquet_and_info_response
                  parquet_operations = convert_to_parquet(builder)
                File "/src/services/worker/src/worker/job_runners/config/parquet_and_info.py", line 1154, in convert_to_parquet
                  builder.download_and_prepare(
                File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/builder.py", line 1027, in download_and_prepare
                  self._download_and_prepare(
                File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/builder.py", line 1122, in _download_and_prepare
                  self._prepare_split(split_generator, **prepare_split_kwargs)
                File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/builder.py", line 1882, in _prepare_split
                  for job_id, done, content in self._prepare_split_single(
                File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/builder.py", line 2038, in _prepare_split_single
                  raise DatasetGenerationError("An error occurred while generating the dataset") from e
              datasets.exceptions.DatasetGenerationError: An error occurred while generating the dataset

Need help to make the dataset viewer work? Make sure to review how to configure the dataset viewer, and open a discussion for direct support.

text
string
meta
dict
package org.apache.spark.status.api.v1;

import org.apache.spark.util.EnumUtil;

import java.util.Collections;
import java.util.HashSet;
import java.util.Locale;
import java.util.Set;

/**
 * Sort orders accepted when listing tasks, each with optional lower-case
 * aliases (e.g. "runtime" / "-runtime") in addition to the constant name.
 */
public enum TaskSorting {
  ID,
  INCREASING_RUNTIME("runtime"),
  DECREASING_RUNTIME("-runtime");

  // Lower-case alias strings this constant may be referred to by.
  private final Set<String> alternateNames;

  TaskSorting(String... names) {
    alternateNames = new HashSet<>();
    Collections.addAll(alternateNames, names);
  }

  /**
   * Resolves {@code str} first against the alternate names (compared in
   * lower case using Locale.ROOT so the match is locale-independent), and
   * otherwise falls back to a case-insensitive match on the constant names
   * via {@code EnumUtil.parseIgnoreCase}.
   */
  public static TaskSorting fromString(String str) {
    String lower = str.toLowerCase(Locale.ROOT);
    for (TaskSorting sorting : values()) {
      if (sorting.alternateNames.contains(lower)) {
        return sorting;
      }
    }
    return EnumUtil.parseIgnoreCase(TaskSorting.class, str);
  }
}
{ "content_hash": "8a7d7e7141387e9f78ea51fd01bcd2d0", "timestamp": "", "source": "github", "line_count": 33, "max_line_length": 60, "avg_line_length": 23.363636363636363, "alnum_prop": 0.6718547341115434, "repo_name": "wangyixiaohuihui/spark2-annotation", "id": "62856128eb4918169b1c94553cf054568b097e89", "size": "1586", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "core/src/main/java/org/apache/spark/status/api/v1/TaskSorting.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "ANTLR", "bytes": "33815" }, { "name": "Batchfile", "bytes": "24294" }, { "name": "C", "bytes": "1542" }, { "name": "CSS", "bytes": "23957" }, { "name": "HTML", "bytes": "10012" }, { "name": "HiveQL", "bytes": "1828674" }, { "name": "Java", "bytes": "3737029" }, { "name": "JavaScript", "bytes": "143063" }, { "name": "Makefile", "bytes": "7980" }, { "name": "PLpgSQL", "bytes": "9666" }, { "name": "PowerShell", "bytes": "3751" }, { "name": "Python", "bytes": "2248750" }, { "name": "R", "bytes": "1027534" }, { "name": "Roff", "bytes": "14420" }, { "name": "SQLPL", "bytes": "3603" }, { "name": "Scala", "bytes": "22897473" }, { "name": "Shell", "bytes": "156941" }, { "name": "Thrift", "bytes": "33665" }, { "name": "q", "bytes": "147332" } ] }
<?php

namespace Runner\Messages;

/**
 * Message carrying the player's data.
 */
class MeMessage extends Message
{
    /**
     * @var int Record time
     */
    public $time;

    /**
     * @param mixed $message    Raw payload forwarded to the base Message.
     * @param mixed $connection Connection whose player's record time is copied.
     */
    public function __construct($message, $connection)
    {
        // Fix: a required parameter may not follow an optional one (deprecated
        // as of PHP 8.0), so $message no longer defaults to null. This is
        // backward compatible: since $connection was already required, every
        // valid caller was passing both arguments anyway.
        parent::__construct($message, $connection);

        $this->time = $connection->player->time;
    }
}
{ "content_hash": "c947f16fbeb0f4b8c63e57c9b88c9c0c", "timestamp": "", "source": "github", "line_count": 20, "max_line_length": 61, "avg_line_length": 17.85, "alnum_prop": 0.6106442577030813, "repo_name": "gabrielrcouto/palestra-ratchet", "id": "f8dadef7da2bfe6f34aae59a01e9e5d5ae44ce91", "size": "357", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "server/src/Runner/Messages/MeMessage.php", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "157202" }, { "name": "JavaScript", "bytes": "164192" }, { "name": "PHP", "bytes": "36035" }, { "name": "Shell", "bytes": "44" } ] }
ACCEPTED #### According to International Plant Names Index #### Published in null #### Original name null ### Remarks null
{ "content_hash": "0a423013a49e0c5594daee5e67aceeee", "timestamp": "", "source": "github", "line_count": 13, "max_line_length": 31, "avg_line_length": 9.692307692307692, "alnum_prop": 0.7063492063492064, "repo_name": "mdoering/backbone", "id": "4a6dae6a9c99380829d0e73de3ec875586c086c1", "size": "184", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "life/Plantae/Magnoliophyta/Magnoliopsida/Boraginales/Boraginaceae/Lithospermum/Lithospermum intermedium/README.md", "mode": "33188", "license": "apache-2.0", "language": [] }
from __future__ import annotations # isort:skip

import pytest ; pytest

#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------

# Bokeh imports
import bokeh.document as document
from bokeh.core.properties import Instance, Int, Nullable
from bokeh.model import Model

# Module under test
from bokeh.protocol import Protocol # isort:skip

#-----------------------------------------------------------------------------
# Setup
#-----------------------------------------------------------------------------

proto = Protocol()

#-----------------------------------------------------------------------------
# General API
#-----------------------------------------------------------------------------

class AnotherModelInTestPullDoc(Model):
    bar = Int(1)


class SomeModelInTestPullDoc(Model):
    foo = Int(2)
    child = Nullable(Instance(Model))


class TestPullDocument:
    """Tests for creating and parsing PULL-DOC protocol messages."""

    def _sample_doc(self):
        # Two roots: one holding a child model, one standalone.
        doc = document.Document()
        another = AnotherModelInTestPullDoc()
        doc.add_root(SomeModelInTestPullDoc(child=another))
        doc.add_root(SomeModelInTestPullDoc())
        return doc

    def test_create_req(self) -> None:
        proto.create("PULL-DOC-REQ")

    def test_create_reply(self) -> None:
        sample = self._sample_doc()
        proto.create("PULL-DOC-REPLY", 'fakereqid', sample)

    def test_create_reply_then_parse(self) -> None:
        # Round-trip: serialize the sample doc, push into a fresh doc, and
        # check both documents end up with the same number of roots.
        sample = self._sample_doc()
        msg = proto.create("PULL-DOC-REPLY", 'fakereqid', sample)
        copy = document.Document()
        msg.push_to_document(copy)
        assert len(sample.roots) == 2
        assert len(copy.roots) == 2

#-----------------------------------------------------------------------------
# Dev API
#-----------------------------------------------------------------------------

#-----------------------------------------------------------------------------
# Private API
#-----------------------------------------------------------------------------

#-----------------------------------------------------------------------------
# Code
#-----------------------------------------------------------------------------
{ "content_hash": "83d272ca12f38882920a596a32d71811", "timestamp": "", "source": "github", "line_count": 68, "max_line_length": 78, "avg_line_length": 32.970588235294116, "alnum_prop": 0.38447814451382695, "repo_name": "bokeh/bokeh", "id": "52b273c41505c0235585b4dc8edc1ab579121e48", "size": "2746", "binary": false, "copies": "1", "ref": "refs/heads/branch-3.1", "path": "tests/unit/bokeh/protocol/messages/test_pull_doc.py", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "CSS", "bytes": "1884" }, { "name": "Dockerfile", "bytes": "1924" }, { "name": "GLSL", "bytes": "44696" }, { "name": "HTML", "bytes": "53475" }, { "name": "JavaScript", "bytes": "20301" }, { "name": "Less", "bytes": "46376" }, { "name": "Python", "bytes": "4475226" }, { "name": "Shell", "bytes": "7673" }, { "name": "TypeScript", "bytes": "3652153" } ] }
package com.aristocrat.mandrill.requests.Whitelists

import com.aristocrat.mandrill.requests.MandrillRequest

/**
 * Whitelist request payload carrying the API key and an email address.
 * NOTE(review): presumably maps to Mandrill's whitelists endpoint — confirm
 * against the Mandrill API documentation.
 */
case class List(key: String, email: String) extends MandrillRequest
{ "content_hash": "63d2725735b268eaf63a625d282fd3e9", "timestamp": "", "source": "github", "line_count": 5, "max_line_length": 67, "avg_line_length": 35.6, "alnum_prop": 0.848314606741573, "repo_name": "aristocratic/mandrill", "id": "8a97ff11bd305f3d6f00018002abb05c1d549411", "size": "178", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "src/main/scala/com/aristocrat/mandrill/requests/Whitelists/List.scala", "mode": "33188", "license": "mit", "language": [ { "name": "Scala", "bytes": "36583" } ] }
from . import FixtureTest


class AddHikingRoutes(FixtureTest):
    """Checks kind / kind_detail / walking_network values emitted for
    hiking-related roads and paths at various zooms."""

    def test_track(self):
        self.load_fixtures([
            'https://www.openstreetmap.org/way/12188550',
            'https://www.openstreetmap.org/relation/2684235',
        ], clip=self.tile_bbox(12, 654, 1582))

        self.assert_has_feature(
            12, 654, 1582, 'roads',
            {'kind': 'path', 'kind_detail': 'track'})

    def test_steps(self):
        self.load_fixtures(['https://www.openstreetmap.org/way/25292070'])

        self.assert_has_feature(
            14, 2620, 6334, 'roads',
            {'kind': 'path', 'kind_detail': 'steps',
             'name': 'Esmeralda Ave.'})

    def test_footway(self):
        self.load_fixtures(['https://www.openstreetmap.org/way/346093021'])

        self.assert_has_feature(
            15, 5235, 12671, 'roads',
            {'kind': 'path', 'kind_detail': 'footway'})

        self.load_fixtures(['http://www.openstreetmap.org/way/344205837'])

        self.assert_has_feature(
            15, 5234, 12667, 'roads',
            {'kind': 'path', 'kind_detail': 'footway'})

    def test_minor_road_nwn(self):
        # Baker River Road - residential - part of Pacific Northwest
        # Trail (nwn)
        # should be visible at z11
        self.load_fixtures([
            'http://www.openstreetmap.org/way/5260896',
            'http://www.openstreetmap.org/relation/3718820',
        ], clip=self.tile_bbox(11, 331, 706))

        self.assert_has_feature(
            11, 331, 706, 'roads',
            {'kind': 'minor_road', 'kind_detail': 'residential',
             'walking_network': 'nwn'})

    def test_major_road_nwn(self):
        # Mount Baker Highway - secondary - part of Pacific Northwest
        # Trail (nwn)
        # should be visible at z11
        self.load_fixtures([
            'http://www.openstreetmap.org/way/5254587',
            'http://www.openstreetmap.org/relation/3718820',
        ], clip=self.tile_bbox(11, 331, 704))

        self.assert_has_feature(
            11, 331, 704, 'roads',
            {'kind': 'major_road', 'kind_detail': 'secondary',
             'walking_network': 'nwn'})

    def test_unclassified_nwn(self):
        # Whiskey Bend Road - unclassified - part of Pacific Northwest
        # Trail (nwn)
        # should be visible at z11
        self.load_fixtures([
            'http://www.openstreetmap.org/way/5857215',
            'http://www.openstreetmap.org/relation/3718820',
        ], clip=self.tile_bbox(11, 320, 712))

        self.assert_has_feature(
            11, 320, 712, 'roads',
            {'kind': 'minor_road', 'kind_detail': 'unclassified',
             'walking_network': 'nwn'})

    def test_service_nwn(self):
        # Matz Road - service - part of Ice Age National Scenic Trail
        # (nwn)
        # should be visible at z11
        self.load_fixtures([
            'http://www.openstreetmap.org/way/6671321',
            'http://www.openstreetmap.org/relation/2381423',
        ], clip=self.tile_bbox(11, 514, 751))

        self.assert_has_feature(
            11, 514, 751, 'roads',
            {'kind': 'minor_road', 'kind_detail': 'service',
             'walking_network': 'nwn'})

    def test_driveway_nwn(self):
        # Dogbane - service=driveway - part of American Discovery Trail
        # (nwn)
        # should be visible at z11
        self.load_fixtures([
            'http://www.openstreetmap.org/way/16000421',
            'http://www.openstreetmap.org/relation/1544944',
        ], clip=self.tile_bbox(11, 491, 762))

        self.assert_has_feature(
            11, 491, 762, 'roads',
            {'kind': 'minor_road', 'kind_detail': 'service',
             'service': 'driveway', 'walking_network': 'nwn'})
{ "content_hash": "551e6c3ce6f8e9fbc6524b4d929a707e", "timestamp": "", "source": "github", "line_count": 104, "max_line_length": 75, "avg_line_length": 36.15384615384615, "alnum_prop": 0.5561170212765958, "repo_name": "mapzen/vector-datasource", "id": "78bc3368b28a80149deec8292ba593d5e58a262b", "size": "3760", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "integration-test/596-add-hiking-routes.py", "mode": "33188", "license": "mit", "language": [ { "name": "HTML", "bytes": "2839" }, { "name": "PLpgSQL", "bytes": "32195" }, { "name": "Python", "bytes": "268894" }, { "name": "SQLPL", "bytes": "222" }, { "name": "Shell", "bytes": "13283" }, { "name": "XSLT", "bytes": "339" } ] }
// RTM.Images
// RTM.Images.Decoder.ImageSource
// AssemblyInfo.cs
//
// Created by Bartosz Rachwal.
// Copyright (c) 2015 Bartosz Rachwal. The National Institute of Advanced Industrial Science and Technology, Japan. All rights reserved.

using System.Reflection;
using System.Runtime.InteropServices;

// General Information about an assembly is controlled through the following
// set of attributes. Change these attribute values to modify the information
// associated with an assembly.
[assembly: AssemblyTitle("RTM.Images.Decoder.ImageSource")]
[assembly: AssemblyDescription("Created by Bartosz Rachwal")]
[assembly: AssemblyConfiguration("")]
[assembly: AssemblyCompany("The National Institute of Advanced Industrial Science and Technology, Japan")]
[assembly: AssemblyProduct("RT Middleware Images Utility Library")]
[assembly: AssemblyCopyright("Copyright © 2015")]
[assembly: AssemblyTrademark("")]
[assembly: AssemblyCulture("")]

// Setting ComVisible to false makes the types in this assembly not visible
// to COM components. If you need to access a type in this assembly from
// COM, set the ComVisible attribute to true on that type.
[assembly: ComVisible(false)]

// The following GUID is for the ID of the typelib if this project is exposed to COM
[assembly: Guid("217315b5-7b0c-4771-9705-093ef7aaa7ce")]

// Version information for an assembly consists of the following four values:
//
//      Major Version
//      Minor Version
//      Build Number
//      Revision
//
// You can specify all the values or you can default the Build and Revision Numbers
// by using the '*' as shown below:
// [assembly: AssemblyVersion("1.0.*")]
[assembly: AssemblyVersion("1.0.0.0")]
[assembly: AssemblyFileVersion("1.0.0.0")]
{ "content_hash": "370378ce80b7e430a6c5f72381425b84", "timestamp": "", "source": "github", "line_count": 46, "max_line_length": 137, "avg_line_length": 37.93478260869565, "alnum_prop": 0.7524355300859599, "repo_name": "rachwal/RTM-Images", "id": "3153f4e844335f2bc83ff04632b85293a14d7e31", "size": "1748", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "RTM.Images.Decoder.ImageSource/Properties/AssemblyInfo.cs", "mode": "33188", "license": "mit", "language": [ { "name": "C#", "bytes": "27482" } ] }
<?php
/**
 * Manages WordPress comments
 *
 * @package WordPress
 * @subpackage Comment
 */

/**
 * Checks whether a comment passes internal checks to be allowed to add.
 *
 * If comment moderation is set in the administration, then all comments,
 * regardless of their type and whitelist will be set to false. If the number
 * of links exceeds the amount in the administration, then the check fails. If
 * any of the parameter contents match the blacklist of words, then the check
 * fails.
 *
 * If the comment author was approved before, then the comment is
 * automatically whitelisted.
 *
 * If none of the checks fail, then the failback is to set the check to pass
 * (return true).
 *
 * @since 1.2.0
 * @uses $wpdb
 *
 * @param string $author Comment Author's name
 * @param string $email Comment Author's email
 * @param string $url Comment Author's URL
 * @param string $comment Comment contents
 * @param string $user_ip Comment Author's IP address
 * @param string $user_agent Comment Author's User Agent
 * @param string $comment_type Comment type, either user submitted comment,
 *		trackback, or pingback
 * @return bool Whether the checks passed (true) and the comments should be
 *		displayed or set to moderated
 */
function check_comment($author, $email, $url, $comment, $user_ip, $user_agent, $comment_type) {
	global $wpdb;

	if ( 1 == get_option('comment_moderation') )
		return false; // If moderation is set to manual

	$comment = apply_filters( 'comment_text', $comment );

	// Check # of external links
	if ( $max_links = get_option( 'comment_max_links' ) ) {
		$num_links = preg_match_all( '/<a [^>]*href/i', $comment, $out );
		$num_links = apply_filters( 'comment_max_links_url', $num_links, $url ); // provide for counting of $url as a link
		if ( $num_links >= $max_links )
			return false;
	}

	$mod_keys = trim(get_option('moderation_keys'));
	if ( !empty($mod_keys) ) {
		$words = explode("\n", $mod_keys );

		foreach ( (array) $words as $word) {
			$word = trim($word);

			// Skip empty lines
			if ( empty($word) )
				continue;

			// Do some escaping magic so that '#' chars in the
			// spam words don't break things:
			$word = preg_quote($word, '#');

			$pattern = "#$word#i";
			if ( preg_match($pattern, $author) ) return false;
			if ( preg_match($pattern, $email) ) return false;
			if ( preg_match($pattern, $url) ) return false;
			if ( preg_match($pattern, $comment) ) return false;
			if ( preg_match($pattern, $user_ip) ) return false;
			if ( preg_match($pattern, $user_agent) ) return false;
		}
	}

	// Comment whitelisting:
	if ( 1 == get_option('comment_whitelist')) {
		if ( 'trackback' != $comment_type && 'pingback' != $comment_type && $author != '' && $email != '' ) {
			// expected_slashed ($author, $email)
			// NOTE(review): $author/$email are interpolated into the SQL and
			// are only safe if already slashed upstream as the legacy
			// "expected_slashed" convention assumes — verify callers.
			$ok_to_comment = $wpdb->get_var("SELECT comment_approved FROM $wpdb->comments WHERE comment_author = '$author' AND comment_author_email = '$email' and comment_approved = '1' LIMIT 1");
			if ( ( 1 == $ok_to_comment ) &&
				( empty($mod_keys) || false === strpos( $email, $mod_keys) ) )
					return true;
			else
				return false;
		} else {
			return false;
		}
	}

	return true;
}

/**
 * Retrieve the approved comments for post $post_id.
 *
 * @since 2.0.0
 * @uses $wpdb
 *
 * @param int $post_id The ID of the post
 * @return array $comments The approved comments
 */
function get_approved_comments($post_id) {
	global $wpdb;
	return $wpdb->get_results($wpdb->prepare("SELECT * FROM $wpdb->comments WHERE comment_post_ID = %d AND comment_approved = '1' ORDER BY comment_date", $post_id));
}

/**
 * Retrieves comment data given a comment ID or comment object.
 *
 * If an object is passed then the comment data will be cached and then
 * returned after being passed through a filter. If the comment is empty, then
 * the global comment variable will be used, if it is set.
 *
 * @since 2.0.0
 * @uses $wpdb
 *
 * @param object|string|int $comment Comment to retrieve.
 * @param string $output Optional. OBJECT or ARRAY_A or ARRAY_N constants.
 * @return object|array|null Depends on $output value.
 */
function get_comment(&$comment, $output = OBJECT) {
	global $wpdb;
	$null = null;

	if ( empty($comment) ) {
		// No comment given: fall back to the global, if any.
		if ( isset($GLOBALS['comment']) )
			$_comment = & $GLOBALS['comment'];
		else
			$_comment = null;
	} elseif ( is_object($comment) ) {
		wp_cache_add($comment->comment_ID, $comment, 'comment');
		$_comment = $comment;
	} else {
		// Treat as an ID: prefer the global, then the cache, then the DB.
		if ( isset($GLOBALS['comment']) && ($GLOBALS['comment']->comment_ID == $comment) ) {
			$_comment = & $GLOBALS['comment'];
		} elseif ( ! $_comment = wp_cache_get($comment, 'comment') ) {
			$_comment = $wpdb->get_row($wpdb->prepare("SELECT * FROM $wpdb->comments WHERE comment_ID = %d LIMIT 1", $comment));
			if ( ! $_comment )
				return $null;
			wp_cache_add($_comment->comment_ID, $_comment, 'comment');
		}
	}

	$_comment = apply_filters('get_comment', $_comment);

	if ( $output == OBJECT ) {
		return $_comment;
	} elseif ( $output == ARRAY_A ) {
		$__comment = get_object_vars($_comment);
		return $__comment;
	} elseif ( $output == ARRAY_N ) {
		$__comment = array_values(get_object_vars($_comment));
		return $__comment;
	} else {
		return $_comment;
	}
}

/**
 * Retrieve a list of comments.
 *
 * The comment list can be for the blog as a whole or for an individual post.
 *
 * The list of comment arguments are 'status', 'orderby', 'comment_date_gmt',
 * 'order', 'number', 'offset', and 'post_id'.
 *
 * @since 2.7.0
 * @uses $wpdb
 *
 * @param mixed $args Optional. Array or string of options to override defaults.
 * @return array List of comments.
 */
function get_comments( $args = '' ) {
	$query = new WP_Comment_Query;
	return $query->query( $args );
}

/**
 * WordPress Comment Query class.
 *
 * @since 3.1.0
 */
class WP_Comment_Query {
	/**
	 * Metadata query container
	 *
	 * @since 3.5.0
	 * @access public
	 * @var object WP_Meta_Query
	 */
	var $meta_query = false;

	/**
	 * Execute the query
	 *
	 * @since 3.1.0
	 *
	 * @param string|array $query_vars
	 * @return int|array
	 */
	function query( $query_vars ) {
		global $wpdb;

		$defaults = array(
			'author_email' => '',
			'ID' => '',
			'karma' => '',
			'number' => '',
			'offset' => '',
			'orderby' => '',
			'order' => 'DESC',
			'parent' => '',
			'post_ID' => '',
			'post_id' => 0,
			'post_author' => '',
			'post_name' => '',
			'post_parent' => '',
			'post_status' => '',
			'post_type' => '',
			'status' => '',
			'type' => '',
			'user_id' => '',
			'search' => '',
			'count' => false,
			'meta_key' => '',
			'meta_value' => '',
			'meta_query' => '',
		);

		$groupby = '';

		$this->query_vars = wp_parse_args( $query_vars, $defaults );

		// Parse meta query
		$this->meta_query = new WP_Meta_Query();
		$this->meta_query->parse_query_vars( $this->query_vars );

		do_action_ref_array( 'pre_get_comments', array( &$this ) );

		extract( $this->query_vars, EXTR_SKIP );

		// $args can be whatever, only use the args defined in defaults to compute the key
		$key = md5( serialize( compact(array_keys($defaults)) ) );
		$last_changed = wp_cache_get( 'last_changed', 'comment' );
		if ( ! $last_changed )
			$last_changed = wp_cache_set( 'last_changed', 1, 'comment' );
		$cache_key = "get_comments:$key:$last_changed";

		if ( $cache = wp_cache_get( $cache_key, 'comment' ) )
			return $cache;

		$post_id = absint($post_id);

		if ( 'hold' == $status )
			$approved = "comment_approved = '0'";
		elseif ( 'approve' == $status )
			$approved = "comment_approved = '1'";
		elseif ( ! empty( $status ) && 'all' != $status )
			$approved = $wpdb->prepare( "comment_approved = %s", $status );
		else
			$approved = "( comment_approved = '0' OR comment_approved = '1' )";

		$order = ( 'ASC' == strtoupper($order) ) ? 'ASC' : 'DESC';

		if ( ! empty( $orderby ) ) {
			$ordersby = is_array($orderby) ? $orderby : preg_split('/[,\s]/', $orderby);

			// Only allow ordering by a whitelisted set of columns.
			$allowed_keys = array(
				'comment_agent',
				'comment_approved',
				'comment_author',
				'comment_author_email',
				'comment_author_IP',
				'comment_author_url',
				'comment_content',
				'comment_date',
				'comment_date_gmt',
				'comment_ID',
				'comment_karma',
				'comment_parent',
				'comment_post_ID',
				'comment_type',
				'user_id',
			);

			// NOTE(review): $q is never assigned in this method; these reads
			// of $q['meta_key'] look like a latent notice/bug inherited from
			// WP_Query-style code — confirm against upstream before changing.
			if ( ! empty( $this->query_vars['meta_key'] ) ) {
				$allowed_keys[] = $q['meta_key'];
				$allowed_keys[] = 'meta_value';
				$allowed_keys[] = 'meta_value_num';
			}

			$ordersby = array_intersect( $ordersby, $allowed_keys );
			foreach ( $ordersby as $key => $value ) {
				if ( $value == $q['meta_key'] || $value == 'meta_value' ) {
					$ordersby[ $key ] = "$wpdb->commentmeta.meta_value";
				} elseif ( $value == 'meta_value_num' ) {
					$ordersby[ $key ] = "$wpdb->commentmeta.meta_value+0";
				}
			}

			$orderby = empty( $ordersby ) ? 'comment_date_gmt' : implode(', ', $ordersby);
		} else {
			$orderby = 'comment_date_gmt';
		}

		$number = absint($number);
		$offset = absint($offset);

		if ( !empty($number) ) {
			if ( $offset )
				$limits = 'LIMIT ' . $offset . ',' . $number;
			else
				$limits = 'LIMIT ' . $number;
		} else {
			$limits = '';
		}

		if ( $count )
			$fields = 'COUNT(*)';
		else
			$fields = '*';

		$join = '';
		$where = $approved;

		if ( ! empty($post_id) )
			$where .= $wpdb->prepare( ' AND comment_post_ID = %d', $post_id );
		if ( '' !== $author_email )
			$where .= $wpdb->prepare( ' AND comment_author_email = %s', $author_email );
		if ( '' !== $karma )
			$where .= $wpdb->prepare( ' AND comment_karma = %d', $karma );
		if ( 'comment' == $type ) {
			$where .= " AND comment_type = ''";
		} elseif( 'pings' == $type ) {
			$where .= ' AND comment_type IN ("pingback", "trackback")';
		} elseif ( ! empty( $type ) ) {
			$where .= $wpdb->prepare( ' AND comment_type = %s', $type );
		}
		if ( '' !== $parent )
			$where .= $wpdb->prepare( ' AND comment_parent = %d', $parent );
		if ( '' !== $user_id )
			$where .= $wpdb->prepare( ' AND user_id = %d', $user_id );
		if ( '' !== $search )
			$where .= $this->get_search_sql( $search, array( 'comment_author', 'comment_author_email', 'comment_author_url', 'comment_author_IP', 'comment_content' ) );

		// Optional join against the posts table for post_* filters.
		$post_fields = array_filter( compact( array( 'post_author', 'post_name', 'post_parent', 'post_status', 'post_type', ) ) );
		if ( ! empty( $post_fields ) ) {
			$join = "JOIN $wpdb->posts ON $wpdb->posts.ID = $wpdb->comments.comment_post_ID";
			foreach( $post_fields as $field_name => $field_value )
				$where .= $wpdb->prepare( " AND {$wpdb->posts}.{$field_name} = %s", $field_value );
		}

		if ( ! empty( $this->meta_query->queries ) ) {
			$clauses = $this->meta_query->get_sql( 'comment', $wpdb->comments, 'comment_ID', $this );
			$join .= $clauses['join'];
			$where .= $clauses['where'];
			$groupby = "{$wpdb->comments}.comment_ID";
		}

		// Let plugins rewrite any of the query pieces before assembly.
		$pieces = array( 'fields', 'join', 'where', 'orderby', 'order', 'limits', 'groupby' );
		$clauses = apply_filters_ref_array( 'comments_clauses', array( compact( $pieces ), &$this ) );
		foreach ( $pieces as $piece )
			$$piece = isset( $clauses[ $piece ] ) ? $clauses[ $piece ] : '';

		if ( $groupby )
			$groupby = 'GROUP BY ' . $groupby;

		$query = "SELECT $fields FROM $wpdb->comments $join WHERE $where $groupby ORDER BY $orderby $order $limits";

		if ( $count )
			return $wpdb->get_var( $query );

		$comments = $wpdb->get_results( $query );
		$comments = apply_filters_ref_array( 'the_comments', array( $comments, &$this ) );

		wp_cache_add( $cache_key, $comments, 'comment' );

		return $comments;
	}

	/*
	 * Used internally to generate an SQL string for searching across multiple columns
	 *
	 * @access protected
	 * @since 3.1.0
	 *
	 * @param string $string
	 * @param array $cols
	 * @return string
	 */
	function get_search_sql( $string, $cols ) {
		$string = esc_sql( like_escape( $string ) );

		$searches = array();
		foreach ( $cols as $col )
			$searches[] = "$col LIKE '%$string%'";

		return ' AND (' . implode(' OR ', $searches) . ')';
	}
}

/**
 * Retrieve all of the WordPress supported comment statuses.
 *
 * Comments have a limited set of valid status values, this provides the
 * comment status values and descriptions.
 *
 * @package WordPress
 * @subpackage Post
 * @since 2.7.0
 *
 * @return array List of comment statuses.
 */
function get_comment_statuses( ) {
	$status = array(
		'hold'		=> __('Unapproved'),
		/* translators: comment status */
		'approve'	=> _x('Approved', 'adjective'),
		/* translators: comment status */
		'spam'		=> _x('Spam', 'adjective'),
	);

	return $status;
}

/**
 * The date the last comment was modified.
 *
 * @since 1.5.0
 * @uses $wpdb
 *
 * @param string $timezone Which timezone to use in reference to 'gmt', 'blog',
 *		or 'server' locations.
 * @return string Last comment modified date.
 */
function get_lastcommentmodified($timezone = 'server') {
	global $wpdb;
	static $cache_lastcommentmodified = array();

	// Memoized per-timezone for the duration of the request.
	if ( isset($cache_lastcommentmodified[$timezone]) )
		return $cache_lastcommentmodified[$timezone];

	$add_seconds_server = date('Z');

	switch ( strtolower($timezone)) {
		case 'gmt':
			$lastcommentmodified = $wpdb->get_var("SELECT comment_date_gmt FROM $wpdb->comments WHERE comment_approved = '1' ORDER BY comment_date_gmt DESC LIMIT 1");
			break;
		case 'blog':
			$lastcommentmodified = $wpdb->get_var("SELECT comment_date FROM $wpdb->comments WHERE comment_approved = '1' ORDER BY comment_date_gmt DESC LIMIT 1");
			break;
		case 'server':
			$lastcommentmodified = $wpdb->get_var($wpdb->prepare("SELECT DATE_ADD(comment_date_gmt, INTERVAL %s SECOND) FROM $wpdb->comments WHERE comment_approved = '1' ORDER BY comment_date_gmt DESC LIMIT 1", $add_seconds_server));
			break;
	}

	$cache_lastcommentmodified[$timezone] = $lastcommentmodified;

	return $lastcommentmodified;
}

/**
 * The amount of comments in a post or total comments.
 *
 * A lot like {@link wp_count_comments()}, in that they both return comment
 * stats (albeit with different types). The {@link wp_count_comments()} actual
 * caches, but this function does not.
 *
 * @since 2.0.0
 * @uses $wpdb
 *
 * @param int $post_id Optional. Comment amount in post if > 0, else total comments blog wide.
 * @return array The amount of spam, approved, awaiting moderation, and total comments.
 */
function get_comment_count( $post_id = 0 ) {
	global $wpdb;

	$post_id = (int) $post_id;

	$where = '';
	if ( $post_id > 0 ) {
		$where = $wpdb->prepare("WHERE comment_post_ID = %d", $post_id);
	}

	$totals = (array) $wpdb->get_results("
		SELECT comment_approved, COUNT( * ) AS total
		FROM {$wpdb->comments}
		{$where}
		GROUP BY comment_approved
	", ARRAY_A);

	$comment_count = array(
		"approved" => 0,
		"awaiting_moderation" => 0,
		"spam" => 0,
		"total_comments" => 0
	);

	foreach ( $totals as $row ) {
		switch ( $row['comment_approved'] ) {
			case 'spam':
				$comment_count['spam'] = $row['total'];
				$comment_count["total_comments"] += $row['total'];
				break;
			case 1:
				$comment_count['approved'] = $row['total'];
				$comment_count['total_comments'] += $row['total'];
				break;
			case 0:
				$comment_count['awaiting_moderation'] = $row['total'];
				$comment_count['total_comments'] += $row['total'];
				break;
			default:
				break;
		}
	}

	return $comment_count;
}

//
// Comment meta functions
//

/**
 * Add meta data field to a comment.
 *
 * @since 2.9.0
 * @uses add_metadata
 * @link http://codex.wordpress.org/Function_Reference/add_comment_meta
 *
 * @param int $comment_id Comment ID.
 * @param string $meta_key Metadata name.
 * @param mixed $meta_value Metadata value.
 * @param bool $unique Optional, default is false. Whether the same key should not be added.
 * @return bool False for failure. True for success.
 */
function add_comment_meta($comment_id, $meta_key, $meta_value, $unique = false) {
	return add_metadata('comment', $comment_id, $meta_key, $meta_value, $unique);
}

/**
 * Remove metadata matching criteria from a comment.
 *
 * You can match based on the key, or key and value. Removing based on key and
 * value, will keep from removing duplicate metadata with the same key. It also
 * allows removing all metadata matching key, if needed.
 *
 * @since 2.9.0
 * @uses delete_metadata
 * @link http://codex.wordpress.org/Function_Reference/delete_comment_meta
 *
 * @param int $comment_id comment ID
 * @param string $meta_key Metadata name.
 * @param mixed $meta_value Optional. Metadata value.
 * @return bool False for failure. True for success.
 */
function delete_comment_meta($comment_id, $meta_key, $meta_value = '') {
	return delete_metadata('comment', $comment_id, $meta_key, $meta_value);
}

/**
 * Retrieve comment meta field for a comment.
 *
 * @since 2.9.0
 * @uses get_metadata
 * @link http://codex.wordpress.org/Function_Reference/get_comment_meta
 *
 * @param int $comment_id Comment ID.
 * @param string $key Optional. The meta key to retrieve. By default, returns data for all keys.
 * @param bool $single Whether to return a single value.
 * @return mixed Will be an array if $single is false. Will be value of meta data field if $single
 *  is true.
 */
function get_comment_meta($comment_id, $key = '', $single = false) {
	return get_metadata('comment', $comment_id, $key, $single);
}

/**
 * Update comment meta field based on comment ID.
 *
 * Use the $prev_value parameter to differentiate between meta fields with the
 * same key and comment ID.
 *
 * If the meta field for the comment does not exist, it will be added.
 *
 * @since 2.9.0
 * @uses update_metadata
 * @link http://codex.wordpress.org/Function_Reference/update_comment_meta
 *
 * @param int $comment_id Comment ID.
 * @param string $meta_key Metadata key.
 * @param mixed $meta_value Metadata value.
 * @param mixed $prev_value Optional. Previous value to check before removing.
 * @return bool False on failure, true if success.
 */
function update_comment_meta($comment_id, $meta_key, $meta_value, $prev_value = '') {
	return update_metadata('comment', $comment_id, $meta_key, $meta_value, $prev_value);
}

/**
 * Sets the cookies used to store an unauthenticated commentator's identity. Typically used
 * to recall previous comments by this commentator that are still held in moderation.
 *
 * @param object $comment Comment object.
 * @param object $user Comment author's object.
 *
 * @since 3.4.0
 */
function wp_set_comment_cookies($comment, $user) {
	// Logged-in users don't need identity cookies.
	if ( $user->exists() )
		return;

	$comment_cookie_lifetime = apply_filters('comment_cookie_lifetime', 30000000);
	setcookie('comment_author_' . COOKIEHASH, $comment->comment_author, time() + $comment_cookie_lifetime, COOKIEPATH, COOKIE_DOMAIN);
	setcookie('comment_author_email_' . COOKIEHASH, $comment->comment_author_email, time() + $comment_cookie_lifetime, COOKIEPATH, COOKIE_DOMAIN);
	setcookie('comment_author_url_' . COOKIEHASH, esc_url($comment->comment_author_url), time() + $comment_cookie_lifetime, COOKIEPATH, COOKIE_DOMAIN);
}

/**
 * Sanitizes the cookies sent to the user already.
 *
 * Will only do anything if the cookies have already been created for the user.
 * Mostly used after cookies had been sent to use elsewhere.
 *
 * @since 2.0.4
 */
function sanitize_comment_cookies() {
	if ( isset($_COOKIE['comment_author_'.COOKIEHASH]) ) {
		$comment_author = apply_filters('pre_comment_author_name', $_COOKIE['comment_author_'.COOKIEHASH]);
		$comment_author = stripslashes($comment_author);
		$comment_author = esc_attr($comment_author);
		$_COOKIE['comment_author_'.COOKIEHASH] = $comment_author;
	}

	if ( isset($_COOKIE['comment_author_email_'.COOKIEHASH]) ) {
		$comment_author_email = apply_filters('pre_comment_author_email', $_COOKIE['comment_author_email_'.COOKIEHASH]);
		$comment_author_email = stripslashes($comment_author_email);
		$comment_author_email = esc_attr($comment_author_email);
		$_COOKIE['comment_author_email_'.COOKIEHASH] = $comment_author_email;
	}

	if ( isset($_COOKIE['comment_author_url_'.COOKIEHASH]) ) {
		$comment_author_url = apply_filters('pre_comment_author_url', $_COOKIE['comment_author_url_'.COOKIEHASH]);
		$comment_author_url = stripslashes($comment_author_url);
		$_COOKIE['comment_author_url_'.COOKIEHASH] = $comment_author_url;
	}
}

/**
 * Validates whether this comment is allowed to be made.
 *
 * @since 2.0.0
 * @uses $wpdb
 * @uses apply_filters() Calls 'pre_comment_approved' hook on the type of comment
 * @uses apply_filters() Calls 'comment_duplicate_trigger' hook on commentdata.
 * @uses do_action() Calls 'check_comment_flood' hook on $comment_author_IP, $comment_author_email, and $comment_date_gmt
 *
 * @param array $commentdata Contains information on the comment
 * @return mixed Signifies the approval status (0|1|'spam')
 */
function wp_allow_comment($commentdata) {
	global $wpdb;
	extract($commentdata, EXTR_SKIP);

	// Simple duplicate check
	// expected_slashed ($comment_post_ID, $comment_author, $comment_author_email, $comment_content)
	// NOTE(review): values are interpolated directly into SQL and are expected
	// to already be slashed/escaped by the caller — confirm upstream callers
	// always go through wp_filter_comment()/expected-slashed conventions.
	$dupe = "SELECT comment_ID FROM $wpdb->comments WHERE comment_post_ID = '$comment_post_ID' AND comment_parent = '$comment_parent' AND comment_approved != 'trash' AND ( comment_author = '$comment_author' ";
	if ( $comment_author_email )
		$dupe .= "OR comment_author_email = '$comment_author_email' ";
	$dupe .= ") AND comment_content = '$comment_content' LIMIT 1";
	if ( $wpdb->get_var($dupe) ) {
		do_action( 'comment_duplicate_trigger', $commentdata );
		// AJAX requests get a plain die(); normal requests get the full wp_die() page.
		if ( defined('DOING_AJAX') )
			die( __('Duplicate comment detected; it looks as though you&#8217;ve already said that!') );
		wp_die( __('Duplicate comment detected; it looks as though you&#8217;ve already said that!') );
	}

	// Let flood-control implementations (check_comment_flood_db below by default) run.
	do_action( 'check_comment_flood', $comment_author_IP, $comment_author_email, $comment_date_gmt );

	if ( ! empty( $user_id ) ) {
		$user = get_userdata( $user_id );
		$post_author = $wpdb->get_var($wpdb->prepare("SELECT post_author FROM $wpdb->posts WHERE ID = %d LIMIT 1", $comment_post_ID));
	}

	if ( isset( $user ) && ( $user_id == $post_author || $user->has_cap( 'moderate_comments' ) ) ) {
		// The author and the admins get respect.
		$approved = 1;
	} else {
		// Everyone else's comments will be checked.
		if ( check_comment($comment_author, $comment_author_email, $comment_author_url, $comment_content, $comment_author_IP, $comment_agent, $comment_type) )
			$approved = 1;
		else
			$approved = 0;
		// Blacklist match overrides everything: mark as spam.
		if ( wp_blacklist_check($comment_author, $comment_author_email, $comment_author_url, $comment_content, $comment_author_IP, $comment_agent) )
			$approved = 'spam';
	}

	// Plugins get the final say on the approval status.
	$approved = apply_filters( 'pre_comment_approved', $approved, $commentdata );
	return $approved;
}

/**
 * Check whether comment flooding is occurring.
 *
 * Won't run, if current user can manage options, so to not block
 * administrators.
 *
 * @since 2.3.0
 * @uses $wpdb
 * @uses apply_filters() Calls 'comment_flood_filter' filter with first
 *		parameter false, last comment timestamp, new comment timestamp.
 * @uses do_action() Calls 'comment_flood_trigger' action with parameters with
 *		last comment timestamp and new comment timestamp.
 *
 * @param string $ip Comment IP.
 * @param string $email Comment author email address.
 * @param string $date MySQL time string.
 */
function check_comment_flood_db( $ip, $email, $date ) {
	global $wpdb;
	if ( current_user_can( 'manage_options' ) )
		return; // don't throttle admins
	$hour_ago = gmdate( 'Y-m-d H:i:s', time() - HOUR_IN_SECONDS );
	// Most recent comment in the last hour from the same IP or email, if any.
	if ( $lasttime = $wpdb->get_var( $wpdb->prepare( "SELECT `comment_date_gmt` FROM `$wpdb->comments` WHERE `comment_date_gmt` >= %s AND ( `comment_author_IP` = %s OR `comment_author_email` = %s ) ORDER BY `comment_date_gmt` DESC LIMIT 1", $hour_ago, $ip, $email ) ) ) {
		$time_lastcomment = mysql2date('U', $lasttime, false);
		$time_newcomment  = mysql2date('U', $date, false);
		// 'comment_flood_filter' (wp_throttle_comment_flood by default) decides.
		$flood_die = apply_filters('comment_flood_filter', false, $time_lastcomment, $time_newcomment);
		if ( $flood_die ) {
			do_action('comment_flood_trigger', $time_lastcomment, $time_newcomment);

			if ( defined('DOING_AJAX') )
				die( __('You are posting comments too quickly. Slow down.') );

			wp_die( __('You are posting comments too quickly. Slow down.'), '', array('response' => 403) );
		}
	}
}

/**
 * Separates an array of comments into an array keyed by comment_type.
 *
 * @since 2.7.0
 *
 * @param array $comments Array of comments
 * @return array Array of comments keyed by comment_type.
 */
function separate_comments(&$comments) {
	$comments_by_type = array('comment' => array(), 'trackback' => array(), 'pingback' => array(), 'pings' => array());
	$count = count($comments);
	for ( $i = 0; $i < $count; $i++ ) {
		$type = $comments[$i]->comment_type;
		// Untyped rows are regular comments.
		if ( empty($type) )
			$type = 'comment';
		$comments_by_type[$type][] = &$comments[$i];
		// The 'pings' bucket aggregates trackbacks and pingbacks together.
		if ( 'trackback' == $type || 'pingback' == $type )
			$comments_by_type['pings'][] = &$comments[$i];
	}
	return $comments_by_type;
}

/**
 * Calculate the total number of comment pages.
 *
 * @since 2.7.0
 * @uses get_query_var() Used to fill in the default for $per_page parameter.
 * @uses get_option() Used to fill in defaults for parameters.
 * @uses Walker_Comment
 *
 * @param array $comments Optional array of comment objects. Defaults to $wp_query->comments
 * @param int $per_page Optional comments per page.
 * @param boolean $threaded Optional control over flat or threaded comments.
 * @return int Number of comment pages.
*/ function get_comment_pages_count( $comments = null, $per_page = null, $threaded = null ) { global $wp_query; if ( null === $comments && null === $per_page && null === $threaded && !empty($wp_query->max_num_comment_pages) ) return $wp_query->max_num_comment_pages; if ( !$comments || !is_array($comments) ) $comments = $wp_query->comments; if ( empty($comments) ) return 0; if ( !isset($per_page) ) $per_page = (int) get_query_var('comments_per_page'); if ( 0 === $per_page ) $per_page = (int) get_option('comments_per_page'); if ( 0 === $per_page ) return 1; if ( !isset($threaded) ) $threaded = get_option('thread_comments'); if ( $threaded ) { $walker = new Walker_Comment; $count = ceil( $walker->get_number_of_root_elements( $comments ) / $per_page ); } else { $count = ceil( count( $comments ) / $per_page ); } return $count; } /** * Calculate what page number a comment will appear on for comment paging. * * @since 2.7.0 * @uses get_comment() Gets the full comment of the $comment_ID parameter. * @uses get_option() Get various settings to control function and defaults. * @uses get_page_of_comment() Used to loop up to top level comment. * * @param int $comment_ID Comment ID. * @param array $args Optional args. * @return int|null Comment page number or null on error. 
*/ function get_page_of_comment( $comment_ID, $args = array() ) { global $wpdb; if ( !$comment = get_comment( $comment_ID ) ) return; $defaults = array( 'type' => 'all', 'page' => '', 'per_page' => '', 'max_depth' => '' ); $args = wp_parse_args( $args, $defaults ); if ( '' === $args['per_page'] && get_option('page_comments') ) $args['per_page'] = get_query_var('comments_per_page'); if ( empty($args['per_page']) ) { $args['per_page'] = 0; $args['page'] = 0; } if ( $args['per_page'] < 1 ) return 1; if ( '' === $args['max_depth'] ) { if ( get_option('thread_comments') ) $args['max_depth'] = get_option('thread_comments_depth'); else $args['max_depth'] = -1; } // Find this comment's top level parent if threading is enabled if ( $args['max_depth'] > 1 && 0 != $comment->comment_parent ) return get_page_of_comment( $comment->comment_parent, $args ); $allowedtypes = array( 'comment' => '', 'pingback' => 'pingback', 'trackback' => 'trackback', ); $comtypewhere = ( 'all' != $args['type'] && isset($allowedtypes[$args['type']]) ) ? " AND comment_type = '" . $allowedtypes[$args['type']] . "'" : ''; // Count comments older than this one $oldercoms = $wpdb->get_var( $wpdb->prepare( "SELECT COUNT(comment_ID) FROM $wpdb->comments WHERE comment_post_ID = %d AND comment_parent = 0 AND comment_approved = '1' AND comment_date_gmt < '%s'" . $comtypewhere, $comment->comment_post_ID, $comment->comment_date_gmt ) ); // No older comments? Then it's page #1. if ( 0 == $oldercoms ) return 1; // Divide comments older than this one by comments per page to get this comment's page number return ceil( ( $oldercoms + 1 ) / $args['per_page'] ); } /** * Does comment contain blacklisted characters or words. * * @since 1.5.0 * @uses do_action() Calls 'wp_blacklist_check' hook for all parameters. 
* * @param string $author The author of the comment * @param string $email The email of the comment * @param string $url The url used in the comment * @param string $comment The comment content * @param string $user_ip The comment author IP address * @param string $user_agent The author's browser user agent * @return bool True if comment contains blacklisted content, false if comment does not */ function wp_blacklist_check($author, $email, $url, $comment, $user_ip, $user_agent) { do_action('wp_blacklist_check', $author, $email, $url, $comment, $user_ip, $user_agent); $mod_keys = trim( get_option('blacklist_keys') ); if ( '' == $mod_keys ) return false; // If moderation keys are empty $words = explode("\n", $mod_keys ); foreach ( (array) $words as $word ) { $word = trim($word); // Skip empty lines if ( empty($word) ) { continue; } // Do some escaping magic so that '#' chars in the // spam words don't break things: $word = preg_quote($word, '#'); $pattern = "#$word#i"; if ( preg_match($pattern, $author) || preg_match($pattern, $email) || preg_match($pattern, $url) || preg_match($pattern, $comment) || preg_match($pattern, $user_ip) || preg_match($pattern, $user_agent) ) return true; } return false; } /** * Retrieve total comments for blog or single post. * * The properties of the returned object contain the 'moderated', 'approved', * and spam comments for either the entire blog or single post. Those properties * contain the amount of comments that match the status. The 'total_comments' * property contains the integer of total comments. * * The comment stats are cached and then retrieved, if they already exist in the * cache. * * @since 2.5.0 * * @param int $post_id Optional. Post ID. * @return object Comment stats. 
*/ function wp_count_comments( $post_id = 0 ) { global $wpdb; $post_id = (int) $post_id; $stats = apply_filters('wp_count_comments', array(), $post_id); if ( !empty($stats) ) return $stats; $count = wp_cache_get("comments-{$post_id}", 'counts'); if ( false !== $count ) return $count; $where = ''; if ( $post_id > 0 ) $where = $wpdb->prepare( "WHERE comment_post_ID = %d", $post_id ); $count = $wpdb->get_results( "SELECT comment_approved, COUNT( * ) AS num_comments FROM {$wpdb->comments} {$where} GROUP BY comment_approved", ARRAY_A ); $total = 0; $approved = array('0' => 'moderated', '1' => 'approved', 'spam' => 'spam', 'trash' => 'trash', 'post-trashed' => 'post-trashed'); foreach ( (array) $count as $row ) { // Don't count post-trashed toward totals if ( 'post-trashed' != $row['comment_approved'] && 'trash' != $row['comment_approved'] ) $total += $row['num_comments']; if ( isset( $approved[$row['comment_approved']] ) ) $stats[$approved[$row['comment_approved']]] = $row['num_comments']; } $stats['total_comments'] = $total; foreach ( $approved as $key ) { if ( empty($stats[$key]) ) $stats[$key] = 0; } $stats = (object) $stats; wp_cache_set("comments-{$post_id}", $stats, 'counts'); return $stats; } /** * Trashes or deletes a comment. * * The comment is moved to trash instead of permanently deleted unless trash is * disabled, item is already in the trash, or $force_delete is true. * * The post comment count will be updated if the comment was approved and has a * post ID available. 
 *
 * @since 2.0.0
 * @uses $wpdb
 * @uses do_action() Calls 'delete_comment' hook on comment ID
 * @uses do_action() Calls 'deleted_comment' hook on comment ID after deletion, on success
 * @uses do_action() Calls 'wp_set_comment_status' hook on comment ID with 'delete' set for the second parameter
 * @uses wp_transition_comment_status() Passes new and old comment status along with $comment object
 *
 * @param int $comment_id Comment ID
 * @param bool $force_delete Whether to bypass trash and force deletion. Default is false.
 * @return bool False if delete comment query failure, true on success.
 */
function wp_delete_comment($comment_id, $force_delete = false) {
	global $wpdb;
	if (!$comment = get_comment($comment_id))
		return false;

	// Unless forced (or trash unavailable), route live comments to the trash instead.
	if ( !$force_delete && EMPTY_TRASH_DAYS && !in_array( wp_get_comment_status($comment_id), array( 'trash', 'spam' ) ) )
		return wp_trash_comment($comment_id);

	do_action('delete_comment', $comment_id);

	// Move children up a level.
	$children = $wpdb->get_col( $wpdb->prepare("SELECT comment_ID FROM $wpdb->comments WHERE comment_parent = %d", $comment_id) );
	if ( !empty($children) ) {
		$wpdb->update($wpdb->comments, array('comment_parent' => $comment->comment_parent), array('comment_parent' => $comment_id));
		clean_comment_cache($children);
	}

	// Delete metadata
	$meta_ids = $wpdb->get_col( $wpdb->prepare( "SELECT meta_id FROM $wpdb->commentmeta WHERE comment_id = %d", $comment_id ) );
	foreach ( $meta_ids as $mid )
		delete_metadata_by_mid( 'comment', $mid );

	if ( ! $wpdb->delete( $wpdb->comments, array( 'comment_ID' => $comment_id ) ) )
		return false;
	do_action('deleted_comment', $comment_id);

	// Keep the parent post's cached comment count accurate.
	$post_id = $comment->comment_post_ID;
	if ( $post_id && $comment->comment_approved == 1 )
		wp_update_comment_count($post_id);

	clean_comment_cache($comment_id);

	// Fire the status hooks with the pre-deletion approval state.
	do_action('wp_set_comment_status', $comment_id, 'delete');
	wp_transition_comment_status('delete', $comment->comment_approved, $comment);
	return true;
}

/**
 * Moves a comment to the Trash
 *
 * If trash is disabled, comment is permanently deleted.
 *
 * @since 2.9.0
 * @uses do_action() on 'trash_comment' before trashing
 * @uses do_action() on 'trashed_comment' after trashing
 * @uses wp_delete_comment() if trash is disabled
 *
 * @param int $comment_id Comment ID.
 * @return mixed False on failure
 */
function wp_trash_comment($comment_id) {
	// Trash disabled: delete permanently.
	if ( !EMPTY_TRASH_DAYS )
		return wp_delete_comment($comment_id, true);

	if ( !$comment = get_comment($comment_id) )
		return false;

	do_action('trash_comment', $comment_id);

	if ( wp_set_comment_status($comment_id, 'trash') ) {
		// Remember the pre-trash status and time so untrash can restore them.
		add_comment_meta($comment_id, '_wp_trash_meta_status', $comment->comment_approved);
		add_comment_meta($comment_id, '_wp_trash_meta_time', time() );
		do_action('trashed_comment', $comment_id);
		return true;
	}

	return false;
}

/**
 * Removes a comment from the Trash
 *
 * @since 2.9.0
 * @uses do_action() on 'untrash_comment' before untrashing
 * @uses do_action() on 'untrashed_comment' after untrashing
 *
 * @param int $comment_id Comment ID.
 * @return mixed False on failure
 */
function wp_untrash_comment($comment_id) {
	if ( ! (int)$comment_id )
		return false;

	do_action('untrash_comment', $comment_id);

	// Restore the status saved when the comment was trashed; default to pending ('0').
	$status = (string) get_comment_meta($comment_id, '_wp_trash_meta_status', true);
	if ( empty($status) )
		$status = '0';

	if ( wp_set_comment_status($comment_id, $status) ) {
		delete_comment_meta($comment_id, '_wp_trash_meta_time');
		delete_comment_meta($comment_id, '_wp_trash_meta_status');
		do_action('untrashed_comment', $comment_id);
		return true;
	}

	return false;
}

/**
 * Marks a comment as Spam
 *
 * @since 2.9.0
 * @uses do_action() on 'spam_comment' before spamming
 * @uses do_action() on 'spammed_comment' after spamming
 *
 * @param int $comment_id Comment ID.
 * @return mixed False on failure
 */
function wp_spam_comment($comment_id) {
	if ( !$comment = get_comment($comment_id) )
		return false;

	do_action('spam_comment', $comment_id);

	if ( wp_set_comment_status($comment_id, 'spam') ) {
		// Remember the pre-spam status so unspam can restore it.
		add_comment_meta($comment_id, '_wp_trash_meta_status', $comment->comment_approved);
		do_action('spammed_comment', $comment_id);
		return true;
	}

	return false;
}

/**
 * Removes a comment from the Spam
 *
 * @since 2.9.0
 * @uses do_action() on 'unspam_comment' before unspamming
 * @uses do_action() on 'unspammed_comment' after unspamming
 *
 * @param int $comment_id Comment ID.
 * @return mixed False on failure
 */
function wp_unspam_comment($comment_id) {
	if ( ! (int)$comment_id )
		return false;

	do_action('unspam_comment', $comment_id);

	// Restore the status saved when the comment was marked spam; default to pending ('0').
	$status = (string) get_comment_meta($comment_id, '_wp_trash_meta_status', true);
	if ( empty($status) )
		$status = '0';

	if ( wp_set_comment_status($comment_id, $status) ) {
		delete_comment_meta($comment_id, '_wp_trash_meta_status');
		do_action('unspammed_comment', $comment_id);
		return true;
	}

	return false;
}

/**
 * The status of a comment by ID.
 *
 * @since 1.0.0
 *
 * @param int $comment_id Comment ID
 * @return string|bool Status might be 'trash', 'approved', 'unapproved', 'spam'. False on failure.
*/ function wp_get_comment_status($comment_id) { $comment = get_comment($comment_id); if ( !$comment ) return false; $approved = $comment->comment_approved; if ( $approved == null ) return false; elseif ( $approved == '1' ) return 'approved'; elseif ( $approved == '0' ) return 'unapproved'; elseif ( $approved == 'spam' ) return 'spam'; elseif ( $approved == 'trash' ) return 'trash'; else return false; } /** * Call hooks for when a comment status transition occurs. * * Calls hooks for comment status transitions. If the new comment status is not the same * as the previous comment status, then two hooks will be ran, the first is * 'transition_comment_status' with new status, old status, and comment data. The * next action called is 'comment_OLDSTATUS_to_NEWSTATUS' the NEWSTATUS is the * $new_status parameter and the OLDSTATUS is $old_status parameter; it has the * comment data. * * The final action will run whether or not the comment statuses are the same. The * action is named 'comment_NEWSTATUS_COMMENTTYPE', NEWSTATUS is from the $new_status * parameter and COMMENTTYPE is comment_type comment data. * * @since 2.7.0 * * @param string $new_status New comment status. * @param string $old_status Previous comment status. * @param object $comment Comment data. 
*/ function wp_transition_comment_status($new_status, $old_status, $comment) { // Translate raw statuses to human readable formats for the hooks // This is not a complete list of comment status, it's only the ones that need to be renamed $comment_statuses = array( 0 => 'unapproved', 'hold' => 'unapproved', // wp_set_comment_status() uses "hold" 1 => 'approved', 'approve' => 'approved', // wp_set_comment_status() uses "approve" ); if ( isset($comment_statuses[$new_status]) ) $new_status = $comment_statuses[$new_status]; if ( isset($comment_statuses[$old_status]) ) $old_status = $comment_statuses[$old_status]; // Call the hooks if ( $new_status != $old_status ) { do_action('transition_comment_status', $new_status, $old_status, $comment); do_action("comment_{$old_status}_to_{$new_status}", $comment); } do_action("comment_{$new_status}_{$comment->comment_type}", $comment->comment_ID, $comment); } /** * Get current commenter's name, email, and URL. * * Expects cookies content to already be sanitized. User of this function might * wish to recheck the returned array for validity. * * @see sanitize_comment_cookies() Use to sanitize cookies * * @since 2.0.4 * * @return array Comment author, email, url respectively. */ function wp_get_current_commenter() { // Cookies should already be sanitized. $comment_author = ''; if ( isset($_COOKIE['comment_author_'.COOKIEHASH]) ) $comment_author = $_COOKIE['comment_author_'.COOKIEHASH]; $comment_author_email = ''; if ( isset($_COOKIE['comment_author_email_'.COOKIEHASH]) ) $comment_author_email = $_COOKIE['comment_author_email_'.COOKIEHASH]; $comment_author_url = ''; if ( isset($_COOKIE['comment_author_url_'.COOKIEHASH]) ) $comment_author_url = $_COOKIE['comment_author_url_'.COOKIEHASH]; return apply_filters('wp_get_current_commenter', compact('comment_author', 'comment_author_email', 'comment_author_url')); } /** * Inserts a comment to the database. 
 *
 * The available comment data key names are 'comment_author_IP', 'comment_date',
 * 'comment_date_gmt', 'comment_parent', 'comment_approved', and 'user_id'.
 *
 * @since 2.0.0
 * @uses $wpdb
 *
 * @param array $commentdata Contains information on the comment.
 * @return int The new comment's ID.
 */
function wp_insert_comment($commentdata) {
	global $wpdb;
	// Unslash and pull the comment fields into local variables.
	extract(stripslashes_deep($commentdata), EXTR_SKIP);

	// Fill in defaults for any optional fields the caller omitted.
	if ( ! isset($comment_author_IP) )
		$comment_author_IP = '';
	if ( ! isset($comment_date) )
		$comment_date = current_time('mysql');
	if ( ! isset($comment_date_gmt) )
		$comment_date_gmt = get_gmt_from_date($comment_date);
	if ( ! isset($comment_parent) )
		$comment_parent = 0;
	if ( ! isset($comment_approved) )
		$comment_approved = 1;
	if ( ! isset($comment_karma) )
		$comment_karma = 0;
	if ( ! isset($user_id) )
		$user_id = 0;
	if ( ! isset($comment_type) )
		$comment_type = '';

	$data = compact('comment_post_ID', 'comment_author', 'comment_author_email', 'comment_author_url', 'comment_author_IP', 'comment_date', 'comment_date_gmt', 'comment_content', 'comment_karma', 'comment_approved', 'comment_agent', 'comment_type', 'comment_parent', 'user_id');
	$wpdb->insert($wpdb->comments, $data);

	$id = (int) $wpdb->insert_id;
	// Approved comments bump the post's cached comment count immediately.
	if ( $comment_approved == 1 )
		wp_update_comment_count($comment_post_ID);
	$comment = get_comment($id);
	do_action('wp_insert_comment', $id, $comment);

	// Invalidate comment query caches by bumping the 'last_changed' counter.
	if ( function_exists( 'wp_cache_incr' ) ) {
		wp_cache_incr( 'last_changed', 1, 'comment' );
	} else {
		$last_changed = wp_cache_get( 'last_changed', 'comment' );
		wp_cache_set( 'last_changed', $last_changed + 1, 'comment' );
	}

	return $id;
}

/**
 * Filters and sanitizes comment data.
 *
 * Sets the comment data 'filtered' field to true when finished. This can be
 * checked as to whether the comment should be filtered and to keep from
 * filtering the same comment more than once.
* * @since 2.0.0 * @uses apply_filters() Calls 'pre_user_id' hook on comment author's user ID * @uses apply_filters() Calls 'pre_comment_user_agent' hook on comment author's user agent * @uses apply_filters() Calls 'pre_comment_author_name' hook on comment author's name * @uses apply_filters() Calls 'pre_comment_content' hook on the comment's content * @uses apply_filters() Calls 'pre_comment_user_ip' hook on comment author's IP * @uses apply_filters() Calls 'pre_comment_author_url' hook on comment author's URL * @uses apply_filters() Calls 'pre_comment_author_email' hook on comment author's email address * * @param array $commentdata Contains information on the comment. * @return array Parsed comment information. */ function wp_filter_comment($commentdata) { if ( isset($commentdata['user_ID']) ) $commentdata['user_id'] = apply_filters('pre_user_id', $commentdata['user_ID']); elseif ( isset($commentdata['user_id']) ) $commentdata['user_id'] = apply_filters('pre_user_id', $commentdata['user_id']); $commentdata['comment_agent'] = apply_filters('pre_comment_user_agent', ( isset( $commentdata['comment_agent'] ) ? $commentdata['comment_agent'] : '' ) ); $commentdata['comment_author'] = apply_filters('pre_comment_author_name', $commentdata['comment_author']); $commentdata['comment_content'] = apply_filters('pre_comment_content', $commentdata['comment_content']); $commentdata['comment_author_IP'] = apply_filters('pre_comment_user_ip', $commentdata['comment_author_IP']); $commentdata['comment_author_url'] = apply_filters('pre_comment_author_url', $commentdata['comment_author_url']); $commentdata['comment_author_email'] = apply_filters('pre_comment_author_email', $commentdata['comment_author_email']); $commentdata['filtered'] = true; return $commentdata; } /** * Whether comment should be blocked because of comment flood. * * @since 2.1.0 * * @param bool $block Whether plugin has already blocked comment. * @param int $time_lastcomment Timestamp for last comment. 
* @param int $time_newcomment Timestamp for new comment. * @return bool Whether comment should be blocked. */ function wp_throttle_comment_flood($block, $time_lastcomment, $time_newcomment) { if ( $block ) // a plugin has already blocked... we'll let that decision stand return $block; if ( ($time_newcomment - $time_lastcomment) < 15 ) return true; return false; } /** * Adds a new comment to the database. * * Filters new comment to ensure that the fields are sanitized and valid before * inserting comment into database. Calls 'comment_post' action with comment ID * and whether comment is approved by WordPress. Also has 'preprocess_comment' * filter for processing the comment data before the function handles it. * * We use REMOTE_ADDR here directly. If you are behind a proxy, you should ensure * that it is properly set, such as in wp-config.php, for your environment. * See {@link http://core.trac.wordpress.org/ticket/9235} * * @since 1.5.0 * @uses apply_filters() Calls 'preprocess_comment' hook on $commentdata parameter array before processing * @uses do_action() Calls 'comment_post' hook on $comment_ID returned from adding the comment and if the comment was approved. * @uses wp_filter_comment() Used to filter comment before adding comment. * @uses wp_allow_comment() checks to see if comment is approved. * @uses wp_insert_comment() Does the actual comment insertion to the database. * * @param array $commentdata Contains information on the comment. * @return int The ID of the comment after adding. */ function wp_new_comment( $commentdata ) { $commentdata = apply_filters('preprocess_comment', $commentdata); $commentdata['comment_post_ID'] = (int) $commentdata['comment_post_ID']; if ( isset($commentdata['user_ID']) ) $commentdata['user_id'] = $commentdata['user_ID'] = (int) $commentdata['user_ID']; elseif ( isset($commentdata['user_id']) ) $commentdata['user_id'] = (int) $commentdata['user_id']; $commentdata['comment_parent'] = isset($commentdata['comment_parent']) ? 
absint($commentdata['comment_parent']) : 0; $parent_status = ( 0 < $commentdata['comment_parent'] ) ? wp_get_comment_status($commentdata['comment_parent']) : ''; $commentdata['comment_parent'] = ( 'approved' == $parent_status || 'unapproved' == $parent_status ) ? $commentdata['comment_parent'] : 0; $commentdata['comment_author_IP'] = preg_replace( '/[^0-9a-fA-F:., ]/', '',$_SERVER['REMOTE_ADDR'] ); $commentdata['comment_agent'] = substr($_SERVER['HTTP_USER_AGENT'], 0, 254); $commentdata['comment_date'] = current_time('mysql'); $commentdata['comment_date_gmt'] = current_time('mysql', 1); $commentdata = wp_filter_comment($commentdata); $commentdata['comment_approved'] = wp_allow_comment($commentdata); $comment_ID = wp_insert_comment($commentdata); do_action('comment_post', $comment_ID, $commentdata['comment_approved']); if ( 'spam' !== $commentdata['comment_approved'] ) { // If it's spam save it silently for later crunching if ( '0' == $commentdata['comment_approved'] ) wp_notify_moderator($comment_ID); $post = get_post($commentdata['comment_post_ID']); // Don't notify if it's your own comment if ( get_option('comments_notify') && $commentdata['comment_approved'] && ( ! isset( $commentdata['user_id'] ) || $post->post_author != $commentdata['user_id'] ) ) wp_notify_postauthor($comment_ID, isset( $commentdata['comment_type'] ) ? $commentdata['comment_type'] : '' ); } return $comment_ID; } /** * Sets the status of a comment. * * The 'wp_set_comment_status' action is called after the comment is handled. * If the comment status is not in the list, then false is returned. * * @since 1.0.0 * @uses wp_transition_comment_status() Passes new and old comment status along with $comment object * * @param int $comment_id Comment ID. * @param string $comment_status New comment status, either 'hold', 'approve', 'spam', or 'trash'. * @param bool $wp_error Whether to return a WP_Error object if there is a failure. Default is false. 
 * @return bool False on failure or deletion and true on success.
 */
function wp_set_comment_status($comment_id, $comment_status, $wp_error = false) {
	global $wpdb;

	// Map the caller-facing status to the value stored in comment_approved.
	$status = '0';
	switch ( $comment_status ) {
		case 'hold':
		case '0':
			$status = '0';
			break;
		case 'approve':
		case '1':
			$status = '1';
			// Approving may notify the post author, if the blog opts in.
			if ( get_option('comments_notify') ) {
				$comment = get_comment($comment_id);
				wp_notify_postauthor($comment_id, $comment->comment_type);
			}
			break;
		case 'spam':
			$status = 'spam';
			break;
		case 'trash':
			$status = 'trash';
			break;
		default:
			// Unknown status strings are rejected outright.
			return false;
	}

	// Snapshot the pre-update state for the status-transition hooks below.
	$comment_old = clone get_comment($comment_id);

	if ( !$wpdb->update( $wpdb->comments, array('comment_approved' => $status), array('comment_ID' => $comment_id) ) ) {
		if ( $wp_error )
			return new WP_Error('db_update_error', __('Could not update comment status'), $wpdb->last_error);
		else
			return false;
	}

	clean_comment_cache($comment_id);

	$comment = get_comment($comment_id);

	do_action('wp_set_comment_status', $comment_id, $comment_status);
	wp_transition_comment_status($comment_status, $comment_old->comment_approved, $comment);

	wp_update_comment_count($comment->comment_post_ID);

	return true;
}

/**
 * Updates an existing comment in the database.
 *
 * Filters the comment and makes sure certain fields are valid before updating.
 *
 * @since 2.0.0
 * @uses $wpdb
 * @uses wp_transition_comment_status() Passes new and old comment status along with $comment object
 *
 * @param array $commentarr Contains information on the comment.
 * @return int Comment was updated if value is 1, or was not updated if value is 0.
 */
function wp_update_comment($commentarr) {
	global $wpdb;

	// First, get all of the original fields
	$comment = get_comment($commentarr['comment_ID'], ARRAY_A);

	// Escape data pulled from DB.
	$comment = esc_sql($comment);

	$old_status = $comment['comment_approved'];

	// Merge old and new fields with new fields overwriting old ones.
	$commentarr = array_merge($comment, $commentarr);

	$commentarr = wp_filter_comment( $commentarr );

	// Now extract the merged array.
	extract(stripslashes_deep($commentarr), EXTR_SKIP);

	$comment_content = apply_filters('comment_save_pre', $comment_content);

	// Recompute the GMT date from the (possibly updated) local date.
	$comment_date_gmt = get_gmt_from_date($comment_date);

	// Normalize the caller-facing status names to stored values.
	if ( !isset($comment_approved) )
		$comment_approved = 1;
	else if ( 'hold' == $comment_approved )
		$comment_approved = 0;
	else if ( 'approve' == $comment_approved )
		$comment_approved = 1;

	$data = compact( 'comment_content', 'comment_author', 'comment_author_email', 'comment_approved', 'comment_karma', 'comment_author_url', 'comment_date', 'comment_date_gmt', 'comment_parent' );
	$rval = $wpdb->update( $wpdb->comments, $data, compact( 'comment_ID' ) );

	clean_comment_cache($comment_ID);
	wp_update_comment_count($comment_post_ID);
	do_action('edit_comment', $comment_ID);
	$comment = get_comment($comment_ID);
	wp_transition_comment_status($comment->comment_approved, $old_status, $comment);
	return $rval;
}

/**
 * Whether to defer comment counting.
 *
 * When setting $defer to true, all post comment counts will not be updated
 * until $defer is set to false. When $defer is set to false, then all
 * previously deferred updated post comment counts will then be automatically
 * updated without having to call wp_update_comment_count() after.
 *
 * @since 2.5.0
 * @staticvar bool $_defer
 *
 * @param bool $defer
 * @return unknown
 */
function wp_defer_comment_counting($defer=null) {
	static $_defer = false;

	if ( is_bool($defer) ) {
		$_defer = $defer;
		// flush any deferred counts
		if ( !$defer )
			wp_update_comment_count( null, true );
	}

	// Non-bool (including the default null) just reports the current setting.
	return $_defer;
}

/**
 * Updates the comment count for post(s).
 *
 * When $do_deferred is false (is by default) and the comments have been set to
 * be deferred, the post_id will be added to a queue, which will be updated at a
 * later date and only updated once per post ID.
 *
 * If the comments have not be set up to be deferred, then the post will be
 * updated.
When $do_deferred is set to true, then all previous deferred post * IDs will be updated along with the current $post_id. * * @since 2.1.0 * @see wp_update_comment_count_now() For what could cause a false return value * * @param int $post_id Post ID * @param bool $do_deferred Whether to process previously deferred post comment counts * @return bool True on success, false on failure */ function wp_update_comment_count($post_id, $do_deferred=false) { static $_deferred = array(); if ( $do_deferred ) { $_deferred = array_unique($_deferred); foreach ( $_deferred as $i => $_post_id ) { wp_update_comment_count_now($_post_id); unset( $_deferred[$i] ); /** @todo Move this outside of the foreach and reset $_deferred to an array instead */ } } if ( wp_defer_comment_counting() ) { $_deferred[] = $post_id; return true; } elseif ( $post_id ) { return wp_update_comment_count_now($post_id); } } /** * Updates the comment count for the post. * * @since 2.5.0 * @uses $wpdb * @uses do_action() Calls 'wp_update_comment_count' hook on $post_id, $new, and $old * @uses do_action() Calls 'edit_posts' hook on $post_id and $post * * @param int $post_id Post ID * @return bool False on '0' $post_id or if post with ID does not exist. True on success. */ function wp_update_comment_count_now($post_id) { global $wpdb; $post_id = (int) $post_id; if ( !$post_id ) return false; if ( !$post = get_post($post_id) ) return false; $old = (int) $post->comment_count; $new = (int) $wpdb->get_var( $wpdb->prepare("SELECT COUNT(*) FROM $wpdb->comments WHERE comment_post_ID = %d AND comment_approved = '1'", $post_id) ); $wpdb->update( $wpdb->posts, array('comment_count' => $new), array('ID' => $post_id) ); clean_post_cache( $post ); do_action('wp_update_comment_count', $post_id, $new, $old); do_action('edit_post', $post_id, $post); return true; } // // Ping and trackback functions. // /** * Finds a pingback server URI based on the given URL. * * Checks the HTML for the rel="pingback" link and x-pingback headers. 
It does
 * a check for the x-pingback headers first and returns that, if available. The
 * check for the rel="pingback" has more overhead than just the header.
 *
 * @since 1.5.0
 *
 * @param string $url URL to ping.
 * @param int $deprecated Not Used.
 * @return bool|string False on failure, string containing URI on success.
 */
function discover_pingback_server_uri( $url, $deprecated = '' ) {
	if ( !empty( $deprecated ) )
		_deprecated_argument( __FUNCTION__, '2.7' );

	$pingback_str_dquote = 'rel="pingback"';
	$pingback_str_squote = 'rel=\'pingback\'';

	/** @todo Should use Filter Extension or custom preg_match instead. */
	$parsed_url = parse_url($url);

	if ( ! isset( $parsed_url['host'] ) ) // Not an URL. This should never happen.
		return false;

	//Do not search for a pingback server on our own uploads
	$uploads_dir = wp_upload_dir();
	if ( 0 === strpos($url, $uploads_dir['baseurl']) )
		return false;

	// Cheap check first: a HEAD request for the X-Pingback header.
	$response = wp_remote_head( $url, array( 'timeout' => 2, 'httpversion' => '1.0' ) );

	if ( is_wp_error( $response ) )
		return false;

	if ( wp_remote_retrieve_header( $response, 'x-pingback' ) )
		return wp_remote_retrieve_header( $response, 'x-pingback' );

	// Not an (x)html, sgml, or xml page, no use going further.
	if ( preg_match('#(image|audio|video|model)/#is', wp_remote_retrieve_header( $response, 'content-type' )) )
		return false;

	// Now do a GET since we're going to look in the html headers (and we're sure its not a binary file)
	$response = wp_remote_get( $url, array( 'timeout' => 2, 'httpversion' => '1.0' ) );

	if ( is_wp_error( $response ) )
		return false;

	$contents = wp_remote_retrieve_body( $response );

	// Look for rel="pingback" (double- or single-quoted) in the page body.
	$pingback_link_offset_dquote = strpos($contents, $pingback_str_dquote);
	$pingback_link_offset_squote = strpos($contents, $pingback_str_squote);
	if ( $pingback_link_offset_dquote || $pingback_link_offset_squote ) {
		$quote = ($pingback_link_offset_dquote) ? '"' : '\'';
		$pingback_link_offset = ($quote=='"') ? $pingback_link_offset_dquote : $pingback_link_offset_squote;
		$pingback_href_pos = @strpos($contents, 'href=', $pingback_link_offset);
		// Skip past "href=" (5 chars) plus the opening quote character.
		$pingback_href_start = $pingback_href_pos+6;
		$pingback_href_end = @strpos($contents, $quote, $pingback_href_start);
		$pingback_server_url_len = $pingback_href_end - $pingback_href_start;
		$pingback_server_url = substr($contents, $pingback_href_start, $pingback_server_url_len);

		// We may find rel="pingback" but an incomplete pingback URL
		if ( $pingback_server_url_len > 0 ) { // We got it!
			return $pingback_server_url;
		}
	}

	return false;
}

/**
 * Perform all pingbacks, enclosures, trackbacks, and send to pingback services.
 *
 * @since 2.1.0
 * @uses $wpdb
 */
function do_all_pings() {
	global $wpdb;

	// Do pingbacks
	while ($ping = $wpdb->get_row("SELECT ID, post_content, meta_id FROM {$wpdb->posts}, {$wpdb->postmeta} WHERE {$wpdb->posts}.ID = {$wpdb->postmeta}.post_id AND {$wpdb->postmeta}.meta_key = '_pingme' LIMIT 1")) {
		// Consume the '_pingme' marker before processing so the loop terminates.
		delete_metadata_by_mid( 'post', $ping->meta_id );
		pingback( $ping->post_content, $ping->ID );
	}

	// Do Enclosures
	while ($enclosure = $wpdb->get_row("SELECT ID, post_content, meta_id FROM {$wpdb->posts}, {$wpdb->postmeta} WHERE {$wpdb->posts}.ID = {$wpdb->postmeta}.post_id AND {$wpdb->postmeta}.meta_key = '_encloseme' LIMIT 1")) {
		delete_metadata_by_mid( 'post', $enclosure->meta_id );
		do_enclose( $enclosure->post_content, $enclosure->ID );
	}

	// Do Trackbacks
	$trackbacks = $wpdb->get_col("SELECT ID FROM $wpdb->posts WHERE to_ping <> '' AND post_status = 'publish'");
	if ( is_array($trackbacks) )
		foreach ( $trackbacks as $trackback )
			do_trackbacks($trackback);

	//Do Update Services/Generic Pings
	generic_ping();
}

/**
 * Perform trackbacks.
 *
 * @since 1.5.0
 * @uses $wpdb
 *
 * @param int $post_id Post ID to do trackbacks on.
*/
function do_trackbacks($post_id) {
	global $wpdb;

	$post = get_post( $post_id );
	$to_ping = get_to_ping($post_id);
	$pinged = get_pung($post_id);
	if ( empty($to_ping) ) {
		// Nothing queued: clear the column and bail.
		$wpdb->update($wpdb->posts, array('to_ping' => ''), array('ID' => $post_id) );
		return;
	}

	if ( empty($post->post_excerpt) )
		$excerpt = apply_filters('the_content', $post->post_content);
	else
		$excerpt = apply_filters('the_excerpt', $post->post_excerpt);
	$excerpt = str_replace(']]>', ']]&gt;', $excerpt);
	$excerpt = wp_html_excerpt($excerpt, 252) . '...';

	$post_title = apply_filters('the_title', $post->post_title, $post->ID);
	$post_title = strip_tags($post_title);

	if ( $to_ping ) {
		foreach ( (array) $to_ping as $tb_ping ) {
			$tb_ping = trim($tb_ping);
			if ( !in_array($tb_ping, $pinged) ) {
				trackback($tb_ping, $post_title, $excerpt, $post_id);
				$pinged[] = $tb_ping;
			} else {
				// Already pinged: strip this URL from the post's to_ping queue.
				$wpdb->query( $wpdb->prepare("UPDATE $wpdb->posts SET to_ping = TRIM(REPLACE(to_ping, %s, '')) WHERE ID = %d", $tb_ping, $post_id) );
			}
		}
	}
}

/**
 * Sends pings to all of the ping site services.
 *
 * @since 1.2.0
 *
 * @param int $post_id Post ID. Not actually used.
 * @return int Same as Post ID from parameter
 */
function generic_ping($post_id = 0) {
	$services = get_option('ping_sites');
	$services = explode("\n", $services);

	foreach ( (array) $services as $service ) {
		$service = trim($service);
		if ( '' != $service )
			weblog_ping($service);
	}

	return $post_id;
}

/**
 * Pings back the links found in a post.
 *
 * @since 0.71
 * @uses $wp_version
 * @uses IXR_Client
 *
 * @param string $content Post content to check for links.
 * @param int $post_ID Post ID.
 */
function pingback($content, $post_ID) {
	global $wp_version;
	include_once(ABSPATH . WPINC . '/class-IXR.php');
	include_once(ABSPATH . WPINC . '/class-wp-http-ixr-client.php');

	// original code by Mort (http://mort.mine.nu:8080)
	$post_links = array();

	$pung = get_pung($post_ID);

	// Variables
	$ltrs = '\w';
	$gunk = '/#~:.?+=&%@!\-';
	$punc = '.:?\-';
	$any = $ltrs . $gunk . $punc;

	// Step 1
	// Parsing the post, external links (if any) are stored in the $post_links array
	// This regexp comes straight from phpfreaks.com
	// http://www.phpfreaks.com/quickcode/Extract_All_URLs_on_a_Page/15.php
	preg_match_all("{\b http : [$any] +? (?= [$punc] * [^$any] | $)}x", $content, $post_links_temp);

	// Step 2.
	// Walking thru the links array
	// first we get rid of links pointing to sites, not to specific files
	// Example:
	// http://dummy-weblog.org
	// http://dummy-weblog.org/
	// http://dummy-weblog.org/post.php
	// We don't wanna ping first and second types, even if they have a valid <link/>
	foreach ( (array) $post_links_temp[0] as $link_test ) :
		if ( !in_array($link_test, $pung) && (url_to_postid($link_test) != $post_ID) // If we haven't pung it already and it isn't a link to itself
				&& !is_local_attachment($link_test) ) : // Also, let's never ping local attachments.
			if ( $test = @parse_url($link_test) ) {
				if ( isset($test['query']) )
					$post_links[] = $link_test;
				elseif ( isset( $test['path'] ) && ( $test['path'] != '/' ) && ( $test['path'] != '' ) )
					$post_links[] = $link_test;
			}
		endif;
	endforeach;

	do_action_ref_array( 'pre_ping', array( &$post_links, &$pung, $post_ID ) );

	foreach ( (array) $post_links as $pagelinkedto ) {
		$pingback_server_url = discover_pingback_server_uri( $pagelinkedto );

		if ( $pingback_server_url ) {
			@ set_time_limit( 60 );
			// Now, the RPC call
			$pagelinkedfrom = get_permalink($post_ID);

			// using a timeout of 3 seconds should be enough to cover slow servers
			$client = new WP_HTTP_IXR_Client($pingback_server_url);
			$client->timeout = 3;
			$client->useragent = apply_filters( 'pingback_useragent', $client->useragent . ' -- WordPress/' . $wp_version, $client->useragent, $pingback_server_url, $pagelinkedto, $pagelinkedfrom);
			// when set to true, this outputs debug messages by itself
			$client->debug = false;

			// Error 48 means the pingback is already registered; treat as success.
			if ( $client->query('pingback.ping', $pagelinkedfrom, $pagelinkedto) || ( isset($client->error->code) && 48 == $client->error->code ) ) // Already registered
				add_ping( $post_ID, $pagelinkedto );
		}
	}
}

/**
 * Check whether blog is public before returning sites.
 *
 * @since 2.1.0
 *
 * @param mixed $sites Will return if blog is public, will not return if not public.
 * @return mixed Empty string if blog is not public, returns $sites, if site is public.
 */
function privacy_ping_filter($sites) {
	if ( '0' != get_option('blog_public') )
		return $sites;
	else
		return '';
}

/**
 * Send a Trackback.
 *
 * Updates database when sending trackback to prevent duplicates.
 *
 * @since 0.71
 * @uses $wpdb
 *
 * @param string $trackback_url URL to send trackbacks.
 * @param string $title Title of post.
 * @param string $excerpt Excerpt of post.
 * @param int $ID Post ID.
 * @return mixed Database query from update.
 */
function trackback($trackback_url, $title, $excerpt, $ID) {
	global $wpdb;

	if ( empty($trackback_url) )
		return;

	$options = array();
	$options['timeout'] = 4;
	$options['body'] = array(
		'title' => $title,
		'url' => get_permalink($ID),
		'blog_name' => get_option('blogname'),
		'excerpt' => $excerpt
	);

	$response = wp_remote_post($trackback_url, $options);

	if ( is_wp_error( $response ) )
		return;

	// Record the successful ping and remove the URL from the pending queue.
	$wpdb->query( $wpdb->prepare("UPDATE $wpdb->posts SET pinged = CONCAT(pinged, '\n', %s) WHERE ID = %d", $trackback_url, $ID) );
	return $wpdb->query( $wpdb->prepare("UPDATE $wpdb->posts SET to_ping = TRIM(REPLACE(to_ping, %s, '')) WHERE ID = %d", $trackback_url, $ID) );
}

/**
 * Send a pingback.
 *
 * @since 1.2.0
 * @uses $wp_version
 * @uses IXR_Client
 *
 * @param string $server Host of blog to connect to.
 * @param string $path Path to send the ping.
 */
function weblog_ping($server = '', $path = '') {
	global $wp_version;
	include_once(ABSPATH . WPINC . '/class-IXR.php');
	include_once(ABSPATH . WPINC . '/class-wp-http-ixr-client.php');

	// using a timeout of 3 seconds should be enough to cover slow servers
	$client = new WP_HTTP_IXR_Client($server, ((!strlen(trim($path)) || ('/' == $path)) ? false : $path));
	$client->timeout = 3;
	$client->useragent .= ' -- WordPress/'.$wp_version;

	// when set to true, this outputs debug messages by itself
	$client->debug = false;
	$home = trailingslashit( home_url() );
	if ( !$client->query('weblogUpdates.extendedPing', get_option('blogname'), $home, get_bloginfo('rss2_url') ) ) // then try a normal ping
		$client->query('weblogUpdates.ping', get_option('blogname'), $home);
}

//
// Cache
//

/**
 * Removes comment ID from the comment cache.
 *
 * @since 2.3.0
 * @package WordPress
 * @subpackage Cache
 *
 * @param int|array $ids Comment ID or array of comment IDs to remove from cache
 */
function clean_comment_cache($ids) {
	foreach ( (array) $ids as $id )
		wp_cache_delete($id, 'comment');

	// Bump the group-wide 'last_changed' key so dependent query caches invalidate.
	if ( function_exists( 'wp_cache_incr' ) ) {
		wp_cache_incr( 'last_changed', 1, 'comment' );
	} else {
		$last_changed = wp_cache_get( 'last_changed', 'comment' );
		wp_cache_set( 'last_changed', $last_changed + 1, 'comment' );
	}
}

/**
 * Updates the comment cache of given comments.
 *
 * Will add the comments in $comments to the cache. If comment ID already exists
 * in the comment cache then it will not be updated. The comment is added to the
 * cache using the comment group with the key using the ID of the comments.
 *
 * @since 2.3.0
 * @package WordPress
 * @subpackage Cache
 *
 * @param array $comments Array of comment row objects
 */
function update_comment_cache($comments) {
	foreach ( (array) $comments as $comment )
		wp_cache_add($comment->comment_ID, $comment, 'comment');
}

//
// Internal
//

/**
 * Close comments on old posts on the fly, without any extra DB queries. Hooked to the_posts.
 *
 * @access private
 * @since 2.7.0
 *
 * @param object $posts Post data object.
 * @param object $query Query object.
* @return object */ function _close_comments_for_old_posts( $posts, $query ) { if ( empty( $posts ) || ! $query->is_singular() || ! get_option( 'close_comments_for_old_posts' ) ) return $posts; $post_types = apply_filters( 'close_comments_for_post_types', array( 'post' ) ); if ( ! in_array( $posts[0]->post_type, $post_types ) ) return $posts; $days_old = (int) get_option( 'close_comments_days_old' ); if ( ! $days_old ) return $posts; if ( time() - strtotime( $posts[0]->post_date_gmt ) > ( $days_old * DAY_IN_SECONDS ) ) { $posts[0]->comment_status = 'closed'; $posts[0]->ping_status = 'closed'; } return $posts; } /** * Close comments on an old post. Hooked to comments_open and pings_open. * * @access private * @since 2.7.0 * * @param bool $open Comments open or closed * @param int $post_id Post ID * @return bool $open */ function _close_comments_for_old_post( $open, $post_id ) { if ( ! $open ) return $open; if ( !get_option('close_comments_for_old_posts') ) return $open; $days_old = (int) get_option('close_comments_days_old'); if ( !$days_old ) return $open; $post = get_post($post_id); $post_types = apply_filters( 'close_comments_for_post_types', array( 'post' ) ); if ( ! in_array( $post->post_type, $post_types ) ) return $open; if ( time() - strtotime( $post->post_date_gmt ) > ( $days_old * DAY_IN_SECONDS ) ) return false; return $open; }
{ "content_hash": "6111f2d5ef8142e072a54cbea744f6d4", "timestamp": "", "source": "github", "line_count": 2062, "max_line_length": 275, "avg_line_length": 32.978176527643065, "alnum_prop": 0.6542992014823311, "repo_name": "arturo-mayorga/am_com", "id": "7a8f71cfd470b59c980e7ca2999c4ce0283c232d", "size": "68001", "binary": false, "copies": "254", "ref": "refs/heads/master", "path": "site/wp-includes/comment.php", "mode": "33188", "license": "mit", "language": [ { "name": "ApacheConf", "bytes": "85" }, { "name": "CSS", "bytes": "1259348" }, { "name": "GCC Machine Description", "bytes": "20035" }, { "name": "HTML", "bytes": "176722" }, { "name": "JavaScript", "bytes": "1823009" }, { "name": "PHP", "bytes": "11843989" } ] }
ACCEPTED #### According to International Plant Names Index #### Published in null #### Original name null ### Remarks null
{ "content_hash": "7a0dfcabd10747d08fb29b7f88d958c6", "timestamp": "", "source": "github", "line_count": 13, "max_line_length": 31, "avg_line_length": 9.692307692307692, "alnum_prop": 0.7063492063492064, "repo_name": "mdoering/backbone", "id": "3c0806b6d13dbd6a5def5829c23fdfe1d91546c6", "size": "183", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "life/Plantae/Magnoliophyta/Magnoliopsida/Malpighiales/Clusiaceae/Chrysochlamys/Chrysochlamys floribunda/README.md", "mode": "33188", "license": "apache-2.0", "language": [] }
package uk.jamierocks.mana.carbon;

import static com.google.common.base.Preconditions.checkNotNull;

import com.google.common.eventbus.EventBus;
import com.sk89q.intake.dispatcher.Dispatcher;
import uk.jamierocks.mana.carbon.irc.IRCManager;
import uk.jamierocks.mana.carbon.module.ModuleManager;
import uk.jamierocks.mana.carbon.plugin.PluginContainer;
import uk.jamierocks.mana.carbon.plugin.PluginManager;
import uk.jamierocks.mana.carbon.service.ServiceRegistry;

/**
 * Holds all the necessary components for Carbon.
 *
 * @author Jamie Mansfield
 * @since 1.0.0
 */
public abstract class Carbon {

    /**
     * This will be forcefully overridden by Carbon upon its initialisation.
     */
    // NOTE(review): a static final field being "overridden" implies it is set via
    // reflection during bootstrap -- confirm against the implementation module.
    protected static final PluginContainer CONTAINER = null;

    /**
     * Gets the instance of {@link Carbon} currently running.
     *
     * @return The current instance of Carbon
     * @since 1.0.0
     */
    public static Carbon getCarbon() {
        // Fails fast if the bootstrap has not populated CONTAINER yet.
        checkNotNull(CONTAINER, "CONTAINER is null!");
        return (Carbon) CONTAINER.getInstance().get();
    }

    /**
     * Gets the {@link EventBus} used by Carbon.
     *
     * @return Carbon's event bus
     * @since 1.0.0
     */
    public abstract EventBus getEventBus();

    /**
     * Gets the {@link PluginManager} used by Carbon.
     *
     * @return Carbon's plugin manager
     * @since 1.0.0
     */
    public abstract PluginManager getPluginManager();

    /**
     * Gets the {@link ModuleManager} used by Carbon.
     *
     * @return Carbon's module manager
     * @since 1.0.0
     */
    public abstract ModuleManager getModuleManager();

    /**
     * Gets the {@link IRCManager} used by Carbon.
     *
     * @return Carbon's irc manager
     * @since 1.0.0
     */
    public abstract IRCManager getIRCManager();

    /**
     * Gets the {@link ServiceRegistry} used by Carbon.
     *
     * @return Carbon's service registry
     * @since 1.0.0
     */
    public abstract ServiceRegistry getServiceRegistry();

    /**
     * Gets the {@link Dispatcher} used by Carbon.
     *
     * @return Carbon's command dispatcher
     * @since 1.0.0
     */
    public abstract Dispatcher getCommandDispatcher();

    /**
     * Gets the {@link CarbonConfiguration} used by Carbon.
     *
     * @return Carbon's configuration
     * @since 2.0.0
     */
    public abstract CarbonConfiguration getConfiguration();
}
{ "content_hash": "1149b6906cf8f326aa6d3d6c45ef05cd", "timestamp": "", "source": "github", "line_count": 94, "max_line_length": 76, "avg_line_length": 25.47872340425532, "alnum_prop": 0.6501043841336117, "repo_name": "ManaBot/Carbon", "id": "d1f89e180c7bc4a7c370d9f79d03404509d8a9c4", "size": "2993", "binary": false, "copies": "1", "ref": "refs/heads/bleeding", "path": "carbon-api/src/main/java/uk/jamierocks/mana/carbon/Carbon.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Java", "bytes": "119657" } ] }
package cn.felord.wepay.ali.sdk.api.request; import java.util.Map; import cn.felord.wepay.ali.sdk.api.AlipayRequest; import cn.felord.wepay.ali.sdk.api.internal.util.AlipayHashMap; import cn.felord.wepay.ali.sdk.api.response.AlipayEcoMycarParkingLotbarcodeCreateResponse; import cn.felord.wepay.ali.sdk.api.AlipayObject; /** * ALIPAY API: alipay.eco.mycar.parking.lotbarcode.create request * * @author auto create * @version $Id: $Id */ public class AlipayEcoMycarParkingLotbarcodeCreateRequest implements AlipayRequest<AlipayEcoMycarParkingLotbarcodeCreateResponse> { private AlipayHashMap udfParams; // add user-defined text parameters private String apiVersion="1.0"; /** * 物料二维码 */ private String bizContent; /** * <p>Setter for the field <code>bizContent</code>.</p> * * @param bizContent a {@link java.lang.String} object. */ public void setBizContent(String bizContent) { this.bizContent = bizContent; } /** * <p>Getter for the field <code>bizContent</code>.</p> * * @return a {@link java.lang.String} object. */ public String getBizContent() { return this.bizContent; } private String terminalType; private String terminalInfo; private String prodCode; private String notifyUrl; private String returnUrl; private boolean needEncrypt=false; private AlipayObject bizModel=null; /** * <p>Getter for the field <code>notifyUrl</code>.</p> * * @return a {@link java.lang.String} object. */ public String getNotifyUrl() { return this.notifyUrl; } /** {@inheritDoc} */ public void setNotifyUrl(String notifyUrl) { this.notifyUrl = notifyUrl; } /** * <p>Getter for the field <code>returnUrl</code>.</p> * * @return a {@link java.lang.String} object. */ public String getReturnUrl() { return this.returnUrl; } /** {@inheritDoc} */ public void setReturnUrl(String returnUrl) { this.returnUrl = returnUrl; } /** * <p>Getter for the field <code>apiVersion</code>.</p> * * @return a {@link java.lang.String} object. 
*/ public String getApiVersion() { return this.apiVersion; } /** {@inheritDoc} */ public void setApiVersion(String apiVersion) { this.apiVersion = apiVersion; } /** {@inheritDoc} */ public void setTerminalType(String terminalType){ this.terminalType=terminalType; } /** * <p>Getter for the field <code>terminalType</code>.</p> * * @return a {@link java.lang.String} object. */ public String getTerminalType(){ return this.terminalType; } /** {@inheritDoc} */ public void setTerminalInfo(String terminalInfo){ this.terminalInfo=terminalInfo; } /** * <p>Getter for the field <code>terminalInfo</code>.</p> * * @return a {@link java.lang.String} object. */ public String getTerminalInfo(){ return this.terminalInfo; } /** {@inheritDoc} */ public void setProdCode(String prodCode) { this.prodCode=prodCode; } /** * <p>Getter for the field <code>prodCode</code>.</p> * * @return a {@link java.lang.String} object. */ public String getProdCode() { return this.prodCode; } /** * <p>getApiMethodName.</p> * * @return a {@link java.lang.String} object. */ public String getApiMethodName() { return "alipay.eco.mycar.parking.lotbarcode.create"; } /** * <p>getTextParams.</p> * * @return a {@link java.util.Map} object. */ public Map<String, String> getTextParams() { AlipayHashMap txtParams = new AlipayHashMap(); txtParams.put("biz_content", this.bizContent); if(udfParams != null) { txtParams.putAll(this.udfParams); } return txtParams; } /** * <p>putOtherTextParam.</p> * * @param key a {@link java.lang.String} object. * @param value a {@link java.lang.String} object. */ public void putOtherTextParam(String key, String value) { if(this.udfParams == null) { this.udfParams = new AlipayHashMap(); } this.udfParams.put(key, value); } /** * <p>getResponseClass.</p> * * @return a {@link java.lang.Class} object. 
*/ public Class<AlipayEcoMycarParkingLotbarcodeCreateResponse> getResponseClass() { return AlipayEcoMycarParkingLotbarcodeCreateResponse.class; } /** * <p>isNeedEncrypt.</p> * * @return a boolean. */ public boolean isNeedEncrypt() { return this.needEncrypt; } /** {@inheritDoc} */ public void setNeedEncrypt(boolean needEncrypt) { this.needEncrypt=needEncrypt; } /** * <p>Getter for the field <code>bizModel</code>.</p> * * @return a {@link cn.felord.wepay.ali.sdk.api.AlipayObject} object. */ public AlipayObject getBizModel() { return this.bizModel; } /** {@inheritDoc} */ public void setBizModel(AlipayObject bizModel) { this.bizModel=bizModel; } }
{ "content_hash": "467b8c7478870bdfc7221b995abea530", "timestamp": "", "source": "github", "line_count": 215, "max_line_length": 131, "avg_line_length": 22.47906976744186, "alnum_prop": 0.6610800744878957, "repo_name": "NotFound403/WePay", "id": "7858a71677afef037c5a53d98bfba5352bc120b0", "size": "4843", "binary": false, "copies": "1", "ref": "refs/heads/1.0.2", "path": "src/main/java/cn/felord/wepay/ali/sdk/api/request/AlipayEcoMycarParkingLotbarcodeCreateRequest.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Java", "bytes": "70942" } ] }
///////////////////////////////////////////////////////////////////////////// // Name: wx/html/htmldefs.h // Purpose: constants for wxhtml library // Author: Vaclav Slavik // Copyright: (c) 1999 Vaclav Slavik // Licence: wxWindows licence ///////////////////////////////////////////////////////////////////////////// #ifndef _WX_HTMLDEFS_H_ # define _WX_HTMLDEFS_H_ # include "wx/defs.h" # if wxUSE_HTML //-------------------------------------------------------------------------------- // ALIGNMENTS // Describes alignment of text etc. in containers //-------------------------------------------------------------------------------- # define wxHTML_ALIGN_LEFT 0x0000 # define wxHTML_ALIGN_RIGHT 0x0002 # define wxHTML_ALIGN_JUSTIFY 0x0010 # define wxHTML_ALIGN_TOP 0x0004 # define wxHTML_ALIGN_BOTTOM 0x0008 # define wxHTML_ALIGN_CENTER 0x0001 //-------------------------------------------------------------------------------- // COLOR MODES // Used by wxHtmlColourCell to determine clr of what is changing //-------------------------------------------------------------------------------- # define wxHTML_CLR_FOREGROUND 0x0001 # define wxHTML_CLR_BACKGROUND 0x0002 # define wxHTML_CLR_TRANSPARENT_BACKGROUND 0x0004 //-------------------------------------------------------------------------------- // UNITS // Used to specify units //-------------------------------------------------------------------------------- # define wxHTML_UNITS_PIXELS 0x0001 # define wxHTML_UNITS_PERCENT 0x0002 //-------------------------------------------------------------------------------- // INDENTS // Used to specify indetation relatives //-------------------------------------------------------------------------------- # define wxHTML_INDENT_LEFT 0x0010 # define wxHTML_INDENT_RIGHT 0x0020 # define wxHTML_INDENT_TOP 0x0040 # define wxHTML_INDENT_BOTTOM 0x0080 # define wxHTML_INDENT_HORIZONTAL(wxHTML_INDENT_LEFT | wxHTML_INDENT_RIGHT) # define wxHTML_INDENT_VERTICAL(wxHTML_INDENT_TOP | wxHTML_INDENT_BOTTOM) 
# define wxHTML_INDENT_ALL(wxHTML_INDENT_VERTICAL | wxHTML_INDENT_HORIZONTAL) //-------------------------------------------------------------------------------- // FIND CONDITIONS // Identifiers of wxHtmlCell's Find() conditions //-------------------------------------------------------------------------------- # define wxHTML_COND_ISANCHOR 1 // Finds the anchor of 'param' name (pointer to wxString). # define wxHTML_COND_ISIMAGEMAP 2 // Finds imagemap of 'param' name (pointer to wxString). // (used exclusively by m_image.cpp) # define wxHTML_COND_USER 10000 // User-defined conditions should start from this number //-------------------------------------------------------------------------------- // INTERNALS // wxHTML internal constants //-------------------------------------------------------------------------------- /* size of one scroll step of wxHtmlWindow in pixels */ # define wxHTML_SCROLL_STEP 16 /* size of temporary buffer used during parsing */ # define wxHTML_BUFLEN 1024 # endif #endif
{ "content_hash": "1350474659e578eba24575b3459e3eea", "timestamp": "", "source": "github", "line_count": 69, "max_line_length": 82, "avg_line_length": 47.028985507246375, "alnum_prop": 0.4406779661016949, "repo_name": "satya-das/cppparser", "id": "4c226581dcc89d278fae161aab70700894c0c05b", "size": "3245", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "test/e2e/test_master/wxWidgets/include/wx/html/htmldefs.h", "mode": "33188", "license": "mit", "language": [ { "name": "C", "bytes": "3858548" }, { "name": "C++", "bytes": "40366039" }, { "name": "CMake", "bytes": "5653" }, { "name": "Lex", "bytes": "39563" }, { "name": "Objective-C", "bytes": "10345580" }, { "name": "Shell", "bytes": "1365" }, { "name": "Yacc", "bytes": "103019" } ] }
import { DEFAULT_LOCALE } from '../common/constants';
import isNumber from '../common/is-number';
import datePattern from './date-pattern';
import dateNameType from './date-name-type';
import { dateFormatRegExp, DATE_FIELD_MAP } from './constants';
import { localeInfo } from '../cldr';

// Which date fields have CLDR name lists, the minimum pattern length that
// triggers a name lookup, and the specifier letter for the stand-alone form.
var NAME_TYPES = {
    month: {
        type: 'months',
        minLength: 3,
        standAlone: 'L'
    },

    quarter: {
        type: 'quarters',
        minLength: 3,
        standAlone: 'q'
    },

    weekday: {
        type: 'days',
        minLength: {
            E: 0,
            c: 3,
            e: 3
        },
        standAlone: 'c'
    },

    dayperiod: {
        type: 'dayPeriods',
        minLength: 0
    },

    era: {
        type: 'eras',
        minLength: 0
    }
};

var LITERAL = 'literal';

// Append literal text to the parts list; consecutive literals are merged
// into the trailing literal part instead of creating a new one.
function appendLiteral(parts, text) {
    var tail = parts[parts.length - 1];

    if (tail && tail.type === LITERAL) {
        tail.pattern += text;
    } else {
        parts.push({ type: LITERAL, pattern: text });
    }
}

// Only the lowercase 'h' specifier denotes a 12-hour clock.
function isHour12(pattern) {
    return pattern === 'h';
}

// Build the descriptor for one matched specifier run (e.g. "MMM", "yy").
function describeField(value) {
    var specifier = value[0];
    var fieldType = DATE_FIELD_MAP[specifier];
    var part = { type: fieldType, pattern: value };

    if (fieldType === 'hour') {
        part.hour12 = isHour12(value);
    }

    var names = NAME_TYPES[fieldType];
    if (names) {
        // minLength may be a single number or a per-specifier map.
        var required = isNumber(names.minLength) ? names.minLength : names.minLength[specifier];
        var length = value.length;
        if (length >= required) {
            part.names = {
                type: names.type,
                nameType: dateNameType(length),
                standAlone: names.standAlone === specifier
            };
        }
    }

    return part;
}

// Split a date format for the given locale into an ordered list of parts:
// field descriptors interleaved with literal-text parts.
export default function splitDateFormat(format, locale) {
    if (locale === void 0) { locale = DEFAULT_LOCALE; }

    var info = localeInfo(locale);
    var pattern = datePattern(format, info);
    var parts = [];

    dateFormatRegExp.lastIndex = 0;
    var lastIndex = 0;
    var match;

    while ((match = dateFormatRegExp.exec(pattern)) !== null) {
        var value = match[0];

        // Anything between the previous match and this one is literal text.
        if (lastIndex < match.index) {
            appendLiteral(parts, pattern.substring(lastIndex, match.index));
        }

        // Quoted runs are literals; everything else is a field specifier.
        if (value.startsWith('"') || value.startsWith("'")) {
            appendLiteral(parts, value);
        } else {
            parts.push(describeField(value));
        }

        lastIndex = dateFormatRegExp.lastIndex;
    }

    // Trailing literal text after the last specifier.
    if (lastIndex < pattern.length) {
        appendLiteral(parts, pattern.substring(lastIndex));
    }

    return parts;
}
//# sourceMappingURL=split-date-format.js.map
{ "content_hash": "b9e1a4cfe6f0c9286318a424fdfc3e9d", "timestamp": "", "source": "github", "line_count": 119, "max_line_length": 105, "avg_line_length": 24.252100840336134, "alnum_prop": 0.5148995148995149, "repo_name": "antpost/antpost-client", "id": "f613d36dd37b7897e17303bda440a11d0e50f67a", "size": "2886", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "node_modules/@progress/kendo-angular-intl/node_modules/@telerik/kendo-intl/dist/es/dates/split-date-format.js", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "1477882" }, { "name": "HTML", "bytes": "65403" }, { "name": "JavaScript", "bytes": "492756" }, { "name": "TypeScript", "bytes": "253918" } ] }
<?xml version="1.0" encoding="utf-8"?> <manifest xmlns:android="http://schemas.android.com/apk/res/android" package="com.grarak.kerneladiutor" android:versionCode="121" android:versionName="0.9.9.4"> <uses-permission android:name="android.permission.ACCESS_SUPERUSER" /> <uses-permission android:name="android.permission.RECEIVE_BOOT_COMPLETED" /> <uses-permission android:name="android.permission.INTERNET" /> <uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" /> <uses-permission android:name="android.permission.WAKE_LOCK" /> <uses-permission android:name="cyanogenmod.permission.PUBLISH_CUSTOM_TILE" /> <uses-feature android:name="android.software.leanback" android:required="false" /> <uses-feature android:name="android.hardware.touchscreen" android:required="false" /> <uses-feature android:name="android.hardware.screen.portrait" android:required="false" /> <application android:allowBackup="true" android:banner="@drawable/banner" android:icon="@mipmap/ic_launcher" android:label="@string/app_name" android:supportsRtl="true" android:theme="@style/AppThemeLight"> <activity android:name=".MainActivity" android:configChanges="locale|keyboard|keyboardHidden|orientation|screenSize|navigation"> <intent-filter> <action android:name="android.intent.action.MAIN" /> <category android:name="android.intent.category.LAUNCHER" /> <category android:name="android.intent.category.LEANBACK_LAUNCHER" /> <category android:name="android.intent.category.DEFAULT" /> </intent-filter> </activity> <activity android:name=".TextActivity" android:configChanges="locale|keyboard|keyboardHidden|orientation|screenSize|navigation" android:theme="@style/AppThemeActionBarLight" /> <activity android:name=".KernelActivity" android:configChanges="locale|keyboard|keyboardHidden|orientation|screenSize|navigation" /> <activity android:name=".FileBrowserActivity" android:configChanges="locale|keyboard|keyboardHidden|orientation|screenSize|navigation" /> <activity 
android:name=".EditTextActivity" android:configChanges="locale|keyboard|keyboardHidden|orientation|screenSize|navigation" /> <activity android:name=".DownloadPluginsActivity" android:configChanges="locale|keyboard|keyboardHidden|orientation|screenSize|navigation" /> <activity android:name=".elements.DAdapter$MainHeader$MainHeaderActivity" android:configChanges="locale|keyboard|keyboardHidden|orientation|screenSize|navigation" android:theme="@android:style/Theme.Translucent.NoTitleBar" /> <receiver android:name=".services.BootReceiver"> <intent-filter> <action android:name="android.intent.action.BOOT_COMPLETED" /> </intent-filter> </receiver> <service android:name=".services.BootService" /> <service android:name=".services.InitdService" /> <!-- Profile Widget --> <receiver android:name=".services.ProfileWidget"> <intent-filter> <action android:name="android.appwidget.action.APPWIDGET_UPDATE" /> </intent-filter> <meta-data android:name="android.appwidget.provider" android:resource="@xml/profile_widget" /> </receiver> <service android:name=".services.ProfileWidget$WidgetService" android:permission="android.permission.BIND_REMOTEVIEWS" /> <!-- DashClock --> <service android:name=".services.DashClockService" android:icon="@drawable/ic_launcher_preview" android:permission="com.google.android.apps.dashclock.permission.READ_EXTENSION_DATA"> <intent-filter> <action android:name="com.google.android.apps.dashclock.Extension" /> </intent-filter> <meta-data android:name="protocolVersion" android:value="1" /> </service> <!-- Tasker --> <activity android:name=".tasker.AddProfileActivity" android:configChanges="locale|keyboard|keyboardHidden|orientation|screenSize|navigation"> <intent-filter> <action android:name="com.twofortyfouram.locale.intent.action.EDIT_SETTING" /> </intent-filter> </activity> <receiver android:name=".tasker.RunProfileReceiver" android:process=":background"> <intent-filter> <action android:name="com.twofortyfouram.locale.intent.action.FIRE_SETTING" /> 
</intent-filter> </receiver> <!-- CM SDK --> <receiver android:name=".services.ProfileTileReceiver"> <intent-filter> <action android:name="com.grarak.kerneladiutor.action.ACTION_TOGGLE_STATE" /> </intent-filter> </receiver> </application> </manifest>
{ "content_hash": "014321cad3f58483359ad59c618133a9", "timestamp": "", "source": "github", "line_count": 130, "max_line_length": 103, "avg_line_length": 40.91538461538462, "alnum_prop": 0.6300056401579244, "repo_name": "CandyDevices/packages_apps_KernelAdiutor", "id": "82a7cc633a5604be6b449fe5d65d74f8e6cb72d6", "size": "5319", "binary": false, "copies": "1", "ref": "refs/heads/candy", "path": "AndroidManifest.xml", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Java", "bytes": "1062043" }, { "name": "Makefile", "bytes": "4816" } ] }
#include "phd.h"
#include "backlash_comp.h"

// Backlash compensator: adds an extra pulse to the first Dec guide command
// after a direction reversal, sized by a persisted per-mount profile value.
BacklashComp::BacklashComp(Mount *theMount)
{
    m_pMount = theMount;
    m_pulseWidth = pConfig->Profile.GetInt("/" + m_pMount->GetMountClassName() + "/DecBacklashPulse", 0);
    // Only honor the enable flag if a non-zero pulse has ever been stored.
    if (m_pulseWidth > 0)
        m_compActive = pConfig->Profile.GetBoolean("/" + m_pMount->GetMountClassName() + "/BacklashCompEnabled", false);
    else
        m_compActive = false;
    m_justCompensated = false;
    m_lastDirection = NONE;
    if (m_compActive)
        Debug.AddLine(wxString::Format("BLC: Backlash compensation is enabled with correction = %d ms", m_pulseWidth));
    else
        Debug.AddLine("BLC: Backlash compensation is disabled");
}

// Persist a new compensation pulse width (clamped to >= 0 ms).
void BacklashComp::SetBacklashPulse(int ms)
{
    m_pulseWidth = wxMax(0, ms);
    pConfig->Profile.SetInt("/" + m_pMount->GetMountClassName() + "/DecBacklashPulse", m_pulseWidth);
    Debug.AddLine(wxString::Format("BLC: Comp pulse set to %d ms", m_pulseWidth));
}

// Enable/disable compensation and persist the choice.
void BacklashComp::EnableBacklashComp(bool enable)
{
    m_compActive = enable;
    pConfig->Profile.SetBoolean("/" + m_pMount->GetMountClassName() + "/BacklashCompEnabled", m_compActive);
    Debug.AddLine(wxString::Format("BLC: Backlash comp %s, Comp pulse = %d ms",
        m_compActive ? "enabled" : "disabled", m_pulseWidth));
}

// If the guide algorithm over-shot right after a compensation pulse, shrink
// the pulse — the compensation itself is the likely cause.
void BacklashComp::HandleOverShoot(int pulseSize)
{
    if (m_justCompensated && pulseSize > 0)
    {
        // We just did a backlash comp so this is probably our problem.
        // Reduce by up to half the current pulse, never more than the over-shoot.
        int reduction = (int) floor(wxMin(0.5 * m_pulseWidth, pulseSize));
        Debug.AddLine(wxString::Format("BLC: Backlash over-shoot, pulse size reduced from %d to %d",
            m_pulseWidth, m_pulseWidth - reduction));
        m_pulseWidth -= reduction;
    }
}

// Return the extra pulse (ms) to add to this Dec move: non-zero only on a
// direction reversal while compensation is active. Remembers whether a
// compensation was applied so HandleOverShoot can react on the next frame.
int BacklashComp::GetBacklashComp(int dir, double yDist)
{
    int rslt = 0;
    if (m_compActive && m_pulseWidth > 0)
    {
        if (fabs(yDist) > 0)
        {
            if (m_lastDirection != NONE && dir != m_lastDirection)
            {
                rslt = (int) m_pulseWidth;
                Debug.AddLine(wxString::Format("BLC: Dec direction reversal from %s to %s, backlash comp pulse of %d applied",
                    m_lastDirection == NORTH ? "North" : "South", dir == NORTH ? "North" : "South", rslt));
            }
            m_lastDirection = dir;
        }
    }
    m_justCompensated = (rslt != 0);
    return rslt;
}

// Forget direction history, e.g. after a measurement run or re-calibration.
void BacklashComp::Reset()
{
    m_lastDirection = GUIDE_DIRECTION::NONE;
}

// Class for implementing the backlash graph dialog
class BacklashGraph : public wxDialog
{
    BacklashTool *m_BLT;
public:
    BacklashGraph(wxDialog *parent, BacklashTool *pBL);
    wxBitmap CreateGraph(int graphicWidth, int graphicHeight);
};

BacklashGraph::BacklashGraph(wxDialog *parent, BacklashTool *pBL)
    : wxDialog(parent, wxID_ANY, _("Backlash Results"), wxDefaultPosition, wxSize(500, 400))
    // NOTE: _() already calls wxGetTranslation; the previous
    // wxGetTranslation(_("...")) translated the string twice.
{
    m_BLT = pBL;

    // Just a big button area for the graph with a button below it
    wxBoxSizer *vSizer = new wxBoxSizer(wxVERTICAL);

    // Use a bitmap button so we don't waste cycles in paint events
    wxBitmap theGraph = CreateGraph(450, 300);
    wxBitmapButton *graphButton = new wxBitmapButton(this, wxID_ANY, theGraph, wxDefaultPosition,
        wxSize(450, 300), wxBU_AUTODRAW | wxBU_EXACTFIT);
    vSizer->Add(graphButton, 0, wxALIGN_CENTER_HORIZONTAL | wxALL | wxFIXED_MINSIZE, 5);
    graphButton->SetBitmapDisabled(theGraph);
    graphButton->Enable(false);

    // ok button because we're modal
    vSizer->Add(
        CreateButtonSizer(wxOK),
        wxSizerFlags(0).Expand().Border(wxALL, 10));

    SetSizerAndFit(vSizer);
}

// Render the measured North/South step positions plus an ideal South
// recovery line into a bitmap of the requested size.
wxBitmap BacklashGraph::CreateGraph(int bmpWidth, int bmpHeight)
{
    wxMemoryDC dc;
    wxBitmap bmp(bmpWidth, bmpHeight, -1);
    wxPen axisPen("BLACK", 3, wxCROSS_HATCH);
    wxPen redPen("RED", 3, wxSOLID);
    wxPen bluePen("BLUE", 3, wxSOLID);
    wxBrush redBrush("RED", wxSOLID);
    wxBrush blueBrush("BLUE", wxSOLID);

    std::vector <double> northSteps = m_BLT->GetNorthSteps();
    std::vector <double> southSteps = m_BLT->GetSouthSteps();

    double xScaleFactor;
    double yScaleFactor;
    int xOrigin;
    int yOrigin;
    int ptRadius;
    int graphWindowWidth;
    int graphWindowHeight;
    int numNorth;
    double northInc;
    int numSouth;

    // Find the max excursion from the origin in order to scale the points to fit the bitmap
    double maxDec = -9999.0;
    double minDec = 9999.0;
    for (std::vector<double>::const_iterator it = northSteps.begin(); it != northSteps.end(); ++it)
    {
        maxDec = wxMax(maxDec, *it);
        minDec = wxMin(minDec, *it);
    }
    for (std::vector<double>::const_iterator it = southSteps.begin(); it != southSteps.end(); ++it)
    {
        maxDec = wxMax(maxDec, *it);
        minDec = wxMin(minDec, *it);
    }

    graphWindowWidth = bmpWidth;
    graphWindowHeight = 0.7 * bmpHeight;
    yScaleFactor = (graphWindowHeight) / (maxDec - minDec + 1);
    // Cast to double before dividing: int / size_t previously truncated the
    // scale factor to a whole number, compressing the plot.
    xScaleFactor = (double) graphWindowWidth / (northSteps.size() + southSteps.size());

    // Since we get mount coordinates, north steps will always be in ascending order
    numNorth = northSteps.size();
    northInc = (northSteps.at(numNorth - 1) - northSteps.at(0)) / numNorth;
    numSouth = southSteps.size();       // Should be same as numNorth but be careful

    dc.SelectObject(bmp);
    dc.SetBackground(*wxLIGHT_GREY_BRUSH);
    dc.SetFont(wxFont(12, wxFONTFAMILY_DEFAULT, wxFONTSTYLE_NORMAL, wxFONTWEIGHT_NORMAL));
    dc.Clear();

    // Bottom and top labels
    dc.SetTextForeground("BLUE");
    dc.DrawText(_("Ideal"), 0.7 * graphWindowWidth, bmpHeight - 25);
    dc.SetTextForeground("RED");
    dc.DrawText(_("Measured"), 0.2 * graphWindowWidth, bmpHeight - 25);
    dc.DrawText(_("North"), 0.1 * graphWindowWidth, 10);
    dc.DrawText(_("South"), 0.8 * graphWindowWidth, 10);

    // Draw the axes
    dc.SetPen(axisPen);
    xOrigin = graphWindowWidth / 2;
    yOrigin = graphWindowHeight + 40;           // Leave room at the top for labels and such
    dc.DrawLine(0, yOrigin, graphWindowWidth, yOrigin);    // x
    dc.DrawLine(xOrigin, yOrigin, xOrigin, 0);             // y

    // Draw the north steps
    dc.SetPen(redPen);
    dc.SetBrush(redBrush);
    ptRadius = 2;
    for (int i = 0; i < numNorth; i++)
    {
        dc.DrawCircle(wxPoint(i * xScaleFactor, round(yOrigin - (northSteps.at(i) - minDec) * yScaleFactor)), ptRadius);
    }

    // Draw the south steps
    for (int i = 0; i < numSouth; i++)
    {
        dc.DrawCircle(wxPoint((i + numNorth) * xScaleFactor, round(yOrigin - (southSteps.at(i) - minDec) * yScaleFactor)), ptRadius);
    }

    // Now show an ideal south recovery line
    dc.SetPen(bluePen);
    dc.SetBrush(blueBrush);
    double peakSouth = southSteps.at(0);
    for (int i = 1; i <= numNorth; i++)
    {
        wxPoint where = wxPoint((i + numNorth) * xScaleFactor, round(yOrigin - (peakSouth - i * northInc - minDec) * yScaleFactor));
        dc.DrawCircle(where, ptRadius);
    }

    dc.SelectObject(wxNullBitmap);
    return bmp;
}

// ------------------- BacklashTool Implementation
// Drives a Dec backlash measurement: clear backlash North, step North, step
// back South, test a trial correction, then restore the star position.
BacklashTool::BacklashTool()
{
    Calibration lastCalibration;

    m_scope = TheScope();
    if (m_scope->GetLastCalibrationParams(&lastCalibration))
    {
        m_lastDecGuideRate = lastCalibration.yRate;
        m_bltState = BLT_STATE_INITIALIZE;
    }
    else
    {
        m_bltState = BLT_STATE_ABORTED;
        m_lastStatus = _("Backlash measurement cannot be run - please re-run your mount calibration");
        Debug.AddLine("BLT: Could not get calibration data");
    }
    m_backlashResultPx = 0;
    m_backlashResultMs = 0;
    m_restoreCount = 0;         // avoid an uninitialized read in BLT_STATE_RESTORE
}

void BacklashTool::StartMeasurement()
{
    m_bltState = BLT_STATE_INITIALIZE;
    m_northBLSteps.clear();
    m_southBLSteps.clear();
    DecMeasurementStep(pFrame->pGuider->CurrentPosition());
}

void BacklashTool::StopMeasurement()
{
    m_bltState = BLT_STATE_ABORTED;
    DecMeasurementStep(pFrame->pGuider->CurrentPosition());
}

// True if the star is within 'margin' px of any frame edge.
static bool OutOfRoom(wxSize frameSize, double camX, double camY, int margin)
{
    return camX < margin ||
        camY < margin ||
        camX >= frameSize.GetWidth() - margin ||
        camY >= frameSize.GetHeight() - margin;
}

// State-machine step, called once per guide frame while measurement mode is
// active. Several cases deliberately fall through when a phase completes.
void BacklashTool::DecMeasurementStep(const PHD_Point& currentCamLoc)
{
    double decDelta = 0.;
    double amt = 0;
    PHD_Point currMountLocation;
    try
    {
        if (m_scope->TransformCameraCoordinatesToMountCoordinates(currentCamLoc, currMountLocation))
            throw ERROR_INFO("BLT: CamToMount xForm failed");
        if (m_bltState != BLT_STATE_INITIALIZE)
        {
            // Movement since the last marker point, in mount Dec coordinates
            decDelta = currMountLocation.Y - m_markerPoint.Y;
        }
        switch (m_bltState)
        {
        case BLT_STATE_INITIALIZE:
            m_stepCount = 0;
            m_markerPoint = currMountLocation;
            m_startingPoint = currMountLocation;
            // Compute pulse size for clearing backlash - just use the last known guide rate
            m_pulseWidth = BACKLASH_EXPECTED_DISTANCE * 1.25 / m_lastDecGuideRate;      // px/px_per_ms, bump it to sidestep near misses
            m_acceptedMoves = 0;
            m_lastClearRslt = 0;
            m_Rslt = MEASUREMENT_VALID;
            // Get this state machine in synch with the guider state machine - let it drive us, starting with backlash clearing step
            m_bltState = BLT_STATE_CLEAR_NORTH;
            m_scope->SetGuidingEnabled(true);
            pFrame->pGuider->EnableMeasurementMode(true);           // Measurement results now come to us
            break;

        case BLT_STATE_CLEAR_NORTH:
            // Want to see the mount moving north for 3 consecutive moves of >= expected distance pixels
            if (m_stepCount == 0)
            {
                // Get things moving with the first clearing pulse
                Debug.AddLine(wxString::Format("BLT starting North backlash clearing using pulse width of %d,"
                    " looking for moves >= %d px", m_pulseWidth, BACKLASH_EXPECTED_DISTANCE));
                pFrame->ScheduleCalibrationMove(m_scope, NORTH, m_pulseWidth);
                m_stepCount = 1;
                m_lastStatus = wxString::Format("Clearing North backlash, step %d", m_stepCount);
                break;
            }
            if (fabs(decDelta) >= BACKLASH_EXPECTED_DISTANCE)
            {
                if (m_acceptedMoves == 0 || (m_lastClearRslt * decDelta) > 0)    // Just starting or still moving in same direction
                {
                    m_acceptedMoves++;
                    Debug.AddLine(wxString::Format("BLT accepted clearing move of %0.2f", decDelta));
                }
                else
                {
                    m_acceptedMoves = 0;            // Reset on a direction reversal
                    Debug.AddLine(wxString::Format("BLT rejected clearing move of %0.2f, direction reversal", decDelta));
                }
            }
            else
                Debug.AddLine(wxString::Format("BLT backlash clearing move of %0.2f px was not large enough", decDelta));
            if (m_acceptedMoves < BACKLASH_MIN_COUNT)                   // More work to do
            {
                if (m_stepCount < MAX_CLEARING_STEPS)
                {
                    pFrame->ScheduleCalibrationMove(m_scope, NORTH, m_pulseWidth);
                    m_stepCount++;
                    m_markerPoint = currMountLocation;
                    m_lastClearRslt = decDelta;
                    m_lastStatus = wxString::Format("Clearing North backlash, step %d", m_stepCount);
                    Debug.AddLine(wxString::Format("BLT: %s, LastDecDelta = %0.2f px", m_lastStatus, decDelta));
                    break;
                }
                else
                {
                    m_lastStatus = _("Could not clear North backlash - test failed");
                    m_Rslt = MEASUREMENT_INVALID;
                    throw ERROR_INFO("BLT: Could not clear N backlash");
                }
            }
            else                                        // Got our 3 consecutive moves - press ahead
            {
                m_markerPoint = currMountLocation;            // Marker point at start of big Dec move North
                m_bltState = BLT_STATE_STEP_NORTH;
                double totalBacklashCleared = m_stepCount * m_pulseWidth;
                // Want to move the mount North at >=500 ms, regardless of image scale. But reduce pulse width if it would exceed
                // 80% of the tracking rectangle - need to leave some room for seeing deflections and dec drift
                m_pulseWidth = wxMax((int)NORTH_PULSE_SIZE, m_scope->GetCalibrationDuration());
                m_pulseWidth = wxMin(m_pulseWidth, (int)floor(0.7 * (double)pFrame->pGuider->GetMaxMovePixels() / m_lastDecGuideRate));
                m_stepCount = 0;
                // Move 50% more than the backlash we cleared or >=8 secs, whichever is greater.  We want to leave plenty of room
                // for giving South moves time to clear backlash and actually get moving
                m_northPulseCount = wxMax((MAX_NORTH_PULSES + m_pulseWidth - 1) / m_pulseWidth,
                                          totalBacklashCleared * 1.5 / m_pulseWidth);  // Up to 8 secs
                Debug.AddLine(wxString::Format("BLT: Starting North moves at Dec=%0.2f", currMountLocation.Y));
                // falling through to start moving North
            }

        case BLT_STATE_STEP_NORTH:
            if (m_stepCount < m_northPulseCount &&
                !OutOfRoom(pCamera->FullSize, currentCamLoc.X, currentCamLoc.Y, pFrame->pGuider->GetMaxMovePixels()))
            {
                m_lastStatus = wxString::Format("Moving North for %d ms, step %d / %d", m_pulseWidth, m_stepCount + 1, m_northPulseCount);
                Debug.AddLine(wxString::Format("BLT: %s, DecLoc = %0.2f", m_lastStatus, currMountLocation.Y));
                m_northBLSteps.push_back(currMountLocation.Y);
                pFrame->ScheduleCalibrationMove(m_scope, NORTH, m_pulseWidth);
                m_stepCount++;
                break;
            }
            else
            {
                // Either got finished or ran out of room
                Debug.AddLine(wxString::Format("BLT: North pulses ended at Dec location %0.2f, DecDelta=%0.2f px", currMountLocation.Y, decDelta));
                m_northBLSteps.push_back(currMountLocation.Y);
                if (m_stepCount < m_northPulseCount)
                {
                    if (m_stepCount < 0.5 * m_northPulseCount)
                    {
                        pFrame->Alert(_("Star too close to edge for accurate measurement of backlash"));
                        m_Rslt = MEASUREMENT_INVALID;
                    }
                    Debug.AddLine("BLT: North pulses truncated, too close to frame edge");
                }
                m_northRate = fabs(decDelta / (m_stepCount * m_pulseWidth));
                m_northPulseCount = m_stepCount;
                m_stepCount = 0;
                m_bltState = BLT_STATE_STEP_SOUTH;
                // falling through to moving back South
            }

        case BLT_STATE_STEP_SOUTH:
            if (m_stepCount < m_northPulseCount)
            {
                m_lastStatus = wxString::Format("Moving South for %d ms, step %d / %d", m_pulseWidth, m_stepCount + 1, m_northPulseCount);
                Debug.AddLine(wxString::Format("BLT: %s, DecLoc = %0.2f", m_lastStatus, currMountLocation.Y));
                m_southBLSteps.push_back(currMountLocation.Y);
                pFrame->ScheduleCalibrationMove(m_scope, SOUTH, m_pulseWidth);
                m_stepCount++;
                break;
            }
            // Now see where we ended up - fall through to testing this correction
            Debug.AddLine(wxString::Format("BLT: South pulses ended at Dec location %0.2f", currMountLocation.Y));
            m_southBLSteps.push_back(currMountLocation.Y);
            m_endSouth = currMountLocation;
            m_bltState = BLT_STATE_TEST_CORRECTION;
            m_stepCount = 0;
            // fall through

        case BLT_STATE_TEST_CORRECTION:
            if (m_stepCount == 0)
            {
                // decDelta contains the nominal backlash amount
                m_backlashResultPx = fabs(decDelta);
                m_backlashResultMs = (int)(m_backlashResultPx / m_northRate);          // our north rate is probably better than the calibration rate
                if (m_Rslt == MEASUREMENT_VALID)
                {
                    if (m_backlashResultMs >= 0.8 * m_northPulseCount * m_pulseWidth)
                        m_Rslt = MEASUREMENT_IMPAIRED;              // May not have moved far enough north for accurate measurement
                }
                Debug.AddLine(wxString::Format("BLT: Backlash amount is %0.2f px, %d ms", m_backlashResultPx, m_backlashResultMs));
                // Don't try this refinement if the clearing pulse will cause us to lose the star
                if (m_backlashResultPx < pFrame->pGuider->GetMaxMovePixels())
                {
                    m_lastStatus = wxString::Format(_("Issuing test backlash correction of %d ms"), m_backlashResultMs);
                    Debug.AddLine(m_lastStatus);
                    // This should put us back roughly to where we issued the big North pulse unless the backlash is very large
                    pFrame->ScheduleCalibrationMove(m_scope, SOUTH, m_backlashResultMs);
                    m_stepCount++;
                }
                else
                {
                    int maxFrameMove = (int)floor((double)pFrame->pGuider->GetMaxMovePixels() / m_northRate);
                    Debug.AddLine(wxString::Format("BLT: Clearing pulse is very large, issuing max S move of %d", maxFrameMove));
                    pFrame->ScheduleCalibrationMove(m_scope, SOUTH, maxFrameMove);       // One more pulse to cycle the state machine
                    m_bltState = BLT_STATE_RESTORE;
                }
                break;
            }
            // See how close we came, maybe fine-tune a bit
            Debug.AddLine(wxString::Format("BLT: Trial backlash pulse resulted in net DecDelta = %0.2f px, Dec Location %0.2f", decDelta, currMountLocation.Y));
            if (fabs(decDelta) > TRIAL_TOLERANCE)
            {
                double pulse_delta = fabs(currMountLocation.Y - m_endSouth.Y);
                if ((m_endSouth.Y - m_markerPoint.Y) * decDelta < 0)                    // Sign change, went too far
                {
                    m_backlashResultMs *= m_backlashResultPx / pulse_delta;
                    Debug.AddLine(wxString::Format("BLT: Trial backlash resulted in overshoot - adjusting pulse size by %0.2f", m_backlashResultPx / pulse_delta));
                }
                else
                {
                    // Under-correction is logged but deliberately not applied (50%
                    // damped factor kept for diagnostics only).
                    double corr_factor = (m_backlashResultPx / pulse_delta - 1.0) * 0.5 + 1.0;
                    Debug.AddLine(wxString::Format("BLT: Trial backlash resulted in under-correction - under-shot by %0.2f", corr_factor));
                }
            }
            else
                Debug.AddLine("BLT: Initial backlash pulse resulted in final delta of < 2 px");
            m_bltState = BLT_STATE_RESTORE;
            m_stepCount = 0;
            // fall through

        case BLT_STATE_RESTORE:
            // We could be a considerable distance from where we started, so get back close to the starting point without losing the star
            if (m_stepCount == 0)
            {
                Debug.AddLine(wxString::Format("BLT: Starting Dec position at %0.2f, Ending Dec position at %0.2f", m_markerPoint.Y, currMountLocation.Y));
                amt = fabs(currMountLocation.Y - m_startingPoint.Y);
                if (amt > pFrame->pGuider->GetMaxMovePixels())
                {
                    m_restoreCount = (int)floor((amt / m_northRate) / m_pulseWidth);
                    Debug.AddLine(wxString::Format("BLT: Final restore distance is %0.1f px, approx %d steps", amt, m_restoreCount));
                    m_stepCount = 0;
                }
                else
                {
                    m_restoreCount = 0;         // nothing to restore; was previously left stale/uninitialized
                    m_bltState = BLT_STATE_WRAPUP;
                }
            }
            if (m_stepCount < m_restoreCount)
            {
                pFrame->ScheduleCalibrationMove(m_scope, SOUTH, m_pulseWidth);
                m_stepCount++;
                m_lastStatus = _("Restoring star position");
                // NOTE: previously passed printf-style args directly to AddLine,
                // unlike every other call site in this file.
                Debug.AddLine(wxString::Format("BLT: Issuing restore pulse count %d of %d ms", m_stepCount, m_pulseWidth));
                break;
            }
            m_bltState = BLT_STATE_WRAPUP;
            // fall through

        case BLT_STATE_WRAPUP:
            m_lastStatus = _("Measurement complete");
            CleanUp();
            m_bltState = BLT_STATE_COMPLETED;
            break;

        case BLT_STATE_COMPLETED:
            break;

        case BLT_STATE_ABORTED:
            m_lastStatus = _("Measurement halted");
            Debug.AddLine("BLT: measurement process halted by user");
            CleanUp();
            break;
        }                       // end of switch on state
    }
    catch (const wxString& msg)
    {
        Debug.AddLine(wxString::Format("BLT: Exception thrown in logical state %d", (int)m_bltState));
        m_bltState = BLT_STATE_ABORTED;
        // Translate only the literal prefix; _() on a runtime-concatenated
        // string defeats the translation lookup and string extraction.
        m_lastStatus = _("Measurement encountered an error: ") + msg;
        Debug.AddLine("BLT: " + m_lastStatus);
        CleanUp();
    }
}

// Launch modal dialog to show the BLT graph
void BacklashTool::ShowGraph(wxDialog *pGA)
{
    BacklashGraph dlg(pGA, this);
    dlg.ShowModal();
}

void BacklashTool::CleanUp()
{
    m_scope->GetBacklashComp()->Reset();        // Normal guiding will start, don't want old BC state applied
    pFrame->pGuider->EnableMeasurementMode(false);
}

//------------------------------ End of BacklashTool implementation
{ "content_hash": "2dd0889d9f27dc23c69d7855c6efbfe2", "timestamp": "", "source": "github", "line_count": 523, "max_line_length": 220, "avg_line_length": 45.09177820267686, "alnum_prop": 0.5729126913454607, "repo_name": "eklenske/phd2", "id": "4c710577e5968653939d8e09702aa961562ea26e", "size": "25386", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "backlash_comp.cpp", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "Batchfile", "bytes": "2158" }, { "name": "C", "bytes": "583323" }, { "name": "C++", "bytes": "2643666" }, { "name": "CMake", "bytes": "75485" }, { "name": "HTML", "bytes": "472418" }, { "name": "Inno Setup", "bytes": "4478" }, { "name": "Objective-C", "bytes": "36352" }, { "name": "Perl", "bytes": "3756" }, { "name": "Python", "bytes": "6628" }, { "name": "Shell", "bytes": "4387" }, { "name": "Visual Basic", "bytes": "2114" } ] }
<head> <link type="type/css" href="/actual/my-file.my-style.css"> <script type="application/javascript" src="/actual/my-file.js.js"></script> </head> <div>body</div>
{ "content_hash": "36484112735c44bb08a39a984d93833b", "timestamp": "", "source": "github", "line_count": 6, "max_line_length": 77, "avg_line_length": 28.5, "alnum_prop": 0.672514619883041, "repo_name": "da99/dum_dum_html", "id": "fd64a8d64634dd6771986217cb81b388648ef23c", "size": "172", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "specs/renders-css-js-vars/expect/my-file.html", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "23" }, { "name": "HTML", "bytes": "5621" }, { "name": "JavaScript", "bytes": "11472" }, { "name": "Shell", "bytes": "5465" } ] }
import * as ts from "typescript";

import {AbstractRule} from "./language/rule/abstractRule";
import {IOptions} from "./language/rule/rule";
import {forEachComment, TokenPosition} from "./language/utils";
import {RuleWalker} from "./language/walker/ruleWalker";
import {IEnableDisablePosition} from "./ruleLoader";

/**
 * Walks a source file's comments looking for `tslint:enable`/`tslint:disable`
 * switches and records, per rule, the ordered positions at which the rule's
 * enabled state changes. Only rules enabled in the configuration get an entry;
 * each entry starts enabled at position 0.
 */
export class EnableDisableRulesWalker extends RuleWalker {
    // rule name -> ordered list of (position, isEnabled) switch points
    public enableDisableRuleMap: {[rulename: string]: IEnableDisablePosition[]} = {};

    constructor(sourceFile: ts.SourceFile, options: IOptions, rules: {[name: string]: any}) {
        super(sourceFile, options);

        // Seed the map: every configured-and-enabled rule starts "on" at offset 0.
        if (rules) {
            for (const rule in rules) {
                if (rules.hasOwnProperty(rule) && AbstractRule.isRuleEnabled(rules[rule])) {
                    this.enableDisableRuleMap[rule] = [{
                        isEnabled: true,
                        position: 0,
                    }];
                }
            }
        }
    }

    // Scan every comment in the file for a possible tslint switch directive.
    public visitSourceFile(node: ts.SourceFile) {
        forEachComment(node, (fullText, _kind, pos) => {
            return this.handlePossibleTslintSwitch(fullText.substring(pos.tokenStart, pos.end), node, pos);
        });
    }

    /**
     * Returns the offset of the start of the line containing `position`,
     * optionally shifted down by `lineOffset` lines. Past the last line it
     * returns the file's full width (i.e. end of file).
     */
    private getStartOfLinePosition(node: ts.SourceFile, position: number, lineOffset = 0) {
        const line = ts.getLineAndCharacterOfPosition(node, position).line + lineOffset;
        const lineStarts = node.getLineStarts();
        if (line >= lineStarts.length) {
            // next line ends with eof or there is no next line
            return node.getFullWidth();
        }
        return lineStarts[line];
    }

    /**
     * Records a state switch for `ruleName` at `start`; when `end` is given
     * (line-scoped switches) also records the reverse switch at `end`.
     */
    private switchRuleState(ruleName: string, isEnabled: boolean, start: number, end?: number): void {
        const ruleStateMap = this.enableDisableRuleMap[ruleName];

        ruleStateMap.push({
            isEnabled,
            position: start,
        });

        if (end) {
            // switchRuleState method is only called when rule state changes therefore we can safely use opposite state
            ruleStateMap.push({
                isEnabled: !isEnabled,
                position: end,
            });
        }
    }

    // The most recently recorded enabled/disabled state for the rule.
    private getLatestRuleState(ruleName: string): boolean {
        const ruleStateMap = this.enableDisableRuleMap[ruleName];

        return ruleStateMap[ruleStateMap.length - 1].isEnabled;
    }

    /**
     * Parses one comment; when it is a tslint switch directive, records the
     * corresponding switch points for each named (or all) configured rule.
     */
    private handlePossibleTslintSwitch(commentText: string, node: ts.SourceFile, pos: TokenPosition) {
        // regex is: start of string followed by "/*" or "//" followed by any amount of whitespace followed by "tslint:"
        if (commentText.match(/^(\/\*|\/\/)\s*tslint:/)) {
            const commentTextParts = commentText.split(":");
            // regex is: start of string followed by either "enable" or "disable"
            // followed optionally by -line or -next-line
            // followed by either whitespace or end of string
            const enableOrDisableMatch = commentTextParts[1].match(/^(enable|disable)(-(line|next-line))?(\s|$)/);

            if (enableOrDisableMatch != null) {
                const isEnabled = enableOrDisableMatch[1] === "enable";
                const isCurrentLine = enableOrDisableMatch[3] === "line";
                const isNextLine = enableOrDisableMatch[3] === "next-line";

                let rulesList = ["all"];
                if (commentTextParts.length === 2) {
                    // an implicit whitespace separator is used for the rules list.
                    rulesList = commentTextParts[1].split(/\s+/).slice(1);
                    // remove empty items and potential comment end.
                    rulesList = rulesList.filter((item) => !!item && !item.includes("*/"));
                    // potentially there were no items, so default to `all`.
                    rulesList = rulesList.length > 0 ? rulesList : ["all"];
                } else if (commentTextParts.length > 2) {
                    // an explicit separator was specified for the rules list.
                    rulesList = commentTextParts[2].split(/\s+/);
                }

                if (rulesList.indexOf("all") !== -1) {
                    // iterate over all enabled rules
                    rulesList = Object.keys(this.enableDisableRuleMap);
                }

                for (const ruleToSwitch of rulesList) {
                    if (!(ruleToSwitch in this.enableDisableRuleMap)) {
                        // all rules enabled in configuration are already in map - skip switches for disabled rules
                        continue;
                    }

                    const previousState = this.getLatestRuleState(ruleToSwitch);
                    if (previousState === isEnabled) {
                        // no need to add switch points if there is no change in rule state
                        continue;
                    }

                    let start: number;
                    let end: number | undefined;
                    if (isCurrentLine) {
                        // start at the beginning of the current line
                        start = this.getStartOfLinePosition(node, pos.tokenStart);
                        // end at the beginning of the next line
                        end = pos.end + 1;
                    } else if (isNextLine) {
                        // start at the current position
                        start = pos.tokenStart;
                        // end at the beginning of the line following the next line
                        end = this.getStartOfLinePosition(node, pos.tokenStart, 2);
                    } else {
                        // disable rule for the rest of the file
                        // start at the current position, but skip end position
                        start = pos.tokenStart;
                        end = undefined;
                    }

                    this.switchRuleState(ruleToSwitch, isEnabled, start, end);
                }
            }
        }
    }
}
{ "content_hash": "7a4578f747075385e6414c6cf0bcd5be", "timestamp": "", "source": "github", "line_count": 141, "max_line_length": 120, "avg_line_length": 42.46099290780142, "alnum_prop": 0.5455152831134124, "repo_name": "nchen63/tslint", "id": "6c819f7ff4439aebaac97b345958434c99c3666a", "size": "6610", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/enableDisableRules.ts", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Batchfile", "bytes": "168" }, { "name": "JavaScript", "bytes": "3321" }, { "name": "Shell", "bytes": "437" }, { "name": "TypeScript", "bytes": "772904" } ] }
package command import ( "fmt" "net/http" "os" "strings" "time" stats_collect "github.com/chrislusf/seaweedfs/weed/stats" "github.com/chrislusf/seaweedfs/weed/glog" "github.com/chrislusf/seaweedfs/weed/pb" "github.com/chrislusf/seaweedfs/weed/util" "github.com/chrislusf/seaweedfs/weed/util/grace" ) type ServerOptions struct { cpuprofile *string memprofile *string debug *bool debugPort *int v VolumeServerOptions } var ( serverOptions ServerOptions masterOptions MasterOptions filerOptions FilerOptions s3Options S3Options iamOptions IamOptions webdavOptions WebDavOption mqBrokerOptions MessageQueueBrokerOptions ) func init() { cmdServer.Run = runServer // break init cycle } var cmdServer = &Command{ UsageLine: "server -dir=/tmp -volume.max=5 -ip=server_name", Short: "start a master server, a volume server, and optionally a filer and a S3 gateway", Long: `start both a volume server to provide storage spaces and a master server to provide volume=>location mapping service and sequence number of file ids This is provided as a convenient way to start both volume server and master server. The servers acts exactly the same as starting them separately. So other volume servers can connect to this master server also. Optionally, a filer server can be started. Also optionally, a S3 gateway can be started. `, } var ( serverIp = cmdServer.Flag.String("ip", util.DetectedHostAddress(), "ip or server name, also used as identifier") serverBindIp = cmdServer.Flag.String("ip.bind", "", "ip address to bind to. If empty, default to same as -ip option.") serverTimeout = cmdServer.Flag.Int("idleTimeout", 30, "connection idle seconds") serverDataCenter = cmdServer.Flag.String("dataCenter", "", "current volume server's data center name") serverRack = cmdServer.Flag.String("rack", "", "current volume server's rack name") serverWhiteListOption = cmdServer.Flag.String("whiteList", "", "comma separated Ip addresses having write permission. 
No limit if empty.") serverDisableHttp = cmdServer.Flag.Bool("disableHttp", false, "disable http requests, only gRPC operations are allowed.") volumeDataFolders = cmdServer.Flag.String("dir", os.TempDir(), "directories to store data files. dir[,dir]...") volumeMaxDataVolumeCounts = cmdServer.Flag.String("volume.max", "8", "maximum numbers of volumes, count[,count]... If set to zero, the limit will be auto configured as free disk space divided by volume size.") volumeMinFreeSpacePercent = cmdServer.Flag.String("volume.minFreeSpacePercent", "1", "minimum free disk space (default to 1%). Low disk space will mark all volumes as ReadOnly (deprecated, use minFreeSpace instead).") volumeMinFreeSpace = cmdServer.Flag.String("volume.minFreeSpace", "", "min free disk space (value<=100 as percentage like 1, other as human readable bytes, like 10GiB). Low disk space will mark all volumes as ReadOnly.") serverMetricsHttpPort = cmdServer.Flag.Int("metricsPort", 0, "Prometheus metrics listen port") // pulseSeconds = cmdServer.Flag.Int("pulseSeconds", 5, "number of seconds between heartbeats") isStartingMasterServer = cmdServer.Flag.Bool("master", true, "whether to start master server") isStartingVolumeServer = cmdServer.Flag.Bool("volume", true, "whether to start volume server") isStartingFiler = cmdServer.Flag.Bool("filer", false, "whether to start filer") isStartingS3 = cmdServer.Flag.Bool("s3", false, "whether to start S3 gateway") isStartingIam = cmdServer.Flag.Bool("iam", false, "whether to start IAM service") isStartingWebDav = cmdServer.Flag.Bool("webdav", false, "whether to start WebDAV gateway") isStartingMqBroker = cmdServer.Flag.Bool("mq.broker", false, "whether to start message queue broker") serverWhiteList []string False = false ) func init() { serverOptions.cpuprofile = cmdServer.Flag.String("cpuprofile", "", "cpu profile output file") serverOptions.memprofile = cmdServer.Flag.String("memprofile", "", "memory profile output file") serverOptions.debug = 
cmdServer.Flag.Bool("debug", false, "serves runtime profiling data, e.g., http://localhost:6060/debug/pprof/goroutine?debug=2") serverOptions.debugPort = cmdServer.Flag.Int("debug.port", 6060, "http port for debugging") masterOptions.port = cmdServer.Flag.Int("master.port", 9333, "master server http listen port") masterOptions.portGrpc = cmdServer.Flag.Int("master.port.grpc", 0, "master server grpc listen port") masterOptions.metaFolder = cmdServer.Flag.String("master.dir", "", "data directory to store meta data, default to same as -dir specified") masterOptions.peers = cmdServer.Flag.String("master.peers", "", "all master nodes in comma separated ip:masterPort list") masterOptions.volumeSizeLimitMB = cmdServer.Flag.Uint("master.volumeSizeLimitMB", 30*1000, "Master stops directing writes to oversized volumes.") masterOptions.volumePreallocate = cmdServer.Flag.Bool("master.volumePreallocate", false, "Preallocate disk space for volumes.") masterOptions.defaultReplication = cmdServer.Flag.String("master.defaultReplication", "", "Default replication type if not specified.") masterOptions.garbageThreshold = cmdServer.Flag.Float64("garbageThreshold", 0.3, "threshold to vacuum and reclaim spaces") masterOptions.metricsAddress = cmdServer.Flag.String("metrics.address", "", "Prometheus gateway address") masterOptions.metricsIntervalSec = cmdServer.Flag.Int("metrics.intervalSeconds", 15, "Prometheus push interval in seconds") masterOptions.raftResumeState = cmdServer.Flag.Bool("resumeState", false, "resume previous state on start master server") masterOptions.heartbeatInterval = cmdServer.Flag.Duration("master.heartbeatInterval", 300*time.Millisecond, "heartbeat interval of master servers, and will be randomly multiplied by [1, 1.25)") masterOptions.electionTimeout = cmdServer.Flag.Duration("master.electionTimeout", 10*time.Second, "election timeout of master servers") filerOptions.filerGroup = cmdServer.Flag.String("filer.filerGroup", "", "share metadata with other filers 
in the same filerGroup") filerOptions.collection = cmdServer.Flag.String("filer.collection", "", "all data will be stored in this collection") filerOptions.port = cmdServer.Flag.Int("filer.port", 8888, "filer server http listen port") filerOptions.portGrpc = cmdServer.Flag.Int("filer.port.grpc", 0, "filer server grpc listen port") filerOptions.publicPort = cmdServer.Flag.Int("filer.port.public", 0, "filer server public http listen port") filerOptions.defaultReplicaPlacement = cmdServer.Flag.String("filer.defaultReplicaPlacement", "", "default replication type. If not specified, use master setting.") filerOptions.disableDirListing = cmdServer.Flag.Bool("filer.disableDirListing", false, "turn off directory listing") filerOptions.maxMB = cmdServer.Flag.Int("filer.maxMB", 4, "split files larger than the limit") filerOptions.dirListingLimit = cmdServer.Flag.Int("filer.dirListLimit", 1000, "limit sub dir listing size") filerOptions.cipher = cmdServer.Flag.Bool("filer.encryptVolumeData", false, "encrypt data on volume servers") filerOptions.saveToFilerLimit = cmdServer.Flag.Int("filer.saveToFilerLimit", 0, "Small files smaller than this limit can be cached in filer store.") filerOptions.concurrentUploadLimitMB = cmdServer.Flag.Int("filer.concurrentUploadLimitMB", 64, "limit total concurrent upload size") filerOptions.localSocket = cmdServer.Flag.String("filer.localSocket", "", "default to /tmp/seaweedfs-filer-<port>.sock") filerOptions.showUIDirectoryDelete = cmdServer.Flag.Bool("filer.ui.deleteDir", true, "enable filer UI show delete directory button") serverOptions.v.port = cmdServer.Flag.Int("volume.port", 8080, "volume server http listen port") serverOptions.v.portGrpc = cmdServer.Flag.Int("volume.port.grpc", 0, "volume server grpc listen port") serverOptions.v.publicPort = cmdServer.Flag.Int("volume.port.public", 0, "volume server public port") serverOptions.v.indexType = cmdServer.Flag.String("volume.index", "memory", "Choose 
[memory|leveldb|leveldbMedium|leveldbLarge] mode for memory~performance balance.") serverOptions.v.diskType = cmdServer.Flag.String("volume.disk", "", "[hdd|ssd|<tag>] hard drive or solid state drive or any tag") serverOptions.v.fixJpgOrientation = cmdServer.Flag.Bool("volume.images.fix.orientation", false, "Adjust jpg orientation when uploading.") serverOptions.v.readMode = cmdServer.Flag.String("volume.readMode", "proxy", "[local|proxy|redirect] how to deal with non-local volume: 'not found|read in remote node|redirect volume location'.") serverOptions.v.compactionMBPerSecond = cmdServer.Flag.Int("volume.compactionMBps", 0, "limit compaction speed in mega bytes per second") serverOptions.v.fileSizeLimitMB = cmdServer.Flag.Int("volume.fileSizeLimitMB", 256, "limit file size to avoid out of memory") serverOptions.v.concurrentUploadLimitMB = cmdServer.Flag.Int("volume.concurrentUploadLimitMB", 64, "limit total concurrent upload size") serverOptions.v.concurrentDownloadLimitMB = cmdServer.Flag.Int("volume.concurrentDownloadLimitMB", 64, "limit total concurrent download size") serverOptions.v.publicUrl = cmdServer.Flag.String("volume.publicUrl", "", "publicly accessible address") serverOptions.v.preStopSeconds = cmdServer.Flag.Int("volume.preStopSeconds", 10, "number of seconds between stop send heartbeats and stop volume server") serverOptions.v.pprof = cmdServer.Flag.Bool("volume.pprof", false, "enable pprof http handlers. 
precludes --memprofile and --cpuprofile") serverOptions.v.idxFolder = cmdServer.Flag.String("volume.dir.idx", "", "directory to store .idx files") serverOptions.v.enableTcp = cmdServer.Flag.Bool("volume.tcp", false, "<exprimental> enable tcp port") serverOptions.v.inflightUploadDataTimeout = cmdServer.Flag.Duration("volume.inflightUploadDataTimeout", 60*time.Second, "inflight upload data wait timeout of volume servers") s3Options.port = cmdServer.Flag.Int("s3.port", 8333, "s3 server http listen port") s3Options.portGrpc = cmdServer.Flag.Int("s3.port.grpc", 0, "s3 server grpc listen port") s3Options.domainName = cmdServer.Flag.String("s3.domainName", "", "suffix of the host name in comma separated list, {bucket}.{domainName}") s3Options.tlsPrivateKey = cmdServer.Flag.String("s3.key.file", "", "path to the TLS private key file") s3Options.tlsCertificate = cmdServer.Flag.String("s3.cert.file", "", "path to the TLS certificate file") s3Options.config = cmdServer.Flag.String("s3.config", "", "path to the config file") s3Options.auditLogConfig = cmdServer.Flag.String("s3.auditLogConfig", "", "path to the audit log config file") s3Options.allowEmptyFolder = cmdServer.Flag.Bool("s3.allowEmptyFolder", true, "allow empty folders") s3Options.allowDeleteBucketNotEmpty = cmdServer.Flag.Bool("s3.allowDeleteBucketNotEmpty", true, "allow recursive deleting all entries along with bucket") iamOptions.port = cmdServer.Flag.Int("iam.port", 8111, "iam server http listen port") webdavOptions.port = cmdServer.Flag.Int("webdav.port", 7333, "webdav server http listen port") webdavOptions.collection = cmdServer.Flag.String("webdav.collection", "", "collection to create the files") webdavOptions.replication = cmdServer.Flag.String("webdav.replication", "", "replication to create the files") webdavOptions.disk = cmdServer.Flag.String("webdav.disk", "", "[hdd|ssd|<tag>] hard drive or solid state drive or any tag") webdavOptions.tlsPrivateKey = cmdServer.Flag.String("webdav.key.file", "", "path 
to the TLS private key file") webdavOptions.tlsCertificate = cmdServer.Flag.String("webdav.cert.file", "", "path to the TLS certificate file") webdavOptions.cacheDir = cmdServer.Flag.String("webdav.cacheDir", os.TempDir(), "local cache directory for file chunks") webdavOptions.cacheSizeMB = cmdServer.Flag.Int64("webdav.cacheCapacityMB", 0, "local cache capacity in MB") mqBrokerOptions.port = cmdServer.Flag.Int("mq.broker.port", 17777, "message queue broker gRPC listen port") } func runServer(cmd *Command, args []string) bool { if *serverOptions.debug { go http.ListenAndServe(fmt.Sprintf(":%d", *serverOptions.debugPort), nil) } util.LoadConfiguration("security", false) util.LoadConfiguration("master", false) grace.SetupProfiling(*serverOptions.cpuprofile, *serverOptions.memprofile) if *isStartingS3 { *isStartingFiler = true } if *isStartingIam { *isStartingFiler = true } if *isStartingWebDav { *isStartingFiler = true } if *isStartingMqBroker { *isStartingFiler = true } if *isStartingMasterServer { _, peerList := checkPeers(*serverIp, *masterOptions.port, *masterOptions.portGrpc, *masterOptions.peers) peers := strings.Join(pb.ToAddressStrings(peerList), ",") masterOptions.peers = &peers } if *serverBindIp == "" { serverBindIp = serverIp } // ip address masterOptions.ip = serverIp masterOptions.ipBind = serverBindIp filerOptions.masters = pb.ServerAddresses(*masterOptions.peers).ToAddressMap() filerOptions.ip = serverIp filerOptions.bindIp = serverBindIp s3Options.bindIp = serverBindIp iamOptions.ip = serverBindIp iamOptions.masters = masterOptions.peers serverOptions.v.ip = serverIp serverOptions.v.bindIp = serverBindIp serverOptions.v.masters = pb.ServerAddresses(*masterOptions.peers).ToAddresses() serverOptions.v.idleConnectionTimeout = serverTimeout serverOptions.v.dataCenter = serverDataCenter serverOptions.v.rack = serverRack mqBrokerOptions.ip = serverIp mqBrokerOptions.masters = filerOptions.masters mqBrokerOptions.filerGroup = filerOptions.filerGroup // 
serverOptions.v.pulseSeconds = pulseSeconds // masterOptions.pulseSeconds = pulseSeconds masterOptions.whiteList = serverWhiteListOption filerOptions.dataCenter = serverDataCenter filerOptions.rack = serverRack mqBrokerOptions.dataCenter = serverDataCenter mqBrokerOptions.rack = serverRack filerOptions.disableHttp = serverDisableHttp masterOptions.disableHttp = serverDisableHttp filerAddress := string(pb.NewServerAddress(*serverIp, *filerOptions.port, *filerOptions.portGrpc)) s3Options.filer = &filerAddress iamOptions.filer = &filerAddress webdavOptions.filer = &filerAddress mqBrokerOptions.filerGroup = filerOptions.filerGroup go stats_collect.StartMetricsServer(*serverMetricsHttpPort) folders := strings.Split(*volumeDataFolders, ",") if *masterOptions.volumeSizeLimitMB > util.VolumeSizeLimitGB*1000 { glog.Fatalf("masterVolumeSizeLimitMB should be less than 30000") } if *masterOptions.metaFolder == "" { *masterOptions.metaFolder = folders[0] } if err := util.TestFolderWritable(util.ResolvePath(*masterOptions.metaFolder)); err != nil { glog.Fatalf("Check Meta Folder (-mdir=\"%s\") Writable: %s", *masterOptions.metaFolder, err) } filerOptions.defaultLevelDbDirectory = masterOptions.metaFolder if *serverWhiteListOption != "" { serverWhiteList = strings.Split(*serverWhiteListOption, ",") } if *isStartingFiler { go func() { time.Sleep(1 * time.Second) filerOptions.startFiler() }() } if *isStartingS3 { go func() { time.Sleep(2 * time.Second) s3Options.localFilerSocket = filerOptions.localSocket s3Options.startS3Server() }() } if *isStartingIam { go func() { time.Sleep(2 * time.Second) iamOptions.startIamServer() }() } if *isStartingWebDav { go func() { time.Sleep(2 * time.Second) webdavOptions.startWebDav() }() } if *isStartingMqBroker { go func() { time.Sleep(2 * time.Second) mqBrokerOptions.startQueueServer() }() } // start volume server if *isStartingVolumeServer { minFreeSpaces := util.MustParseMinFreeSpace(*volumeMinFreeSpace, *volumeMinFreeSpacePercent) go 
serverOptions.v.startVolumeServer(*volumeDataFolders, *volumeMaxDataVolumeCounts, *serverWhiteListOption, minFreeSpaces) } if *isStartingMasterServer { go startMaster(masterOptions, serverWhiteList) } select {} }
{ "content_hash": "8c849b2e08ecff2807df9c261ff365dc", "timestamp": "", "source": "github", "line_count": 301, "max_line_length": 228, "avg_line_length": 53.33554817275748, "alnum_prop": 0.7609941447614301, "repo_name": "chrislusf/seaweedfs", "id": "b993d9428fbf106e6349008c641774a3889074cd", "size": "16054", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "weed/command/server.go", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Dockerfile", "bytes": "341" }, { "name": "Go", "bytes": "2846786" }, { "name": "HTML", "bytes": "25537" }, { "name": "Java", "bytes": "390343" }, { "name": "Lua", "bytes": "1515" }, { "name": "Makefile", "bytes": "7088" }, { "name": "Shell", "bytes": "5602" }, { "name": "Smarty", "bytes": "6176" } ] }
package gov.nasa.ensemble.common.ui.treetable; import org.eclipse.jface.viewers.LabelProvider; import org.eclipse.swt.graphics.Color; import org.eclipse.swt.graphics.Font; import org.eclipse.swt.graphics.Image; public abstract class TreeTableLabelProvider extends LabelProvider { public TreeTableLabelProvider() { super(); } public abstract boolean needsUpdate(Object feature); public abstract Font getFont(Object element); public abstract Color getBackground(Object element); /** * * * @param element */ public void expand(Object element) { // do nothing } /** * Override to return true or false if you maintain this state in the model. * * @param element * @return */ public Boolean isExpanded(Object element) { return null; } @Override public String getText(Object element) { return ""; } @Override public Image getImage(Object element) { return null; } public Color getForeground(Object element) { return null; } public String getTooltipText(Object element) { return null; } }
{ "content_hash": "723d0d487d0e1f947dcfe8d1316071f4", "timestamp": "", "source": "github", "line_count": 58, "max_line_length": 77, "avg_line_length": 18.155172413793103, "alnum_prop": 0.7236467236467237, "repo_name": "nasa/OpenSPIFe", "id": "387f2fd8de71d35111cd28cffb01193ceb5055b4", "size": "1933", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "gov.nasa.ensemble.common.ui/src/gov/nasa/ensemble/common/ui/treetable/TreeTableLabelProvider.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "4538" }, { "name": "HTML", "bytes": "705398" }, { "name": "Java", "bytes": "15764637" }, { "name": "JavaScript", "bytes": "2244" }, { "name": "Shell", "bytes": "60188" } ] }
// Copyright (c) .NET Core Community. All rights reserved. // Licensed under the MIT License. See License.txt in the project root for license information. using System; using System.Collections.Generic; using Confluent.Kafka; // ReSharper disable once CheckNamespace namespace DotNetCore.CAP { /// <summary> /// Provides programmatic configuration for the CAP kafka project. /// </summary> public class KafkaOptions { /// <summary> /// librdkafka configuration parameters (refer to https://github.com/edenhill/librdkafka/blob/master/CONFIGURATION.md). /// <para> /// Topic configuration parameters are specified via the "default.topic.config" sub-dictionary config parameter. /// </para> /// </summary> public readonly Dictionary<string, string> MainConfig; public KafkaOptions() { MainConfig = new Dictionary<string, string>(); RetriableErrorCodes = new List<ErrorCode> { ErrorCode.GroupLoadInProress }; } /// <summary> /// Producer connection pool size, default is 10 /// </summary> public int ConnectionPoolSize { get; set; } = 10; /// <summary> /// The `bootstrap.servers` item config of <see cref="MainConfig" />. /// <para> /// Initial list of brokers as a CSV list of broker host or host:port. /// </para> /// </summary> public string Servers { get; set; } = default!; /// <summary> /// If you need to get offset and partition and so on.., you can use this function to write additional header into <see cref="CapHeader"/> /// </summary> public Func<ConsumeResult<string, byte[]>, List<KeyValuePair<string, string>>>? CustomHeaders { get; set; } /// <summary> /// New retriable error code (refer to https://docs.confluent.io/platform/current/clients/librdkafka/html/rdkafkacpp_8h.html#a4c6b7af48c215724c323c60ea4080dbf) /// </summary> public IList<ErrorCode> RetriableErrorCodes { get; set; } } }
{ "content_hash": "a1bedff9985da47d75111a675c42e8d2", "timestamp": "", "source": "github", "line_count": 56, "max_line_length": 167, "avg_line_length": 38.035714285714285, "alnum_prop": 0.6225352112676056, "repo_name": "ouraspnet/cap", "id": "3453ad53c506fab557a10e736bfe3cf645935b91", "size": "2132", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/DotNetCore.CAP.Kafka/CAP.KafkaOptions.cs", "mode": "33188", "license": "mit", "language": [ { "name": "C#", "bytes": "148786" } ] }
import numpy as np import pandas as pd from datetime import datetime from pandas_datareader import data as web # Dow Jones Industrial Average Tickers DJIA = ['BIV','BLV','BND','VCIT','VFIAX', 'VYM','VO','VB','VWO','VSS','VGTSX','VNQ','PARWX'] # Dates start = datetime(2010, 1, 1) end = datetime.today() # Grab data, change to weekly returns and write to CSV print("Start time", datetime.today().now()) #keep time x = web.DataReader(DJIA,"yahoo", start, end)['Adj Close'] # x = x.ix['Adj Close'] df = pd.DataFrame(x) # df = df.sort_index(ascending=False) df = df.resample('W-FRI').last().sort_index(ascending=False) #changing data to weekly print(df) for row in range(len(df)-1): df.iloc[row] = df.iloc[row].div(df.iloc[row+1]) #return df = df.iloc[:-1] df = np.log(df) #taking log return df.to_csv('SethFundData.csv', encoding='utf-8') #write to CSV print("End Time: ", datetime.today().now())
{ "content_hash": "e932eeb68e39d05a817f075e5a99679c", "timestamp": "", "source": "github", "line_count": 40, "max_line_length": 85, "avg_line_length": 23.9, "alnum_prop": 0.643305439330544, "repo_name": "mishka28/NYU-Python", "id": "9dfcf5e7b8431d3317db392057c40b2ea840ac0c", "size": "980", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "programming_with_pythong_class2/Seth/yahoodata2.py", "mode": "33188", "license": "mit", "language": [ { "name": "C", "bytes": "206" }, { "name": "HTML", "bytes": "43267" }, { "name": "Makefile", "bytes": "561" }, { "name": "Python", "bytes": "100728" }, { "name": "Shell", "bytes": "7729" }, { "name": "Vim script", "bytes": "719" } ] }
""" Extracts minimal phrases from symmetrized word alignments """ from collections import defaultdict, deque def as_words(phrase, id2word): return tuple(id2word[i] for i in phrase) def try_expand(f, f2e, f_min, f_max, e_min, e_max): """ Try to expand the boundaries of a phrase pair based on the alignment points in f2e[f] :param f: a position in the source :param f2e: maps source positions into a sorted list of aligned target positions :param f_min, f_max: source phrase boundary :param e_min, e_max: target phrase boundary :returns: f_min, f_max, e_min, e_max, discovered target positions """ # retrieve the target positions reachable from f es = f2e.get(f, None) extra = [] # if there is any if es is not None: if f_min is None: # we just discovered that we know something about the source phrase f_min = f_max = f if e_min is None: # thus e_max is also None # we just learnt the first thing about the target phrase e_min = es[0] e_max = es[-1] # basically, we discovered the positions [e_min .. e_max] extra.extend(xrange(e_min, e_max + 1)) else: # we have the chance to update our target phrase if e_min > es[0]: # we discovered a few extra words on the left extra.extend(xrange(es[0], e_min)) # and update e_min e_min = es[0] if e_max < es[-1]: # update e_max # we discovered a few extra words to the right extra.extend(xrange(e_max + 1, es[-1] + 1)) # and update e_max e_max = es[-1] return f_min, f_max, e_min, e_max, extra def minimal_biphrases(f_words, e_words, links): """ Returns the minimal phrase pairs :param f_words: list of source words :param e_words: list of target words :param links: list of alignment points :return: list of tuples (source phrase, target phrase) where a phrase is a list of positions in f_words or e_words """ # 1) organise alignment points # first we group them f2e = defaultdict(set) e2f = defaultdict(set) for i, j in links: f2e[i].add(j) e2f[j].add(i) # then we sort them f2e = {f:sorted(es) for f, es in f2e.iteritems()} e2f = {e:sorted(fs) for e, fs in 
e2f.iteritems()} # biphrases biphrases = set() # 2) find minimal phrase pairs f_done = set() e_done = set() # iterate investigating words in the source # TODO: sort alignment points as to visit adjacent points first for fi, ej in links: # check if row or column have alread been done, if so, the minimal phrase consistent with this alignment point has already been found if fi in f_done or ej in e_done: continue else: # flag row and column as processed f_done.add(fi) e_done.add(ej) # source phrase boundaries f_min, f_max = fi, fi # target phrase boundaries e_min, e_max = ej, ej # queue of words whose alignment points need be investigated f_queue = deque([f_min]) e_queue = deque([e_min]) # for as long as there are words to be visited while f_queue or e_queue: if f_queue: # get a source word f = f_queue.popleft() # try to expand the boundaries f_min, f_max, e_min, e_max, extra = try_expand(f, f2e, f_min, f_max, e_min, e_max) # book discovered target words e_queue.extend(extra) if e_queue: # get a target word e = e_queue.popleft() # try to expand the boundaries (the logic is the same, only transposed) e_min, e_max, f_min, f_max, extra = try_expand(e, e2f, e_min, e_max, f_min, f_max) # book discovered source words f_queue.extend(extra) # store the minimal phrase f_phrase = tuple(range(f_min, f_max + 1)) e_phrase = tuple(range(e_min, e_max + 1)) biphrases.add((f_phrase, e_phrase)) return biphrases def unaligned_words(f_words, e_words, biphrases): """Find unaligned words :param f_words: source words :param e_words: target words :param biphrases: list of phrase pairs (check `minimal_biphrases`) :returns: set of unaligned source words, set of unaligned target words """ fs = set() es = set() for fp, ep in biphrases: fs.update(fp) es.update(ep) return frozenset(range(len(f_words))) - fs, frozenset(range(len(e_words))) - es def parse_strings(fstr, estr, astr): """ :param fstr: source string :param estr: target string :param astr: alingment string :return: list of source words, a 
list of target words and list of alignment points where an alignment point is a pair of integers (i, j) """ f = fstr.split() e = estr.split() a = [map(int, link.split('-')) for link in astr.split()] return f, e, a def parse_line(line, separator = ' ||| '): """returns the source words, the target words and the alignment points""" return parse_strings(*line.split(separator)) def read_corpus(istream, separator=' ||| '): """ Reads a file containing lines like this: source sentence ||| taget sentence ||| alignment points and returns a list where each element is a triple (source, target, alignment) and source is a list of source words target is a list of target words alignment is a list of pairs (each pair represents an alignment point of the kind (f,e)) """ return [parse_line(line.strip()) for line in istream]
{ "content_hash": "1aaa6098c79c5253fab95f372827ed2d", "timestamp": "", "source": "github", "line_count": 156, "max_line_length": 141, "avg_line_length": 37.44230769230769, "alnum_prop": 0.5921931176168465, "repo_name": "wilkeraziz/smtutils", "id": "5fc30dde5027cffa6db58d6226ecc8826836647f", "size": "5841", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "atools/minphrases.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Python", "bytes": "13760" } ] }
This towering, ibis-like bird appears to be sculpted from silver and brass, yet it moves with the fluidity of a living creature. Stymphalidies CR 8 XP 4,800 N Large magical beast **Init** +6; **Senses** low-light vision; [Perception](skills/perception#_perception) +12 Defense **AC** 22, touch 11, flat-footed 20 (+2 Dex, +11 natural, –1 size) **hp** 94 (9d10+45) **Fort** +11, **Ref** +10, **Will** +5 **DR** 10/magic and adamantine; **Immune** fire **Weaknesses** vulnerable to sonic Offense **Speed** 20 ft., fly 120 ft. (poor) **Melee** bite +14 (1d8+6 plus [bleed](monsters/universalMonsterRules#_bleed)), 2 talons +14 (1d6+6), 2 wings +9 (1d6+3 plus [bleed](monsters/universalMonsterRules#_bleed)) **Ranged** 2 wing razors +10 (1d6+6 plus [bleed](monsters/universalMonsterRules#_bleed)) **Space** 10 ft.; **Reach** 5 ft. **Special Attacks** [bleed](monsters/universalMonsterRules#_bleed) (1d6), glare, wing razors Statistics **Str** 22, **Dex** 15, **Con** 20, **Int** 2, **Wis** 15, **Cha** 17 **Base Atk** +9; **CMB** +16; **CMD** 28 **Feats** [Critical Focus](feats#_critical-focus), [Flyby Attack](monsters/monsterFeats#_flyby-attack), [Improved Initiative](feats#_improved-initiative), [Lightning Reflexes](feats#_lightning-reflexes), [Skill Focus](feats#_skill-focus) ( [Perception](skills/perception#_perception)) **Skills** [Fly](skills/fly#_fly) +4, [Perception](skills/perception#_perception) +12 Ecology **Environment** warm plains or coastlines **Organization** solitary, pair, or flight (3–9) **Treasure** incidental Special Abilities **Glare (Su)** As a standard action, in any area of normal or brighter light, a stymphalidies can ruffle its metallic feathers in such a way as to blind all creatures nearby. Any creature within 30 feet must succeed at a DC 19 Fortitude save or be blinded for 1d6 minutes. A creature can defend against this effect in the same way it would a [gaze](monsters/universalMonsterRules#_gaze) attack. This is a sight-based effect. 
The save DC is Constitution-based. **Wing Razors (Ex)** A stymphalidies's metallic feathers are razor-sharp. In addition to being able to slash creatures with its wings as a melee attack, it may beat its wings, flinging two large, feathered shards at a single target. These wing razors deal 2d6 points of damage and cause [bleed](monsters/universalMonsterRules#_bleed), with a range increment of 50 feet. A stymphalidies can use this attack a number of times per day equal to its Constitution modifier (5 times per day for most stymphalidies). Man-eating birds of prey, the rare creatures known as stymphalidies stalk plains and coastlines, shredding warm-blooded animals and unwary travelers with their steely-bladed feathers and daggerlike beaks. From a distance, stymphalidies might easily be mistaken for cranes, ibises, or other long-necked birds, though in full sunlight their gleaming feathers and the haze of blood lingering from past meals are impossible to mistake. When not at rest, flocks of the creatures soar high on warm currents of air, relying upon their keen eyesight to spot suitable prey far below. Because of their voracious appetites, flocks of stymphalidies must claim a vast swath of territory to maintain their feeding habits. Fortunately for the folk of civilized lands, stymphalidies rarely encroach upon areas well traveled by humanoids and other creatures clever enough to bring down one of these metallic-winged avians. Stymphalidies breed at an astonishing rate, however, and overpopulation and exhaustion of game frequently force extraordinarily large and deadly groups of the creatures to band together in search of new lands and new prey to devour. Peculiarly, the feathers and beak of a stymphalidies are made of steel-like material similar to the metallic hide of a gorgon. Smaller varieties of stymphalidies exist, including a variant with a penchant for swarming, but the towering creature presented here is by far the most commonly encountered of its kind. 
This stymphalidies stands 13 feet tall, with a wingspan of 25 feet and a weight of 250 pounds.
{ "content_hash": "7128a410db7a17d28dc86310590d8904", "timestamp": "", "source": "github", "line_count": 66, "max_line_length": 508, "avg_line_length": 62.166666666666664, "alnum_prop": 0.7631001706068731, "repo_name": "brunokoga/pathfinder-markdown", "id": "09f31c0a7e271f82c74e63ab9f687119eefbac5a", "size": "4124", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "prd_markdown/bestiary3/stymphalidies.md", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "58168" }, { "name": "JavaScript", "bytes": "97828" }, { "name": "Ruby", "bytes": "2456" } ] }
"""nadmin plugin: cross-model "related objects" navigation.

``RelateMenuPlugin`` adds a drop-down column to a change list linking each
row to the change lists / add forms of models related to it.  The target
views receive a ``_rel_<lookup>`` query parameter, which the
``*RelateDisplayPlugin`` classes below pick up to filter querysets,
pre-fill forms and keep the parameter alive across redirects.
"""
from django.core.urlresolvers import reverse
from django.utils.encoding import force_unicode
from django.utils.encoding import smart_str
from django.utils.safestring import mark_safe
from django.db.models.sql.query import LOOKUP_SEP
from django.db.models.fields.related import ForeignObjectRel
from django.utils.translation import ugettext as _
from django.db import models

from nadmin.sites import site
from nadmin.views import BaseAdminPlugin, ListAdminView, CreateAdminView, UpdateAdminView, DeleteAdminView

# Query-string prefix marking a "relate" parameter, e.g. ``_rel_user__id__exact=3``.
RELATE_PREFIX = '_rel_'


class RelateMenuPlugin(BaseAdminPlugin):
    """List-view plugin that renders a per-row drop-down of related models."""

    # Optional whitelist of related accessor names; empty means "all".
    related_list = []
    # Master switch for the related-objects column.
    use_related_menu = True

    def get_related_list(self):
        """Collect the related objects to show, as ``(rel, view_perm, add_perm)``.

        The result is cached on ``self._related_acts`` for the lifetime of the
        plugin instance.  Relations are skipped when filtered out by
        ``related_list``, when the target model is not registered with this
        admin site, or when the user holds neither view nor add permission.
        """
        if hasattr(self, '_related_acts'):
            return self._related_acts

        _related_acts = []

        # NOTE(review): get_all_related_objects()/get_all_related_many_to_many_objects()
        # are old Meta APIs (removed in Django 1.10); this module targets the
        # older Django implied by the imports above.
        for r in self.opts.get_all_related_objects() + self.opts.get_all_related_many_to_many_objects():
            if self.related_list and (r.get_accessor_name() not in self.related_list):
                continue
            if r.model not in self.admin_site._registry.keys():
                continue

            has_view_perm = self.has_model_perm(r.model, 'view')
            has_add_perm = self.has_model_perm(r.model, 'add')
            if not (has_view_perm or has_add_perm):
                continue

            _related_acts.append((r, has_view_perm, has_add_perm))

        self._related_acts = _related_acts
        return self._related_acts

    def related_link(self, instance):
        """Render the drop-down menu HTML for one change-list row.

        Each related model gets a filtered change-list link (when viewable)
        and an add link (when addable); both carry the ``_rel_`` parameter so
        the target view knows which instance it relates to.
        """
        links = []
        for r, view_perm, add_perm in self.get_related_list():
            label = r.opts.app_label
            model_name = r.opts.model_name

            f = r.field
            rel_name = f.rel.get_related_field().name

            verbose_name = force_unicode(r.opts.verbose_name)
            lookup_name = '%s__%s__exact' % (f.name, rel_name)

            link = ''.join(('<li class="with_menu_btn">',

                            '<a href="%s?%s=%s" title="%s"><i class="icon fa fa-th-list"></i> %s</a>' % (
                                reverse('%s:%s_%s_changelist' % (self.admin_site.app_name, label, model_name)),
                                RELATE_PREFIX + lookup_name, str(instance.pk),
                                verbose_name, verbose_name) if view_perm else
                            '<a><span class="text-muted"><i class="icon fa fa-blank"></i> %s</span></a>' % verbose_name,

                            '<a class="add_link dropdown-menu-btn" href="%s?%s=%s"><i class="icon fa fa-plus pull-right"></i></a>' % (
                                reverse('%s:%s_%s_add' % (self.admin_site.app_name, label, model_name)),
                                RELATE_PREFIX + lookup_name, str(instance.pk)) if add_perm else "",

                            '</li>'))
            links.append(link)

        ul_html = '<ul class="dropdown-menu" role="menu">%s</ul>' % ''.join(links)
        return '<div class="dropdown related_menu pull-right"><a title="%s" class="relate_menu dropdown-toggle" data-toggle="dropdown"><i class="icon fa fa-list"></i></a>%s</div>' % (_('Related Objects'), ul_html)

    # Column metadata consumed by the change-list renderer / exporter.
    related_link.short_description = '&nbsp;'
    related_link.allow_tags = True
    related_link.allow_export = False
    related_link.is_column = False

    def get_list_display(self, list_display):
        """Append the related-objects column when enabled and non-empty."""
        if self.use_related_menu and len(self.get_related_list()):
            list_display.append('related_link')
            self.admin_view.related_link = self.related_link
        return list_display


class RelateObject(object):
    """Resolves a ``_rel_`` lookup (e.g. ``user__id__exact``) against a model.

    Exposes the target model, the related objects selected by ``value`` and a
    queryset filter for the originating model.
    """

    def __init__(self, admin_view, lookup, value):
        self.admin_view = admin_view
        self.org_model = admin_view.model
        self.opts = admin_view.opts
        self.lookup = lookup
        self.value = value

        # First lookup segment names the relation field on the origin model.
        parts = lookup.split(LOOKUP_SEP)
        field = self.opts.get_field_by_name(parts[0])[0]

        if not hasattr(field, 'rel') and not isinstance(field, ForeignObjectRel):
            # Fixed message: original read "must a related field".
            raise Exception(u'Relate Lookup field must be a related field')

        if hasattr(field, 'rel'):
            # Forward relation (FK / M2M declared on the origin model).
            self.to_model = field.rel.to
            self.rel_name = field.rel.get_related_field().name
            self.is_m2m = isinstance(field.rel, models.ManyToManyRel)
        else:
            # Reverse relation: target is the model owning the field.
            self.to_model = field.model
            self.rel_name = self.to_model._meta.pk.name
            self.is_m2m = False

        to_qs = self.to_model._default_manager.get_queryset()
        self.to_objs = to_qs.filter(**{self.rel_name: value}).all()

        self.field = field

    def filter(self, queryset):
        """Restrict ``queryset`` to rows matching the relate lookup/value."""
        return queryset.filter(**{self.lookup: self.value})

    def get_brand_name(self):
        """Build the breadcrumb-style heading shown above the filtered list."""
        if len(self.to_objs) == 1:
            to_model_name = str(self.to_objs[0])
        else:
            to_model_name = force_unicode(self.to_model._meta.verbose_name)

        return mark_safe(u"<span class='rel-brand'>%s <i class='fa fa-caret-right'></i></span> %s" %
                         (to_model_name, force_unicode(self.opts.verbose_name_plural)))


class BaseRelateDisplayPlugin(BaseAdminPlugin):
    """Shared machinery for views entered through a ``_rel_`` parameter."""

    def init_request(self, *args, **kwargs):
        """Activate only when the request carries a ``_rel_`` parameter."""
        self.relate_obj = None
        # NOTE(review): request.REQUEST (GET+POST merge) was removed in
        # Django 1.9; retained here for the legacy Django this module targets.
        for k, v in self.request.REQUEST.items():
            if smart_str(k).startswith(RELATE_PREFIX):
                self.relate_obj = RelateObject(
                    self.admin_view, smart_str(k)[len(RELATE_PREFIX):], v)
                break
        return bool(self.relate_obj)

    def _get_relate_params(self):
        # (param_name, param_value) pair for the active relate lookup.
        return RELATE_PREFIX + self.relate_obj.lookup, self.relate_obj.value

    def _get_input(self):
        # Hidden form field that carries the relate parameter through POSTs.
        return '<input type="hidden" name="%s" value="%s" />' % self._get_relate_params()

    def _get_url(self, url):
        # Append the relate parameter, joining with '&' when a query string
        # already exists.  Fixed: the original used ``url.find('?') > 0``,
        # which misses a '?' at index 0.
        return url + ('&' if '?' in url else '?') + ('%s=%s' % self._get_relate_params())


class ListRelateDisplayPlugin(BaseRelateDisplayPlugin):
    """Filters the change list and propagates the relate parameter."""

    def get_list_queryset(self, queryset):
        if self.relate_obj:
            queryset = self.relate_obj.filter(queryset)
        return queryset

    def url_for_result(self, url, result):
        return self._get_url(url)

    def get_context(self, context):
        context['brand_name'] = self.relate_obj.get_brand_name()
        context['rel_objs'] = self.relate_obj.to_objs
        if 'add_url' in context:
            context['add_url'] = self._get_url(context['add_url'])
        return context

    def get_list_display(self, list_display):
        # The relate column is redundant when every row shares the same
        # related value; hide it for non-M2M relations.
        if not self.relate_obj.is_m2m:
            try:
                list_display.remove(self.relate_obj.field.name)
            except Exception:
                # Field was not in list_display to begin with; nothing to do.
                pass
        return list_display


class EditRelateDisplayPlugin(BaseRelateDisplayPlugin):
    """Pre-fills add/edit forms and keeps the relate parameter on redirects."""

    def get_form_datas(self, datas):
        # On a GET of the add form, default the relation field to the
        # relate value so the new object is linked automatically.
        if self.admin_view.org_obj is None and self.admin_view.request_method == 'get':
            datas['initial'][
                self.relate_obj.field.name] = self.relate_obj.value
        return datas

    def post_response(self, response):
        # String responses are redirect URLs; keep the relate parameter on
        # them unless we are bouncing back to the index page.
        if isinstance(response, basestring) and response != self.get_admin_url('index'):
            return self._get_url(response)
        return response

    def get_context(self, context):
        if 'delete_url' in context:
            context['delete_url'] = self._get_url(context['delete_url'])
        return context

    def block_after_fieldsets(self, context, nodes):
        return self._get_input()


class DeleteRelateDisplayPlugin(BaseRelateDisplayPlugin):
    """Keeps the relate parameter alive through the delete confirmation."""

    def post_response(self, response):
        if isinstance(response, basestring) and response != self.get_admin_url('index'):
            return self._get_url(response)
        return response

    def block_form_fields(self, context, nodes):
        return self._get_input()


site.register_plugin(RelateMenuPlugin, ListAdminView)
site.register_plugin(ListRelateDisplayPlugin, ListAdminView)
site.register_plugin(EditRelateDisplayPlugin, CreateAdminView)
site.register_plugin(EditRelateDisplayPlugin, UpdateAdminView)
site.register_plugin(DeleteRelateDisplayPlugin, DeleteAdminView)
{ "content_hash": "bbea971de3365cfa029c23ad43bf8764", "timestamp": "", "source": "github", "line_count": 209, "max_line_length": 213, "avg_line_length": 38.588516746411486, "alnum_prop": 0.6017358958462492, "repo_name": "A425/django-nadmin", "id": "196ed97a2873b2e6e916ea1eb6689826b239b8d5", "size": "8080", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "nadmin/plugins/relate.py", "mode": "33261", "license": "mit", "language": [ { "name": "CSS", "bytes": "23733" }, { "name": "HTML", "bytes": "95746" }, { "name": "JavaScript", "bytes": "66338" }, { "name": "Python", "bytes": "413023" } ] }
namespace lia { struct logical_and { template<typename T, typename U> auto operator()(T&& t, U&& u) -> decltype(std::forward<T>(t) && std::forward<U>(u)) const { return std::forward<T>(t) && std::forward<U>(u); } }; struct logical_or { template<typename T, typename U> auto operator()(T&& t, U&& u) -> decltype(std::forward<T>(t) || std::forward<U>(u)) const { return std::forward<T>(t) || std::forward<U>(u); } }; struct logical_not { template<typename T> auto operator()(T&& t) -> decltype(!std::forward<T>(t)) const { return !std::forward<T>(t); } }; } // lia #endif // LIA_LOGICAL_FUNCTIONAL_OBJ_HPP
{ "content_hash": "c863858a8dfdf4956b8e53c6f9451d74", "timestamp": "", "source": "github", "line_count": 24, "max_line_length": 95, "avg_line_length": 27.75, "alnum_prop": 0.5780780780780781, "repo_name": "Rapptz/Lia", "id": "81f1d9d99dd29b207a1acc33be9539916adb1a6f", "size": "765", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "Lia/functional/objects/logical.hpp", "mode": "33188", "license": "mit", "language": [ { "name": "C++", "bytes": "47126" } ] }
package net.lospi.juno.elements; import org.javatuples.Pair; import java.util.List; /** * Created by jalospinoso on 6/14/2015. */ public interface MinistepChainCalculator { int getDiagonalLinksCount(Chain chain); int getConsecutivelyCancellingPairsCount(Chain chain, int minimumCcpLength, int maximumCcpLength); Integer indexOfNextMatchingLinkAfter(Chain chain, int start, Link link); Integer indexOfSecondMatchingLinkAfter(Chain chain, int start, Link link); List<String> getAltersFor(Chain chain, ActorAspect selectedActorAspect); int getAlterCountFor(Chain chain, ActorAspect selectedActorAspect); List<Integer> getDiagonalLinkIndices(Chain chain); List<Pair<Integer, Integer>> getConsecutivelyCancelingPairs(Chain chain, int minSegmentLength, int maxSegmentLength); }
{ "content_hash": "148043d6b094bc9395f788f20571a64d", "timestamp": "", "source": "github", "line_count": 20, "max_line_length": 108, "avg_line_length": 43.65, "alnum_prop": 0.7376861397479955, "repo_name": "JLospinoso/juno", "id": "6d77c7280cd6543429b8d107907ac518ab44c86b", "size": "873", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "juno-core/src/main/java/net/lospi/juno/elements/MinistepChainCalculator.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Java", "bytes": "615535" } ] }
End of preview.

No dataset card yet

Downloads last month
3