text
stringlengths 27
775k
|
|---|
package org.odk.collect.android.preferences.screens
import android.content.Context
import android.content.DialogInterface
import android.content.Intent
import android.os.Bundle
import androidx.appcompat.app.AlertDialog
import androidx.preference.Preference
import org.odk.collect.android.R
import org.odk.collect.android.activities.ActivityUtils
import org.odk.collect.android.activities.MainMenuActivity
import org.odk.collect.android.activities.SplashScreenActivity
import org.odk.collect.android.configure.qr.QRCodeTabsActivity
import org.odk.collect.android.injection.DaggerUtils
import org.odk.collect.android.preferences.dialogs.ResetDialogPreference
import org.odk.collect.android.preferences.dialogs.ResetDialogPreferenceFragmentCompat
import org.odk.collect.android.preferences.keys.AdminKeys
import org.odk.collect.android.projects.DeleteProjectResult
import org.odk.collect.android.projects.ProjectDeleter
import org.odk.collect.android.utilities.MultiClickGuard
import org.odk.collect.android.utilities.ToastUtils
import javax.inject.Inject
/**
 * Admin preferences screen for managing the current project: importing settings
 * via QR code and deleting the project (with confirmation and unsent-work checks).
 */
class ProjectManagementPreferencesFragment :
    BaseAdminPreferencesFragment(),
    Preference.OnPreferenceClickListener {

    @Inject
    lateinit var projectDeleter: ProjectDeleter

    override fun onAttach(context: Context) {
        super.onAttach(context)
        DaggerUtils.getComponent(context).inject(this)
    }

    override fun onCreatePreferences(savedInstanceState: Bundle?, rootKey: String?) {
        super.onCreatePreferences(savedInstanceState, rootKey)
        setPreferencesFromResource(R.xml.project_management_preferences, rootKey)

        findPreference<Preference>(AdminKeys.KEY_IMPORT_SETTINGS)!!.onPreferenceClickListener = this
        findPreference<Preference>(DELETE_PROJECT_KEY)!!.onPreferenceClickListener = this
    }

    override fun onDisplayPreferenceDialog(preference: Preference) {
        // Guard against rapid double taps opening two dialogs.
        if (!MultiClickGuard.allowClick(javaClass.name)) {
            return
        }
        if (preference is ResetDialogPreference) {
            // Reset preferences use a custom dialog fragment instead of the default one.
            val dialogFragment = ResetDialogPreferenceFragmentCompat.newInstance(preference.key)
            dialogFragment.setTargetFragment(this, 0)
            dialogFragment.show(parentFragmentManager, null)
        } else {
            super.onDisplayPreferenceDialog(preference)
        }
    }

    override fun onPreferenceClick(preference: Preference): Boolean {
        if (MultiClickGuard.allowClick(javaClass.name)) {
            when (preference.key) {
                AdminKeys.KEY_IMPORT_SETTINGS -> {
                    val intent = Intent(activity, QRCodeTabsActivity::class.java)
                    startActivity(intent)
                }
                DELETE_PROJECT_KEY -> AlertDialog.Builder(requireActivity())
                    .setTitle(R.string.delete_project)
                    .setMessage(R.string.delete_project_confirm_message)
                    .setNegativeButton(R.string.delete_project_no) { _: DialogInterface?, _: Int -> }
                    .setPositiveButton(R.string.delete_project_yes) { _: DialogInterface?, _: Int -> deleteProject() }
                    .show()
            }
            return true
        }
        return false
    }

    /**
     * Deletes the current project. Shows an explanatory dialog when deletion is
     * blocked (unsent instances or running background jobs); otherwise switches to
     * the next project, or returns to the splash screen when none remains.
     */
    fun deleteProject() {
        when (val deleteProjectResult = projectDeleter.deleteCurrentProject()) {
            is DeleteProjectResult.UnsentInstances -> {
                AlertDialog.Builder(requireActivity())
                    .setTitle(R.string.cannot_delete_project_title)
                    .setMessage(R.string.cannot_delete_project_message_one)
                    .setPositiveButton(R.string.ok, null)
                    .show()
            }
            is DeleteProjectResult.RunningBackgroundJobs -> {
                AlertDialog.Builder(requireActivity())
                    .setTitle(R.string.cannot_delete_project_title)
                    .setMessage(R.string.cannot_delete_project_message_two)
                    .setPositiveButton(R.string.ok, null)
                    .show()
            }
            is DeleteProjectResult.DeletedSuccessfully -> {
                val newCurrentProject = deleteProjectResult.newCurrentProject
                if (newCurrentProject != null) {
                    ActivityUtils.startActivityAndCloseAllOthers(
                        requireActivity(),
                        MainMenuActivity::class.java
                    )
                    ToastUtils.showLongToast(
                        getString(
                            R.string.switched_project,
                            newCurrentProject.name
                        )
                    )
                } else {
                    // No project left: restart from the splash/first-launch screen.
                    ActivityUtils.startActivityAndCloseAllOthers(
                        requireActivity(),
                        SplashScreenActivity::class.java
                    )
                }
            }
        }
    }

    companion object {
        const val DELETE_PROJECT_KEY = "delete_project"
    }
}
|
# Istio
## Release page:
https://github.com/istio/istio/releases/
## Extract the Istio release
Istio doesn't provide a Helm registry; its releases are published as downloadable release packages. Download the release
you want to use and extract it into this directory with the version number appended, so the folder follows the
pattern `istio-<version>`, to keep our folders consistent.
The package comes with a lot of files; you only need to check in the `istio-<version>/manifests` folder.
## Install
General Helm install docs: https://istio.io/latest/docs/setup/install/helm/
Move to the Istio directory for the version you are setting up:
```
cd ./istio/istio-<VERSION>
```
Create the `istio-system` namespace:
```
kubectl create namespace istio-system
```
Install Istio base chart:
```
helm install istio-base -n istio-system manifests/charts/base
```
Some of these items we are adding in:
* nodeSelectors
* tolerations
Install Istio discovery:
```
helm install -n istio-system istiod manifests/charts/istio-control/istio-discovery
```
Install ingress gateway
```
helm install -n istio-system istio-ingress manifests/charts/gateways/istio-ingress
```
Install egress gateway
```
helm install -n istio-system istio-egress manifests/charts/gateways/istio-egress
```
## Enable auto Istio/Envoy injection
```
kubectl label namespace my-app istio-injection=enabled
```
## Verify mTLS
If you installed Istio with values.global.proxy.privileged=true, you can use tcpdump to verify traffic is encrypted or not.
```
$ kubectl exec -n foo "$(kubectl get pod -n foo -lapp=httpbin -ojsonpath={.items..metadata.name})" -c istio-proxy -- sudo tcpdump dst port 80 -A
```
## Istio networking

|
namespace GodelTech.StoryLine.Contracts
{
    /// <summary>
    /// Builder that produces an <see cref="IExpectation"/> instance.
    /// </summary>
    public interface IExpectationBuilder
    {
        /// <summary>
        /// Builds the configured expectation.
        /// </summary>
        /// <returns>The expectation to be verified.</returns>
        IExpectation Build();
    }
}
|
(ns api.emailer.sendgrid
(:require [clj-http.client :as http]))
(defonce ^:private sendgrid-public-api "https://api.sendgrid.com/v3/mail/send")
(defn send-email
  "Sends a transactional email through the SendGrid v3 API.
   When print-mode? is truthy the payload is only printed, nothing is sent.
   Throws when no api-key is supplied and print-mode? is off."
  [{:keys [from to subject content dynamic-template-data template-id api-key print-mode?]}]
  (cond
    print-mode?
    (do
      (prn "Would send email:")
      (prn "From:" from)
      (prn "To:" to)
      (prn "Subject:" subject)
      (prn "Content:" content)
      (prn "Dynamic template data:" dynamic-template-data))

    (not api-key)
    (throw (Error. "Missing api-key to send email to sendgrid"))

    :else
    ;; Optional fields are only merged into the request body when present.
    (let [payload (cond-> {:from {:email from}
                           :personalizations [(cond-> {:to [{:email to}]}
                                                dynamic-template-data
                                                (assoc "dynamic_template_data"
                                                       dynamic-template-data))]}
                    subject (assoc :subject subject)
                    template-id (assoc :template_id template-id)
                    content (assoc :content [{:type "text/html"
                                              :value content}]))]
      (http/post sendgrid-public-api
                 {:form-params payload
                  :headers {"Authorization" (str "Bearer " api-key)}
                  :content-type :json
                  :accept :json}))))
(comment
(let [{{:keys [api-key template-id]} :sendgrid} (api.config/load!)]
(send-email
{:from "noreply@spreadviz.org"
:to "fbielejec@gmail.com"
:template-id template-id
:dynamic-template-data
{"header" "Login to Spread"
"body" "You requested a login link to Spread. Click on the button below"
"button-title" "Login"
"button-href" "http://nodrama.io"}
:api-key api-key})))
|
library tavern.utils;
import 'package:path/path.dart' as path;
const String metadataExtension = '.metadata.json';
const String templatesPath = 'web/templates/';
/// Returns the sibling metadata path for [p]: same directory, same base name,
/// with the extension replaced by [metadataExtension].
String getMetadataPath(String p) {
  return path.join(
    path.dirname(p),
    path.basenameWithoutExtension(p) + metadataExtension,
  );
}
/// Maps a source path to its published HTML URL path.
///
/// The first path segment is dropped (e.g. the content root directory) and the
/// extension is replaced with `.html`, producing an absolute URL-style path.
String getHtmlPath(String p) {
  // input path/to/file.txt
  var dirname = path.dirname(p); // path/to/
  var basename = path.basenameWithoutExtension(p); // file
  var dirnamePath = path.split(dirname); // ['path', 'to']
  var filename = basename + '.html';
  var result = ['/'];
  // sublist(1) drops the leading segment so output starts at '/to/...'.
  result.addAll(dirnamePath.sublist(1));
  result.add(filename);
  return path.joinAll(result);
}
/// Removes every trailing `/` from [s]. Returns [s] unchanged when it is null
/// or has no trailing slash.
String stripTrailingSlash(String s) {
  if (s == null) return s;
  var result = s;
  while (result.endsWith('/')) {
    result = result.substring(0, result.length - 1);
  }
  return result;
}
/// Ensures [s] starts with a single `/`. Null input is returned unchanged.
String addLeadingSlash(String s) {
  if (s == null || s.startsWith('/')) {
    return s;
  }
  return '/$s';
}
|
package act.app.event;
import act.app.App;
import act.event.ActEvent;
/**
 * Base class for application lifecycle events, identified by the ordinal of
 * the corresponding {@link AppEventId}.
 */
public abstract class AppEvent extends ActEvent<App> {

    /** Ordinal of the {@link AppEventId} this event corresponds to; immutable after construction. */
    private final int id;

    /**
     * @param id     the lifecycle stage this event represents
     * @param source the application in which the event occurred
     */
    public AppEvent(AppEventId id, App source) {
        super(source);
        this.id = id.ordinal();
    }

    /**
     * @return the ordinal of the {@link AppEventId} for this event
     */
    public int id() {
        return id;
    }
}
|
package com.arshadshah.nimaz.prayerTimeApi.internals
import com.arshadshah.nimaz.prayerTimeApi.internals.Astronomical.apparentObliquityOfTheEcliptic
import com.arshadshah.nimaz.prayerTimeApi.internals.Astronomical.apparentSolarLongitude
import com.arshadshah.nimaz.prayerTimeApi.internals.Astronomical.ascendingLunarNodeLongitude
import com.arshadshah.nimaz.prayerTimeApi.internals.Astronomical.meanLunarLongitude
import com.arshadshah.nimaz.prayerTimeApi.internals.Astronomical.meanObliquityOfTheEcliptic
import com.arshadshah.nimaz.prayerTimeApi.internals.Astronomical.meanSiderealTime
import com.arshadshah.nimaz.prayerTimeApi.internals.Astronomical.meanSolarLongitude
import com.arshadshah.nimaz.prayerTimeApi.internals.Astronomical.nutationInLongitude
import com.arshadshah.nimaz.prayerTimeApi.internals.Astronomical.nutationInObliquity
import com.arshadshah.nimaz.prayerTimeApi.internals.CalendricalHelper.julianCentury
import com.arshadshah.nimaz.prayerTimeApi.internals.DoubleUtil.unwindAngle
import kotlin.math.asin
import kotlin.math.atan2
import kotlin.math.cos
import kotlin.math.sin
/**
 * Solar position quantities for a given Julian day, computed with the
 * equations from Meeus, "Astronomical Algorithms" (pages cited inline).
 */
internal class SolarCoordinates(julianDay : Double)
{
    /**
     * The declination of the sun, the angle between the rays of the Sun and the
     * plane of the Earth's equator, in degrees.
     */
    @JvmField
    val declination : Double

    /**
     * Right ascension of the Sun, the angular distance on the celestial equator
     * from the vernal equinox to the hour circle, in degrees.
     */
    @JvmField
    val rightAscension : Double

    /**
     * Apparent sidereal time, the hour angle of the vernal equinox, in degrees.
     */
    @JvmField
    val apparentSiderealTime : Double

    init
    {
        // T: Julian centuries since J2000.0; all intermediate longitudes below
        // are derived from it.
        val T = julianCentury(julianDay)
        val L0 = meanSolarLongitude( /* julianCentury */T)
        val Lp = meanLunarLongitude( /* julianCentury */T)
        val Ω = ascendingLunarNodeLongitude( /* julianCentury */T)
        // λ: apparent solar longitude, converted to radians for the trig below.
        val λ = Math.toRadians(
            apparentSolarLongitude( /* julianCentury */T , /* meanLongitude */L0)
        )
        val θ0 = meanSiderealTime( /* julianCentury */T)
        // ΔΨ / Δε: nutation corrections in longitude and obliquity.
        val ΔΨ = nutationInLongitude( /* julianCentury */T , /* solarLongitude */
            L0 , /* lunarLongitude */
            Lp , /* ascendingNode */
            Ω
        )
        val Δε = nutationInObliquity( /* julianCentury */T , /* solarLongitude */
            L0 , /* lunarLongitude */
            Lp , /* ascendingNode */
            Ω
        )
        val ε0 = meanObliquityOfTheEcliptic( /* julianCentury */T)
        val εapp = Math.toRadians(
            apparentObliquityOfTheEcliptic( /* julianCentury */T , /* meanObliquityOfTheEcliptic */
                ε0
            )
        )

        /* Equation from Astronomical Algorithms page 165 */
        declination = Math.toDegrees(
            asin(sin(εapp) * sin(λ))
        )

        /* Equation from Astronomical Algorithms page 165 */
        // unwindAngle normalizes the result into [0, 360).
        rightAscension = unwindAngle(
            Math.toDegrees(atan2(cos(εapp) * sin(λ) , cos(λ)))
        )

        /* Equation from Astronomical Algorithms page 88 */
        // NOTE(review): the * 3600 ... / 3600 pair looks redundant but mirrors the
        // arcsecond-based form of the textbook equation; kept as-is.
        apparentSiderealTime =
            θ0 + ΔΨ * 3600 * cos(
                Math.toRadians(ε0 + Δε)
            ) / 3600
    }
}
|
// Registers SweetAlert2 on the Vue prototype so any component can show
// dialogs via `this.$swal(...)`.
import Vue from 'vue'
import swal from 'sweetalert2/dist/sweetalert2'

Vue.prototype.$swal = swal
|
import glob

# Merge every per-band filter-coefficient header (fdacoefs_<band>.h, as
# exported by MATLAB's filter designer) into a single fdacoefs.h, suffixing
# the NUM/DEN/MWSPT_NSEC symbols with the band index so they don't collide.
headers = glob.glob('./fdacoefs_*.h')

# `with` guarantees the output file is closed even if a header fails to parse
# (the original left `target` open on error).
with open("fdacoefs.h", "w") as target:
    target.write('''
#ifndef AUDIO_PLAYER_FDACOEFS_H
#define AUDIO_PLAYER_FDACOEFS_H
#include "tmwtypes.h"
''')
    print(headers)
    for header in headers:
        with open(header, "r") as f:
            # Band index comes from the filename: fdacoefs_<band>.h
            band = int(header.split("_")[1].split(".")[0])
            for line in f:
                # Skip the NL/DL arrays and block comments; rename the
                # remaining coefficient symbols per band.
                if "NL" not in line \
                        and "DL" not in line \
                        and not line.startswith(("/*", " *", " */")):
                    if "NUM" in line:
                        line = line.replace("NUM", "NUM_BAND_{}".format(band))
                    if "DEN" in line:
                        line = line.replace("DEN", "DEN_BAND_{}".format(band))
                    if "MWSPT_NSEC" in line:
                        line = line.replace("MWSPT_NSEC", "MWSPT_NSEC_{}".format(band))
                    target.write(line)
                elif "#" not in line:
                    # Filtered lines without preprocessor directives are still
                    # copied through (drops each header's own #include/#ifndef).
                    target.write(line)
    target.write("#endif")
|
/*
* Copyright 2013 newzly ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.newzly.phantom.example.advanced
import java.util.UUID
import scala.concurrent.{ Future => ScalaFuture }
import com.datastax.driver.core.{ ResultSet, Row }
import com.newzly.phantom.Implicits._
import com.newzly.phantom.example.basics.DBConnector
// Now you want to enable querying Recipes by author.
// Because of the massive performance overhead of filtering,
// you can't really use a SecondaryKey for multi-billion record databases.
// Instead, you create mapping tables and ensure consistency from the application level.
// This will illustrate just how easy it is to do that with phantom.
sealed class AdvancedRecipesByTitle extends CassandraTable[AdvancedRecipesByTitle, (String, UUID)] {
  // In this table, the title is the PartitionKey (the original comment said
  // "author", but the partition column defined here is `title`).
  object title extends StringColumn(this) with PartitionKey[String]

  // The id is just another normal field.
  object id extends UUIDColumn(this)

  // Maps a Cassandra row to the (title, id) tuple this table models.
  def fromRow(row: Row): (String, UUID) = {
    Tuple2(title(row), id(row))
  }
}
object AdvancedRecipesByTitle extends AdvancedRecipesByTitle with DBConnector {
  override lazy val tableName = "recipes_by_title"

  // Inserts a (title, id) mapping row.
  def insertRecipe(recipe: (String, UUID)): ScalaFuture[ResultSet] = {
    insert.value(_.title, recipe._1).value(_.id, recipe._2).future()
  }

  // now you can have the title in a where clause
  // without the performance impact of a secondary index.
  def getRecipeByTitle(title: String): ScalaFuture[Option[(String, UUID)]] = {
    select.where(_.title eqs title).one()
  }
}
|
using AdminLTE.Models;
using Microsoft.AspNetCore.Mvc;
using System.Collections.Generic;
using System.Security.Claims;
using AdminLTE.Common.Extensions;
using AdminLTE.Common;
using AdminLTE.Data;
namespace AdminLTE.ViewComponents
{
public class NoteViewComponent : ViewComponent
{
private readonly ApplicationDbContext _context;
public NoteViewComponent(ApplicationDbContext context)
{
_context = context;
}
public IViewComponentResult Invoke(int id)
{
var messages = GetData(id);
return View(messages);
}
private Note GetData(int id)
{
Note messages = _context.Note.Find(id);
return messages;
}
}
}
|
package com.piggybank;
/**
 * A penny coin worth $0.01.
 */
public class Penny extends Money {

    /**
     * @param amount number of pennies
     */
    public Penny(int amount) {
        super("Penny", amount, 0.01);
    }

    /** Creates a single penny. */
    public Penny() {
        // Use the same name as the other constructor ("Penny", not "penny")
        // so the two constructors produce consistent Money labels.
        super("Penny", 1, 0.01);
    }
}
|
package com.limpygnome.daemon.remote.service.auth;
import com.limpygnome.daemon.api.Service;
import com.limpygnome.daemon.common.rest.RestRequest;
/**
* An interface for an auth provider, which authenticates a REST handler request.
*/
public interface AuthTokenProviderService extends Service
{

    /**
     * All implementations of this service should use this constant as the service name.
     */
    String SERVICE_NAME = "auth";

    /**
     * Determines if a request is authorised.
     *
     * @param restRequest The REST request to authorise
     * @return True = authorised, false = not authorised
     */
    boolean isAuthorised(RestRequest restRequest);

    /**
     * Retrieves the auth token currently used for authentication.
     *
     * @return The current auth token
     */
    String getAuthToken();
}
|
<?php
/**
* Created by PhpStorm.
* User: Administrator
* Date: 2019/12/9
* Time: 14:21
*/
namespace app\index\service\excel;
use PhpOffice\PhpSpreadsheet\Writer\Xls;
use PhpOffice\PhpSpreadsheet\Writer\Xlsx;
use PhpOffice\PhpSpreadsheet\IOFactory;
/**
 * Fills an Excel template with task/asset data and writes the result to disk
 * using PhpSpreadsheet.
 */
class ExportDocService
{
    /** Spreadsheet object loaded from the template. */
    protected $spreadsheet;
    /** Currently active worksheet. */
    protected $worksheet;
    /** Path of the generated file. */
    protected $filePath;
    protected $currLastRow;

    /**
     * Load an Excel template and select the active worksheet.
     *
     * @param string $templateFile path of the template file
     * @param int    $index        index of the worksheet to activate
     * @param string $sheetTitle   optional new title for the worksheet
     * @return $this
     */
    public function loadTemplate($templateFile ,$index = 0 , $sheetTitle = '')
    {
        $this->spreadsheet = IOFactory::load($templateFile);
        $this->spreadsheet->setActiveSheetIndex($index);
        $this->worksheet = $this->spreadsheet->getActiveSheet();
        if($sheetTitle != ''){
            $this->worksheet->setTitle($sheetTitle);
        }
        return $this;
    }

    /**
     * Write task base data into the worksheet.
     *
     * @param array $data rows of ['index' => cell ref, 'val' => value]
     * @return $this|false false when no template has been loaded yet
     */
    public function writeTaskBaseData($data)
    {
        if(empty($this->worksheet)){
            return false;
        }
        foreach ($data as $item) {
            $this->worksheet->setCellValue($item['index'], $item['val']);
        }
        return $this;
    }

    /**
     * Write asset header data into the worksheet.
     *
     * @param array $data rows of ['index' => cell ref, 'val' => value]
     * @return $this|false false when no template has been loaded yet
     */
    public function writeAssetHeadData($data)
    {
        if(empty($this->worksheet)){
            return false;
        }
        foreach ($data as $item) {
            $this->worksheet->setCellValue($item['index'], $item['val']);
        }
        return $this;
    }

    /**
     * Write asset list data, centering each written cell.
     *
     * @param array $data rows of ['index' => cell ref, 'val' => value]
     * @return void|false false when no template has been loaded yet
     */
    public function writeAssetListData($data)
    {
        // Guard added for consistency with the other write* methods: avoids a
        // fatal call on null when no template has been loaded.
        if(empty($this->worksheet)){
            return false;
        }
        $styleArray = [
            'alignment' => [
                'horizontal' => \PhpOffice\PhpSpreadsheet\Style\Alignment::HORIZONTAL_CENTER,
                'vertical' => \PhpOffice\PhpSpreadsheet\Style\Alignment::VERTICAL_CENTER,
            ],
        ];
        foreach ($data as $item) {
            // Leading space preserves values Excel would otherwise coerce
            // (e.g. long numeric asset codes).
            $this->worksheet->setCellValue($item['index'], ' '.$item['val'])
                ->getStyle($item['index'])->applyFromArray($styleArray);
        }
    }

    /**
     * Generate the Excel file on disk; the resulting path is stored in $this->filePath.
     *
     * @param string $fileName base file name (a timestamped random suffix is appended)
     * @param string $fileDir  target directory (created if missing)
     * @param string $type     writer type: 'Xlsx' (default) or 'Xls'
     * @return void
     */
    public function makeExcelFile($fileName = '' , $fileDir = EXCEL_EXPORT_DIR , $type = 'Xlsx')
    {
        $fileDir = dir_separator_replace(rtrim(rtrim($fileDir, '\\'),'/')) . DIRECTORY_SEPARATOR;
        if (!is_dir($fileDir)) {
            mkdir($fileDir, 0755, true);
        }
        $fileName = empty($fileName) ? 'excel_'.date('YmdHis').'_'.rand(1000,9999)
            : $fileName.'_excel_'.date('YmdHis').'_'.rand(1000,9999);
        $filePath = $fileDir . $fileName.'.'.strtolower($type);
        $this->spreadsheet->setActiveSheetIndex(0);
        // Honour the requested $type: previously an Xlsx writer was always used,
        // even when $type was 'Xls' (writing xlsx content into a .xls file).
        $writer = ($type == 'Xls')
            ? new Xls($this->spreadsheet)
            : new Xlsx($this->spreadsheet);
        $writer->save($filePath);
        $this->spreadsheet->disconnectWorksheets();
        unset($this->spreadsheet);
        unset($writer);
        $this->filePath = $filePath;
    }

    /**
     * Get the URL of the generated file.
     *
     * @return string empty string when no file has been generated yet
     */
    public function getFilePath()
    {
        $tmpHOST = WWW_HOST_URL;
        return empty($this->filePath) ? '' : $tmpHOST.'/'.trim_dirdot($this->filePath);
    }
}
|
import 'dart:io';
import 'dart:ui';
import 'package:Okuna/widgets/theming/smart_text.dart';
import 'package:dcache/dcache.dart';
import 'package:flutter/material.dart';
import 'package:http_parser/http_parser.dart';
import 'package:mime/mime.dart';
/// Temporal until https://github.com/dart-lang/mime/issues/13 hits
import 'package:mime/src/default_extension_map.dart';
import 'package:intl/date_symbol_data_local.dart';
import 'package:pigment/pigment.dart';
import 'localization.dart';
/// Grab-bag of shared utilities: MIME/file-type detection, color parsing,
/// hashtag extraction, relative-time formatting and link preview helpers.
class UtilsService {
  // Base URL of the proxy used for previewing external content.
  String _trustedProxyUrl = '';

  // Small LRU-style caches so repeated color lookups avoid re-parsing.
  static SimpleCache<String, bool> hexColorIsDarkCache =
      SimpleCache(storage: new InMemoryStorage(30));
  static SimpleCache<String, Color> parseHexColorCache =
      SimpleCache(storage: new InMemoryStorage(30));
  // Matches #hashtags that contain at least one letter.
  static RegExp hashtagsRegExp =
      RegExp(r"\B#\w*[a-zA-Z]+\w*", caseSensitive: false);

  void setTrustedProxyUrl(String proxyUrl) {
    _trustedProxyUrl = proxyUrl;
  }

  /// True when the file's detected MIME type is image/*.
  Future<bool> fileHasImageMimeType(File file) async {
    String fileMimeType = await getFileMimeType(file);
    MediaType fileMediaType = MediaType.parse(fileMimeType);
    return fileMediaType.type == 'image';
  }

  /// Extension (without dot) for the file's detected MIME type, or null.
  Future<String?> getFileExtensionForFile(File file) async {
    String fileMimeType = await getFileMimeType(file);
    return getFileExtensionForMimeType(fileMimeType);
  }

  String? getFileExtensionForMimeType(String mimeType) {
    return lookupExtension(mimeType);
  }

  /// MIME type guessed from the file name alone, or null when unknown.
  String? geFileNameMimeType(String fileName) {
    return lookupMimeType(fileName);
  }

  /// Detects the file's MIME type: first by path, then by magic headers,
  /// falling back to application/octet-stream.
  Future<String> getFileMimeType(File file) async {
    String? mimeType = lookupMimeType(file.path);

    if (mimeType == null) {
      mimeType = await _getFileMimeTypeFromMagicHeaders(file);
    }

    return mimeType ?? 'application/octet-stream';
  }

  bool hexColorIsDark(String hexColor) {
    return hexColorIsDarkCache.get(hexColor) ??
        _checkAndStoreHexColorIsDark(hexColor);
  }

  // Computes darkness once and memoizes it in the static cache.
  bool _checkAndStoreHexColorIsDark(String hexColor) {
    Color color = parseHexColor(hexColor);
    bool isDark = colorIsDark(color);
    hexColorIsDarkCache.set(hexColor, isDark);
    return isDark;
  }

  List<String?> extractHashtagsInString(String str) {
    return hashtagsRegExp
        .allMatches(str)
        .map((match) => match.group(0))
        .toList();
  }

  int countHashtagsInString(String str) {
    return extractHashtagsInString(str).length;
  }

  // 0.179 is a common WCAG-derived luminance threshold for dark colors.
  bool colorIsDark(Color color) {
    return color.computeLuminance() < 0.179;
  }

  Color parseHexColor(String hexColor) {
    return parseHexColorCache.get(hexColor) ?? _parseAndStoreColor(hexColor);
  }

  Color _parseAndStoreColor(String colorValue) {
    Color color = Pigment.fromString(colorValue);
    parseHexColorCache.set(colorValue, color);
    return color;
  }

  // LocalizationService localizationService
  /// Formats [date] as a short relative-time string ("3d", "2w", "now", ...)
  /// using localized unit suffixes.
  String timeAgo(DateTime date, LocalizationService _localizationService) {
    /// Originally from https://gist.github.com/DineshKachhot/bc8cee616f30c323c1dd1e63a4bf65df
    final now = DateTime.now();
    final difference = now.difference(date);

    String years = _localizationService.post__time_short_years;
    String weeks = _localizationService.post__time_short_weeks;
    String days = _localizationService.post__time_short_days;
    String hours = _localizationService.post__time_short_hours;
    String mins = _localizationService.post__time_short_minutes;
    String seconds = _localizationService.post__time_short_seconds;

    // Cascades from the largest unit down; singular forms use dedicated strings.
    if ((difference.inDays / 365).floor() >= 2) {
      return '${(difference.inDays / 365).floor()}$years';
    } else if ((difference.inDays / 365).floor() >= 1) {
      return _localizationService.post__time_short_one_year;
    } else if ((difference.inDays / 7).floor() >= 2) {
      return '${(difference.inDays / 7).floor()}$weeks';
    } else if ((difference.inDays / 7).floor() >= 1) {
      return _localizationService.post__time_short_one_week;
    } else if (difference.inDays >= 2) {
      return '${difference.inDays}$days';
    } else if (difference.inDays >= 1) {
      return _localizationService.post__time_short_one_day;
    } else if (difference.inHours >= 2) {
      return '${difference.inHours}$hours';
    } else if (difference.inHours >= 1) {
      return _localizationService.post__time_short_one_hour;
    } else if (difference.inMinutes >= 2) {
      return '${difference.inMinutes}$mins';
    } else if (difference.inMinutes >= 1) {
      return _localizationService.post__time_short_one_minute;
    } else if (difference.inSeconds >= 3) {
      return '${difference.inSeconds}$seconds';
    } else {
      return _localizationService.post__time_short_now_text;
    }
  }

  /// Initializes intl date formatting for the current locale.
  Future<dynamic> initialiseDateFormatting(
      LocalizationService localizationService) async {
    Locale locale = localizationService.getLocale();
    String localeName = locale.toString();
    if (LocalizationService.localizedLocales.contains(locale.languageCode)) {
      localeName = locale.languageCode.replaceFirst('-', '_');
    }
    return initializeDateFormatting(localeName, null);
  }

  // Reads up to the first 12 bytes of the file and lets the mime package
  // sniff the type from those magic headers.
  Future<String?> _getFileMimeTypeFromMagicHeaders(File file) async {
    // TODO When file uploads become larger, this needs to be turned into a stream
    List<int> fileBytes = file.readAsBytesSync();

    int magicHeaderBytesLeft = 12;

    List<int> magicHeaders = [];

    for (final fileByte in fileBytes) {
      if (magicHeaderBytesLeft == 0) break;
      magicHeaders.add(fileByte);
      magicHeaderBytesLeft--;
    }

    String? mimetype = lookupMimeType(file.path, headerBytes: magicHeaders);

    return mimetype;
  }

  /// Add an override for common extensions since different extensions may map
  /// to the same MIME type.
  final Map<String, String> _preferredExtensionsMap = <String, String>{
    'application/vnd.ms-excel': 'xls',
    'image/jpeg': 'jpg',
    'text/x-c': 'c'
  };

  /// Lookup file extension by a given MIME type.
  /// If no extension is found, `null` is returned.
  String? lookupExtension(String mimeType) {
    if (_preferredExtensionsMap.containsKey(mimeType)) {
      return _preferredExtensionsMap[mimeType];
    }
    String? extension;
    // Linear scan over the package's default extension map; last match wins.
    defaultExtensionMap.forEach((String ext, String test) {
      if (mimeType.toLowerCase() == test) {
        extension = ext;
      }
    });
    return extension;
  }

  String getProxiedContentLink(String link) {
    return '$_trustedProxyUrl?$link';
  }

  bool hasLinkToPreview(text){
    return getLinkToPreviewFromText(text) != null;
  }

  /// Returns the first previewable URL in [text], or null.
  /// NOTE(review): `linkRegex` is defined elsewhere (imported via
  /// smart_text.dart) — its exact matching behavior is not visible here.
  String? getLinkToPreviewFromText(String text) {
    List matches = [];
    String? previewUrl;

    matches.addAll(linkRegex.allMatches(text).map((match) {
      return match.group(0);
    }));

    if (matches.length > 0) {
      Uri url = Uri.parse(matches.first);
      String? urlMimeType = geFileNameMimeType(url.path);
      if (urlMimeType != null) {
        String urlFirstType = urlMimeType.split('/').first;
        // Only image/* and text/* URLs are considered previewable.
        if (urlFirstType != 'image' && urlFirstType != 'text') return null;
      }
      previewUrl = matches.first;
    }

    return previewUrl;
  }
}
|
shared_examples "Add Non Person Things: Test Menu Management" do
it "Test Menu Management" do
end
it "Must be logged out" do
end
it "Must login as site admin" do
vivo_login_from_home_page_as("testAdmin@mydomain.edu", "Password")
end
it "Navigate to Menu Management" do
$browser.find_element(:link_text, "Site Admin").click
expect($browser.title).to eq("VIVO Site Administration")
$browser.find_element(:link_text, "Page management").click
expect($browser.title).to eq("Pages")
end
it "Add Activities menu" do
$browser.find_element(:id, "submit").click
expect($browser.title).to eq("Edit")
$browser.find_element(:id, "pageName").clear
$browser.find_element(:id, "pageName").send_keys("Activities")
$browser.find_element(:name, "prettyUrl").clear
$browser.find_element(:name, "prettyUrl").send_keys("/activities")
browser_find_select_list(:id, "typeSelect").select_by(:text, "Browse Class Group")
browser_find_select_list(:id, "selectClassGroup").select_by(:text, "activities")
$browser.find_element(:css, "option[value=\"http://vivoweb.org/ontology#vitroClassGroupactivities\"]").click
$browser.find_element(:id, "doneWithContent").click
$browser.find_element(:id, "menuCheckbox").click
$browser.find_element(:id, "pageSave").click
expect($browser.title).to eq("Pages")
expect(browser_page_text).to include("Home")
expect(browser_page_text).to include("People")
expect(browser_page_text).to include("Organizations")
expect(browser_page_text).to include("Research")
expect(browser_page_text).to include("Events")
expect(browser_page_text).to include("Activities")
end
it "Add Courses menu" do
$browser.find_element(:id, "submit").click
expect($browser.title).to eq("Edit")
$browser.find_element(:id, "pageName").clear
$browser.find_element(:id, "pageName").send_keys("Courses")
$browser.find_element(:name, "prettyUrl").clear
$browser.find_element(:name, "prettyUrl").send_keys("/courses")
browser_find_select_list(:id, "typeSelect").select_by(:text, "Browse Class Group")
browser_find_select_list(:id, "selectClassGroup").select_by(:text, "courses")
$browser.find_element(:css, "option[value=\"http://vivoweb.org/ontology#vitroClassGroupactivities\"]").click
$browser.find_element(:id, "doneWithContent").click
$browser.find_element(:id, "menuCheckbox").click
$browser.find_element(:id, "pageSave").click
expect($browser.title).to eq("Pages")
expect(browser_page_text).to include("Home")
expect(browser_page_text).to include("People")
expect(browser_page_text).to include("Organizations")
expect(browser_page_text).to include("Research")
expect(browser_page_text).to include("Events")
expect(browser_page_text).to include("Activities")
expect(browser_page_text).to include("Courses")
end
it "Add Equipment menu" do
$browser.find_element(:id, "submit").click
expect($browser.title).to eq("Edit")
$browser.find_element(:id, "pageName").clear
$browser.find_element(:id, "pageName").send_keys("Equipment")
$browser.find_element(:name, "prettyUrl").clear
$browser.find_element(:name, "prettyUrl").send_keys("/equipment")
browser_find_select_list(:id, "typeSelect").select_by(:text, "Browse Class Group")
browser_find_select_list(:id, "selectClassGroup").select_by(:text, "equipment")
$browser.find_element(:css, "option[value=\"http://vivoweb.org/ontology#vitroClassGroupactivities\"]").click
$browser.find_element(:id, "doneWithContent").click
$browser.find_element(:id, "menuCheckbox").click
$browser.find_element(:id, "pageSave").click
expect($browser.title).to eq("Pages")
expect(browser_page_text).to include("Home")
expect(browser_page_text).to include("People")
expect(browser_page_text).to include("Organizations")
expect(browser_page_text).to include("Research")
expect(browser_page_text).to include("Events")
expect(browser_page_text).to include("Activities")
$browser.find_element(:id, "submit").click
expect($browser.title).to eq("Edit")
expect(browser_page_text).to include("Home")
expect(browser_page_text).to include("People")
expect(browser_page_text).to include("Organizations")
expect(browser_page_text).to include("Research")
expect(browser_page_text).to include("Events")
expect(browser_page_text).to include("Activities")
expect(browser_page_text).to include("Courses")
expect(browser_page_text).to include("Equipment")
end
it "Add Locations menu" do
expect($browser.title).to eq("Edit")
$browser.find_element(:id, "pageName").clear
$browser.find_element(:id, "pageName").send_keys("Locations")
$browser.find_element(:name, "prettyUrl").clear
$browser.find_element(:name, "prettyUrl").send_keys("/locations")
browser_find_select_list(:id, "typeSelect").select_by(:text, "Browse Class Group")
browser_find_select_list(:id, "selectClassGroup").select_by(:text, "locations")
$browser.find_element(:css, "option[value=\"http://vivoweb.org/ontology#vitroClassGroupactivities\"]").click
$browser.find_element(:id, "doneWithContent").click
$browser.find_element(:id, "menuCheckbox").click
$browser.find_element(:id, "pageSave").click
expect($browser.title).to eq("Pages")
expect(browser_page_text).to include("Home")
expect(browser_page_text).to include("People")
expect(browser_page_text).to include("Organizations")
expect(browser_page_text).to include("Research")
expect(browser_page_text).to include("Events")
expect(browser_page_text).to include("Activities")
expect(browser_page_text).to include("Locations")
end
it "Check Activities menu tab" do
$browser.find_element(:link_text, "Activities").click
expect($browser.title).to eq("Activities")
$browser.find_element(:link_text, "Project (1)").click
# #<tr><td>pause</td><td>5000</td><td></td></tr>
$browser.find_element(:link_text, "Human and Ape Brain Comparison")
$browser.find_element(:link_text, "Research Project (1)").click
# #<tr><td>pause</td><td>5000</td><td></td></tr>
$browser.find_element(:link_text, "Human and Ape Brain Comparison")
$browser.find_element(:link_text, "Service (2)").click
# #<tr><td>pause</td><td>5000</td><td></td></tr>
$browser.find_element(:link_text, "Gorilla Moving Company")
$browser.find_element(:link_text, "Primate Heart Health")
$browser.find_element(:link_text, "Transport Service (1)").click
# #<tr><td>pause</td><td>5000</td><td></td></tr>
$browser.find_element(:link_text, "Gorilla Moving Company")
end
it "Check Courses menu tab" do
$browser.find_element(:link_text, "Courses").click
expect($browser.title).to eq("Courses")
$browser.find_element(:link_text, "Course (2)").click
# #<tr><td>pause</td><td>5000</td><td></td></tr>
expect(browser_page_text).to include("Course")
$browser.find_element(:link_text, "Introduction to Primates")
$browser.find_element(:link_text, "Introduction to Primate Health")
end
it "Check Equipment menu tab" do
$browser.find_element(:link_text, "Equipment").click
expect($browser.title).to eq("Equipment")
$browser.find_element(:link_text, "Equipment (2)").click
# #<tr><td>pause</td><td>5000</td><td></td></tr>
expect(browser_page_text).to include("Equipment")
$browser.find_element(:link_text, "Portable Primate Habitat")
$browser.find_element(:link_text, "Primate Feeder")
end
it "Check Locations menu" do
$browser.find_element(:link_text, "Locations").click
expect($browser.title).to eq("Locations")
$browser.find_element(:link_text, "Building (2)").click
# #<tr><td>pause</td><td>5000</td><td></td></tr>
expect(browser_page_text).to include("Building")
$browser.find_element(:link_text, "Jane Memorial Building")
$browser.find_element(:link_text, "Primate Memorial Building")
$browser.find_element(:link_text, "Facility (5)").click
# #<tr><td>pause</td><td>5000</td><td></td></tr>
$browser.find_element(:link_text, "Jane Memorial Building")
$browser.find_element(:link_text, "Lab Admin Office")
$browser.find_element(:link_text, "Primate Memorial Building")
$browser.find_element(:link_text, "Primate Research Lab Room 123")
$browser.find_element(:link_text, "State Fair Park")
$browser.find_element(:link_text, "Room (1)").click
# #<tr><td>pause</td><td>5000</td><td></td></tr>
$browser.find_element(:link_text, "Lab Admin Office")
end
it "Delete new menus" do
$browser.find_element(:link_text, "Site Admin").click
expect($browser.title).to eq("VIVO Site Administration")
$browser.find_element(:link_text, "Page management").click
expect($browser.title).to eq("Pages")
$browser.find_element(:xpath, "(.//img[@alt='delete this page'])[2]").click
browser_accept_alert("Are you sure you wish to delete this page: Activities?")
expect($browser.title).to eq("Pages")
$browser.find_element(:link_text, "Site Admin").click
expect($browser.title).to eq("VIVO Site Administration")
$browser.find_element(:link_text, "Page management").click
expect($browser.title).to eq("Pages")
$browser.find_element(:xpath, "(.//img[@alt='delete this page'])[4]").click
browser_accept_alert("Are you sure you wish to delete this page: Courses?")
expect($browser.title).to eq("Pages")
$browser.find_element(:link_text, "Site Admin").click
expect($browser.title).to eq("VIVO Site Administration")
$browser.find_element(:link_text, "Page management").click
expect($browser.title).to eq("Pages")
$browser.find_element(:xpath, "(.//img[@alt='delete this page'])[6]").click
browser_accept_alert("Are you sure you wish to delete this page: Equipment?")
expect($browser.title).to eq("Pages")
$browser.find_element(:link_text, "Site Admin").click
expect($browser.title).to eq("VIVO Site Administration")
$browser.find_element(:link_text, "Page management").click
expect($browser.title).to eq("Pages")
$browser.find_element(:xpath, "(.//img[@alt='delete this page'])[7]").click
browser_accept_alert("Are you sure you wish to delete this page: Locations?")
expect($browser.title).to eq("Pages")
$browser.find_element(:link_text, "Site Admin").click
expect($browser.title).to eq("VIVO Site Administration")
$browser.find_element(:link_text, "Page management").click
expect($browser.title).to eq("Pages")
end
it "Logout" do
$browser.find_element(:link_text, "Home").click
vivo_logout
expect($browser.title).to eq("VIVO")
expect(browser_page_text).to include("Log in")
end
end
|
import { ethers } from 'ethers'
/**
 * Buy a NFT with BNB
 * @param contract NFT market contract (must be connected to a signer)
 * @param collectionAddress address of the NFT collection
 * @param tokenId id of the token to buy
 * @returns transaction hash, or null if the transaction failed or was rejected
 */
export const buyTokenUsingBNB = async (
  contract: ethers.Contract,
  collectionAddress: string,
  tokenId: number,
): Promise<string | null> => {
  // Fix: the catch path returns null, so the declared return type must
  // include null (the previous Promise<string> was inaccurate).
  try {
    const tx = await contract.buyTokenUsingBNB(collectionAddress, tokenId)
    const receipt = await tx.wait()
    return receipt.transactionHash
  } catch (error) {
    console.error(error)
    return null
  }
}
/**
 * Buy a NFT with WBNB
 * @param contract NFT market contract (must be connected to a signer)
 * @param collectionAddress address of the NFT collection
 * @param tokenId id of the token to buy
 * @param price WBNB amount to pay, in wei
 * @returns transaction hash, or null if the transaction failed or was rejected
 */
export const buyTokenUsingWBNB = async (
  contract: ethers.Contract,
  collectionAddress: string,
  tokenId: number,
  price: ethers.BigNumber,
): Promise<string | null> => {
  // Fix: the catch path returns null, so the declared return type must
  // include null (the previous Promise<string> was inaccurate).
  try {
    const tx = await contract.buyTokenUsingWBNB(collectionAddress, tokenId, price)
    const receipt = await tx.wait()
    return receipt.transactionHash
  } catch (error) {
    console.error(error)
    return null
  }
}
/**
 * List a NFT for sale
 * @param contract NFT market contract (must be connected to a signer)
 * @param collectionAddress address of the NFT collection
 * @param tokenId id of the token to list
 * @param askPrice asking price, in wei
 * @returns transaction hash, or null if the transaction failed or was rejected
 */
export const createAskOrder = async (
  contract: ethers.Contract,
  collectionAddress: string,
  tokenId: number,
  askPrice: ethers.BigNumber,
): Promise<string | null> => {
  // Fix: the catch path returns null, so the declared return type must
  // include null (the previous Promise<string> was inaccurate).
  try {
    const tx = await contract.createAskOrder(collectionAddress, tokenId, askPrice)
    const receipt = await tx.wait()
    return receipt.transactionHash
  } catch (error) {
    console.error(error)
    return null
  }
}
/**
 * Update the price of a listed NFT
 * @param contract NFT market contract (must be connected to a signer)
 * @param collectionAddress address of the NFT collection
 * @param tokenId id of the listed token
 * @param newPrice new asking price, in wei
 * @returns transaction hash, or null if the transaction failed or was rejected
 */
export const modifyAskOrder = async (
  contract: ethers.Contract,
  collectionAddress: string,
  tokenId: number,
  newPrice: ethers.BigNumber,
): Promise<string | null> => {
  // Fix: the catch path returns null, so the declared return type must
  // include null (the previous Promise<string> was inaccurate).
  try {
    const tx = await contract.modifyAskOrder(collectionAddress, tokenId, newPrice)
    const receipt = await tx.wait()
    return receipt.transactionHash
  } catch (error) {
    console.error(error)
    return null
  }
}
/**
 * Cancel a NFT sale listing. The caller needs to be the NFT owner.
 * @param contract NFT market contract (must be connected to a signer)
 * @param collectionAddress address of the NFT collection
 * @param tokenId id of the listed token
 * @returns transaction hash, or null if the transaction failed or was rejected
 */
export const cancelAskOrder = async (
  contract: ethers.Contract,
  collectionAddress: string,
  tokenId: number,
): Promise<string | null> => {
  // Fix: the catch path returns null, so the declared return type must
  // include null (the previous Promise<string> was inaccurate).
  try {
    const tx = await contract.cancelAskOrder(collectionAddress, tokenId)
    const receipt = await tx.wait()
    return receipt.transactionHash
  } catch (error) {
    console.error(error)
    return null
  }
}
/**
 * Get pending WBNB revenues for a user
 * @param contract NFT market contract
 * @param userAddress address whose pending revenue is queried
 * @returns pending revenues, or null if the call failed
 */
export const getPendingRevenue = async (
  contract: ethers.Contract,
  userAddress: string,
): Promise<ethers.BigNumber | null> => {
  // Fix: the catch path returns null, so the declared return type must
  // include null (the previous Promise<ethers.BigNumber> was inaccurate).
  try {
    const res = await contract.pendingRevenue(userAddress)
    return res
  } catch (error) {
    console.error(error)
    return null
  }
}
/**
 * Claim WBNB pending revenues for a connected users
 * @param contract NFT market contract (must be connected to a signer)
 * @returns transaction hash, or null if the transaction failed or was rejected
 */
export const claimPendingRevenue = async (contract: ethers.Contract): Promise<string | null> => {
  // Fix: the catch path returns null, so the declared return type must
  // include null (the previous Promise<string> was inaccurate).
  try {
    const tx = await contract.claimPendingRevenue()
    const receipt = await tx.wait()
    return receipt.transactionHash
  } catch (error) {
    console.error(error)
    return null
  }
}
|
@file:JvmName("RandomUtility")
package net.kibotu.kotlin.presentation
fun foo() {}
|
/*====================================================================
出力ルーチン
S.Kurohashi 91. 6.25
S.Kurohashi 93. 5.31
$Id$
====================================================================*/
#include "knp.h"
#include "version.h"
/* Shared scratch buffer returned by pp2mrph(); overwritten on every call. */
char mrph_buffer[SMALL_DATA_LEN];

int Sen_Num = 1; /* used only with -table */
int Tag_Num = 1; /* used only with -table */

/* for printing Chinese parse tree */
int bnst_dpnd[BNST_MAX];
int bnst_level[BNST_MAX];
char* bnst_word[BNST_MAX];
char* bnst_pos[BNST_MAX];
char* bnst_tree[BNST_MAX][TREE_WIDTH_MAX];
char* bnst_inverse_tree[TREE_WIDTH_MAX][BNST_MAX];
/*==================================================================*/
char *pp2mrph(char *pp, int pp_len)
/*==================================================================*/
{
    /* Build a morpheme line for the case particle whose surface is the
       first pp_len bytes of pp.  "ガ2" is normalized to "ガ".
       Returns the shared static mrph_buffer, which is overwritten by the
       next call — callers must not hold on to the pointer. */
    char *hira_pp;
    int hinsi_id;

    if (pp_len == strlen("ガ2") && !strncmp(pp, "ガ2", pp_len)) {
        pp_len -= strlen("2"); /* ガ2 -> ガ */
    }
    sprintf(mrph_buffer, "%.*s", pp_len, pp);
    /* katakana2hiragana returns a separate copy (it is free()d below),
       so reusing mrph_buffer for the final result is safe */
    hira_pp = katakana2hiragana(mrph_buffer);
    hinsi_id = get_hinsi_id("助詞");
    sprintf(mrph_buffer, "%s %s %s 助詞 %d 格助詞 %d * 0 * 0 NIL",
            hira_pp, hira_pp, hira_pp,
            hinsi_id,
            get_bunrui_id("格助詞", hinsi_id));
    free(hira_pp);
    return mrph_buffer;
}
/*==================================================================*/
char pos2symbol(char *hinshi, char *bunrui)
/*==================================================================*/
{
    /* Map a POS (and, for nouns, its subclass) to a one-character display
       symbol; '?' for anything unrecognized.  Entries with a NULL subclass
       match any subclass; order matters — specific noun subclasses come
       before the generic noun entry. */
    static const struct {
        const char *h;   /* POS name */
        const char *b;   /* subclass name, or NULL for "any" */
        char sym;
    } table[] = {
        { "特殊",   NULL,       ' ' },
        { "動詞",   NULL,       'v' },
        { "形容詞", NULL,       'j' },
        { "判定詞", NULL,       'c' },
        { "助動詞", NULL,       'x' },
        { "名詞",   "固有名詞", 'N' },
        { "名詞",   "人名",     'J' },
        { "名詞",   "地名",     'C' },
        { "名詞",   NULL,       'n' },
        { "指示詞", NULL,       'd' },
        { "副詞",   NULL,       'a' },
        { "助詞",   NULL,       'p' },
        { "接続詞", NULL,       'c' },
        { "連体詞", NULL,       'm' },
        { "感動詞", NULL,       '!' },
        { "接頭辞", NULL,       'p' },
        { "接尾辞", NULL,       's' },
    };
    int i;

    for (i = 0; i < (int)(sizeof(table) / sizeof(table[0])); i++) {
        if (!strcmp(hinshi, table[i].h) &&
            (table[i].b == NULL || !strcmp(bunrui, table[i].b)))
            return table[i].sym;
    }
    return '?';
}
/*==================================================================*/
void print_mrph(MRPH_DATA *m_ptr)
/*==================================================================*/
{
    /* Print one morpheme line (no trailing newline):
       surface reading lemma POS POS-id subPOS subPOS-id ctype ctype-id
       cform cform-id semantic-info.  For non-Japanese input the symbolic
       name fields are printed as "*"; the numeric ids are always printed. */
    fprintf(Outfp, "%s %s %s ", m_ptr->Goi2, m_ptr->Yomi, m_ptr->Goi);
    if (Language == JAPANESE) {
        if (m_ptr->Hinshi >= CLASS_num) {
            /* POS id outside the class table: terminate the line, report, abort */
            fputc('\n', Outfp);
            fprintf(stderr, ";; Hinshi number is invalid. (%d)\n", m_ptr->Hinshi);
            exit(1);
        }
        fprintf(Outfp, "%s ", Class[m_ptr->Hinshi][0].id);
    }
    else {
        fprintf(Outfp, "* ");
    }
    fprintf(Outfp, "%d ", m_ptr->Hinshi);
    if (Language == JAPANESE && m_ptr->Bunrui)
        fprintf(Outfp, "%s ", Class[m_ptr->Hinshi][m_ptr->Bunrui].id);
    else
        fprintf(Outfp, "* ");
    fprintf(Outfp, "%d ", m_ptr->Bunrui);
    if (Language == JAPANESE && m_ptr->Katuyou_Kata)
        fprintf(Outfp, "%s ", Type[m_ptr->Katuyou_Kata].name);
    else
        fprintf(Outfp, "* ");
    fprintf(Outfp, "%d ", m_ptr->Katuyou_Kata);
    if (Language == JAPANESE && m_ptr->Katuyou_Kei)
        fprintf(Outfp, "%s ",
                Form[m_ptr->Katuyou_Kata][m_ptr->Katuyou_Kei].name);
    else
        fprintf(Outfp, "* ");
    fprintf(Outfp, "%d ", m_ptr->Katuyou_Kei);
    fprintf(Outfp, "%s", m_ptr->Imi);
}
/*==================================================================*/
void print_mrph_f(MRPH_DATA *m_ptr)
/*==================================================================*/
{
    /* Print one morpheme in fixed-width columns: surface, "(reading)",
       POS subclass name, and — only when inflected — conjugation type and
       form.  No trailing newline. */
    char yomi_buffer[SMALL_DATA_LEN];

    sprintf(yomi_buffer, "(%s)", m_ptr->Yomi);
    fprintf(Outfp, "%-16.16s%-18.18s %-14.14s",
            m_ptr->Goi2, yomi_buffer,
            Class[m_ptr->Hinshi][m_ptr->Bunrui].id);
    if (m_ptr->Katuyou_Kata)
        fprintf(Outfp, " %-14.14s %-12.12s",
                Type[m_ptr->Katuyou_Kata].name,
                Form[m_ptr->Katuyou_Kata][m_ptr->Katuyou_Kei].name);
}
/*==================================================================*/
TAG_DATA *search_nearest_para_child(TAG_DATA *bp)
/*==================================================================*/
{
    /* For a coordination head, return its second-to-last coordinated
       element; NULL otherwise.  child[0] holds the last element, so the
       scan starts at index 1. */
    int i;

    if (!bp->para_top_p)
        return NULL;
    for (i = 1; bp->child[i]; i++)
        if (bp->child[i]->para_type != PARA_NIL)
            return bp->child[i];
    return NULL;
}
/*==================================================================*/
void print_eos(int eos_flag)
/*==================================================================*/
{
    /* Terminate a sentence ("EOS") or a paragraph fragment ("EOP"). */
    fputs(eos_flag ? "EOS\n" : "EOP\n", Outfp);
}
/*==================================================================*/
void print_tags(SENTENCE_DATA *sp, int flag, int eos_flag)
/*==================================================================*/
{
    /* Print the tag-level (basic-phrase) analysis: bunsetsu lines ("* "),
       tag lines ("+ ") and the morphemes under each tag, then EOS/EOP.
       Handles two kinds of on-the-fly node insertion, both of which shift
       the output numbering: recovered case-element nodes (OptRecoverPerson)
       and copula basic-phrase splitting (OptCopula).
       flag is currently always 1 (0 selected the old output format). */
    int i, j, count = 0, b_count = 0, case_len, bp_independent_offset = 0, dpnd_head;
    int t_table[TAG_MAX], b_table[BNST_MAX], t_proj_table[TAG_MAX], t_copula_table[TAG_MAX];
    char *cp;
    FEATURE *fp;
    BNST_DATA *pre_bp = NULL;
    MRPH_DATA *m_ptr;
    TAG_DATA *t_ptr, *bp;

    /* Pass 1: build renumbering tables for tags (t_table) and bunsetsu
       (b_table), accounting for the nodes that will be inserted. */
    for (i = 0, t_ptr = sp->tag_data; i < sp->Tag_num; i++, t_ptr++) {
        if (t_ptr->num == -1) {
            continue; /* tag merged away by postprocessing */
        }
        /* inserted nodes: check once per bunsetsu boundary */
        if (OptRecoverPerson && pre_bp != t_ptr->b_ptr) {
            fp = (t_ptr->b_ptr->tag_ptr + t_ptr->b_ptr->tag_num - 1)->f; /* head tag of the bunsetsu */
            while (fp) { /* scan its features */
                if (!strncmp(fp->cp, "格要素-", strlen("格要素-")) &&
                    strstr(fp->cp, ":#")) { /* set by tag_after_dpnd_and_case.rule */
                    t_copula_table[count] = 0;
                    count++;
                    b_count++;
                }
                fp = fp->next;
            }
            pre_bp = t_ptr->b_ptr;
        }
        /* copula basic-phrase split inserts one extra tag */
        if (check_feature(t_ptr->f, "T基本句分解")) {
            t_copula_table[count] = 0;
            count++;
            t_copula_table[count] = 1; /* record the split position in the new numbering */
        }
        else {
            t_copula_table[count] = 0;
        }
        t_table[t_ptr->num] = count++; /* num was renumbered earlier, so usable as index */
        if (t_ptr->bnum >= 0) { /* bunsetsu line (bnum likewise renumbered) */
            b_table[t_ptr->bnum] = b_count++;
        }
    }
    for (i = 0; i < count; i++) { /* used for projectivity (non-crossing) checks */
        t_proj_table[i] = 0;
    }

    /* Pass 2: emit the output using the tables built above. */
    count = 0;
    pre_bp = NULL;
    for (i = 0, t_ptr = sp->tag_data; i < sp->Tag_num; i++, t_ptr++) {
        if (t_ptr->num == -1) {
            continue; /* tag merged away by postprocessing */
        }
        if (flag == 1) {
            bp_independent_offset = 0;
            if (OptExpress == OPT_TABLE)
                fprintf(Outfp, "%%%% LABEL=%d_%db\n", Sen_Num - 1, i + 1);
            /* inserted (recovered case-element) nodes */
            if (OptRecoverPerson && pre_bp != t_ptr->b_ptr) {
                fp = (t_ptr->b_ptr->tag_ptr + t_ptr->b_ptr->tag_num - 1)->f; /* head tag */
                while (fp) { /* scan features */
                    if (!strncmp(fp->cp, "格要素-", strlen("格要素-")) &&
                        (cp = strstr(fp->cp, ":#"))) {
                        case_len = cp - fp->cp - strlen("格要素-"); /* byte length of the case name */
                        cp++; /* points at "#..." */
                        dpnd_head = t_table[(t_ptr->b_ptr->tag_ptr + t_ptr->b_ptr->tag_num - 1)->num]; /* attach to the head */
                        if (t_proj_table[count] && dpnd_head > t_proj_table[count]) { /* projectivity */
                            dpnd_head = t_proj_table[count];
                        }
                        if (!strncmp(fp->cp + strlen("格要素-"), "#", case_len)) {
                            fprintf(Outfp, "* %dD <ノード挿入>\n", b_table[t_ptr->b_ptr->num]);
                            fprintf(Outfp, "+ %dD <ノード挿入>\n", dpnd_head);
                            fprintf(Outfp, "%s %s %s 名詞 6 普通名詞 1 * 0 * 0 NIL\n", cp, cp, cp);
                        }
                        else {
                            fprintf(Outfp, "* %dD <ノード挿入><係:%.*s格>\n", b_table[t_ptr->b_ptr->num], case_len, fp->cp + strlen("格要素-"));
                            fprintf(Outfp, "+ %dD <ノード挿入><係:%.*s格><解析格:%.*s>\n", dpnd_head,
                                    case_len, fp->cp + strlen("格要素-"), case_len, fp->cp + strlen("格要素-"));
                            fprintf(Outfp, "%s %s %s 名詞 6 普通名詞 1 * 0 * 0 NIL\n", cp, cp, cp);
                            fprintf(Outfp, "%s\n", pp2mrph(fp->cp + strlen("格要素-"), case_len));
                        }
                        count++;
                    }
                    fp = fp->next;
                }
                pre_bp = t_ptr->b_ptr;
            }
            /* splitting a copula basic phrase */
            if (check_feature(t_ptr->f, "T基本句分解")) {
                if (t_ptr->bnum >= 0) { /* bunsetsu line */
                    fprintf(Outfp, "* %d%c",
                            t_ptr->b_ptr->dpnd_head == -1 ? -1 : b_table[t_ptr->b_ptr->dpnd_head],
                            t_ptr->b_ptr->dpnd_type);
                    if (t_ptr->b_ptr->f) {
                        fputc(' ', Outfp);
                        print_feature(t_ptr->b_ptr->f, Outfp);
                    }
                    fputc('\n', Outfp);
                }
                fprintf(Outfp, "+ %dD <判定詞基本句分解><係:隣>\n", t_table[t_ptr->num]);
                /* morphemes before the split point go to the inserted tag */
                for (j = 0, m_ptr = t_ptr->mrph_ptr; j < t_ptr->mrph_num; j++, m_ptr++) {
                    if (check_feature(m_ptr->f, "後処理-基本句始")) {
                        break;
                    }
                    print_mrph(m_ptr);
                    if (m_ptr->f) {
                        fputc(' ', Outfp);
                        print_feature(m_ptr->f, Outfp);
                    }
                    fputc('\n', Outfp);
                    bp_independent_offset++;
                }
                count++;
            }
            /* bunsetsu line (unless already printed by the split above) */
            if (bp_independent_offset == 0 && t_ptr->bnum >= 0) {
                if (PrintNum) {
                    fprintf(Outfp, "* %d %d%c",
                            t_ptr->bnum,
                            t_ptr->b_ptr->dpnd_head == -1 ? -1 : b_table[t_ptr->b_ptr->dpnd_head],
                            t_ptr->b_ptr->dpnd_type);
                }
                else {
                    fprintf(Outfp, "* %d%c",
                            t_ptr->b_ptr->dpnd_head == -1 ? -1 : b_table[t_ptr->b_ptr->dpnd_head],
                            t_ptr->b_ptr->dpnd_type);
                }
                if (t_ptr->b_ptr->f) {
                    fputc(' ', Outfp);
                    print_feature(t_ptr->b_ptr->f, Outfp);
                }
                if (OptExpress == OPT_TABLE)
                    fprintf(Outfp, "<BR><BR>");
                fputc('\n', Outfp);
            }
            /* copula split: redirect adnominal modifiers to the noun part
               that now precedes the copula */
            dpnd_head = t_ptr->dpnd_head == -1 ? -1 : t_table[t_ptr->dpnd_head];
            if (OptCopula &&
                dpnd_head != -1 &&
                t_copula_table[dpnd_head]) { /* the governor was copula-split */
                if (t_table[t_ptr->num] < dpnd_head - 1 &&
                    (((check_feature(t_ptr->f, "連体修飾") ||
                       check_feature(t_ptr->f, "係:隣") ||
                       check_feature(t_ptr->f, "係:文節内")) &&
                      (t_ptr->para_type == PARA_NIL || /* in coordination, adjust only the second-to-last element */
                       ((bp = search_nearest_para_child(t_ptr->parent)) && t_ptr->num == bp->num))) ||
                     (t_proj_table[t_table[t_ptr->num]] && dpnd_head > t_proj_table[t_table[t_ptr->num]]))) { /* projectivity */
                    dpnd_head--;
                }
            }
            if (PrintNum) {
                fprintf(Outfp, "+ %d %d%c", t_ptr->num, dpnd_head, t_ptr->dpnd_type);
            }
            else {
                fprintf(Outfp, "+ %d%c", dpnd_head, t_ptr->dpnd_type);
            }
            if (t_ptr->f) {
                fputc(' ', Outfp);
                print_feature(t_ptr->f, Outfp);
            }
            if (OptExpress == OPT_TABLE)
                fprintf(Outfp, "<BR><BR>");
            fputc('\n', Outfp);
            /* record the spans covered by this dependency for later
               projectivity checks */
            for (j = t_table[t_ptr->num]; j < dpnd_head; j++) {
                if (!t_proj_table[j] || t_proj_table[j] > dpnd_head) {
                    t_proj_table[j] = dpnd_head;
                }
            }
        }
        else {
            fprintf(Outfp, "%c\n", t_ptr->bnum < 0 ? '+' : '*');
        }
        /* morphemes of this tag (skipping any already printed by the split) */
        for (j = bp_independent_offset, m_ptr = t_ptr->mrph_ptr + bp_independent_offset; j < t_ptr->mrph_num; j++, m_ptr++) {
            print_mrph(m_ptr);
            if (m_ptr->f) {
                fputc(' ', Outfp);
                print_feature(m_ptr->f, Outfp);
            }
            if (OptExpress == OPT_TABLE)
                fprintf(Outfp, "<BR><BR>");
            fputc('\n', Outfp);
        }
        count++;
    }
    print_eos(eos_flag);
}
/*==================================================================*/
void print_mrphs(SENTENCE_DATA *sp, int flag, int eos_flag)
/*==================================================================*/
{
    /* Morpheme-dependency output: for each morpheme, print the enclosing
       bunsetsu line ("* ") and tag line ("+ ") when the morpheme opens one,
       a morpheme-dependency line ("- "), and the morpheme itself.  The tag's
       feature list is carried in bp_f and attached to the tag's head
       morpheme, merged with that morpheme's own features. */
    int i;
    MRPH_DATA *m_ptr;
    TAG_DATA *t_ptr;
    FEATURE *bp_f = NULL, *bp_copied_f;

    for (i = 0, m_ptr = sp->mrph_data; i < sp->Mrph_num; i++, m_ptr++) {
        /* tag (basic-phrase) line */
        if (m_ptr->tnum >= 0) {
            t_ptr = sp->tag_data + m_ptr->tnum;
            /* bunsetsu line */
            if (t_ptr->bnum >= 0) {
                if (PrintNum) {
                    fprintf(Outfp, "* %d %d%c",
                            t_ptr->bnum,
                            t_ptr->b_ptr->dpnd_head == -1 ? -1 : t_ptr->b_ptr->dpnd_head,
                            t_ptr->b_ptr->dpnd_type);
                }
                else {
                    fprintf(Outfp, "* %d%c",
                            t_ptr->b_ptr->dpnd_head == -1 ? -1 : t_ptr->b_ptr->dpnd_head,
                            t_ptr->b_ptr->dpnd_type);
                }
                if (t_ptr->b_ptr->f) {
                    fputc(' ', Outfp);
                    print_feature(t_ptr->b_ptr->f, Outfp);
                }
                fputc('\n', Outfp);
            }
            if (PrintNum) {
                fprintf(Outfp, "+ %d %d%c",
                        m_ptr->tnum, t_ptr->dpnd_head, t_ptr->dpnd_type);
            }
            else {
                fprintf(Outfp, "+ %d%c",
                        t_ptr->dpnd_head, t_ptr->dpnd_type);
            }
            if (t_ptr->f) {
                fputc(' ', Outfp);
                print_feature(t_ptr->f, Outfp);
                bp_f = t_ptr->f; /* remember for the tag's head morpheme below */
            }
            fputc('\n', Outfp);
        }
        /* morpheme-dependency line */
        if (PrintNum) {
            fprintf(Outfp, "- %d %d%c", m_ptr->num, m_ptr->dpnd_head, m_ptr->dpnd_type);
        }
        else {
            fprintf(Outfp, "- %d%c", m_ptr->dpnd_head, m_ptr->dpnd_type);
        }
        /* morpheme features are printed on the morpheme line below, so omitted here */
        fputc('\n', Outfp);
        /* the morpheme itself */
        print_mrph(m_ptr);
        /* attach the tag's features to the tag-head morpheme */
        if (m_ptr->out_head_flag) {
            if (bp_f) {
                fputc(' ', Outfp);
                if (m_ptr->f) { /* morpheme's own features -> merge with bp_f */
                    bp_copied_f = NULL;
                    copy_feature(&bp_copied_f, bp_f);
                    copy_feature(&bp_copied_f, m_ptr->f); /* overrides e.g. normalized representations in bp_f */
                    print_feature(bp_copied_f, Outfp);
                    clear_feature(&bp_copied_f);
                }
                else {
                    print_feature(bp_f, Outfp);
                }
                bp_f = NULL;
            }
        }
        else {
            fputs(" <係:基本句内>", Outfp);
            if (m_ptr->f) { /* morpheme's own features */
                print_feature(m_ptr->f, Outfp);
            }
        }
        fputc('\n', Outfp);
    }
    print_eos(eos_flag);
}
/*==================================================================*/
void print_mrphs_only(SENTENCE_DATA *sp, int eos_flag)
/*==================================================================*/
{
    /* Print just the morpheme lines (with features when present),
       followed by EOS/EOP. */
    int i;
    MRPH_DATA *m_ptr;

    for (i = 0, m_ptr = sp->mrph_data; i < sp->Mrph_num; i++, m_ptr++) {
        print_mrph(m_ptr);
        if (m_ptr->f) {
            fputc(' ', Outfp);
            print_feature(m_ptr->f, Outfp);
        }
        fputc('\n', Outfp);
    }
    print_eos(eos_flag);
}
/*==================================================================*/
void print_bnst_with_mrphs(SENTENCE_DATA *sp, int have_dpnd_flag, int eos_flag)
/*==================================================================*/
{
    /* Print each bunsetsu as a "* " line followed by its morpheme lines,
       then EOS/EOP.  have_dpnd_flag == 1 -> include dependency head/type
       (and features); otherwise emit a bare "*" line.
       Fix: removed the unused local `char *cp;`. */
    int i, j;
    MRPH_DATA *m_ptr;
    BNST_DATA *b_ptr;

    for (i = 0, b_ptr = sp->bnst_data; i < sp->Bnst_num; i++, b_ptr++) {
        if (b_ptr->num == -1) {
            continue; /* bunsetsu merged away by postprocessing */
        }
        if (have_dpnd_flag == 1) {
            /* Chinese coordination is marked 'P' instead of the dpnd_type char */
            if (Language == CHINESE && (b_ptr->is_para == 1 || b_ptr->is_para == 2)) {
                fprintf(Outfp, "* %dP", b_ptr->dpnd_head);
            }
            else {
                fprintf(Outfp, "* %d%c", b_ptr->dpnd_head, b_ptr->dpnd_type);
            }
            if (b_ptr->f) {
                fprintf(Outfp, " ");
                print_feature(b_ptr->f, Outfp);
            }
            fprintf(Outfp, "\n");
        }
        else {
            fprintf(Outfp, "*\n");
        }
        for (j = 0, m_ptr = b_ptr->mrph_ptr; j < b_ptr->mrph_num; j++, m_ptr++) {
            print_mrph(m_ptr);
            if (m_ptr->f) {
                fprintf(Outfp, " ");
                print_feature(m_ptr->f, Outfp);
            }
            /* print_mrph_f(m_ptr); */
            fprintf(Outfp, "\n");
        }
    }
    print_eos(eos_flag);
}
/*==================================================================*/
void print_all_result(SENTENCE_DATA *sp, int eos_flag)
/*==================================================================*/
{
    /* Dispatch to the output routine for the current analysis mode and
       flush Outfp.  OPT_FILTER -> morphemes only; OPT_BNST -> bunsetsu
       with morphemes; otherwise the standard result, except when n-best
       or discourse/ellipsis output is handled elsewhere.  A Chinese
       parse additionally gets a tree rendering. */
    if (OptAnalysis == OPT_FILTER) {
        print_mrphs_only(sp, eos_flag);
    }
    else if (OptAnalysis == OPT_BNST) {
        print_bnst_with_mrphs(sp, 0, eos_flag);
    }
    else if (OptNbest == FALSE && !(OptArticle && OptEllipsis)) {
        print_result(sp, 1, eos_flag);
    }
    if (Language == CHINESE) {
        print_tree_for_chinese(sp);
    }
    fflush(Outfp);
}
/*==================================================================*/
void _print_bnst(TAG_DATA *ptr)
/*==================================================================*/
{
    /* Print the concatenated surface strings of a tag's morphemes;
       a NULL tag stands for an unspecified person referent. */
    int i;

    if (ptr == NULL) {
        fprintf(Outfp, "不特定:人");
        return;
    }
    for (i = 0; i < ptr->mrph_num; i++)
        fprintf(Outfp, "%s", ptr->mrph_ptr[i].Goi2);
}
/*==================================================================*/
void print_mrph_with_para(MRPH_DATA *ptr, char *cp)
/*==================================================================*/
{
    /* Two modes:
       cp != NULL : write the morpheme's surface (or "PARA" for a
                    coordination head) into cp.
       cp == NULL : print the surface (plus optional POS symbol and
                    <P>/<I>/(D) coordination markers) to Outfp.
       Fixes: removed the unused local `int i;`; collapsed two if/else
       pairs whose OPT_TABLE and non-table branches were byte-identical;
       removed undefined behavior in the table-label fprintf (see below). */
    int cur;

    if (cp && ptr) {
        if (ptr->para_type == PARA_NORMAL) strcpy(cp, "<P>");
        else if (ptr->para_type == PARA_INCOMP) strcpy(cp, "<I>");
        else cp[0] = '\0';
        if (ptr->para_top_p == TRUE)
            strcat(cp, "PARA");
        else {
            /* NOTE(review): this strcpy overwrites the <P>/<I> marker
               written just above (same pattern as print_bnst) — confirm
               this is intended. */
            strcpy(cp, ptr->Goi2);
        }
    } else if (cp == NULL && ptr) {
        if (ptr->para_top_p == TRUE) {
            fprintf(Outfp, "PARA");
        } else {
            if (OptExpress == OPT_TABLE) {
                /* Bug fix: the original passed Tag_Num++ and Tag_Num - 1
                   in the same argument list — an unsequenced modification
                   and read (undefined behavior).  Evaluate once; the
                   chosen values match the right-to-left argument
                   evaluation the old code effectively relied on. */
                cur = Tag_Num++;
                fprintf(Outfp, "%%%% %d %d 1 LABEL=%d_%db align=right style=white-space:nowrap\n",
                        Sen_Num, cur, Sen_Num, cur - 1);
            }
            fprintf(Outfp, "%s", ptr->Goi2);
            if (Language == JAPANESE && OptDisplay != OPT_NORMAL && OptDisplay != OPT_SIMPLE) {
                fprintf(Outfp, "%c",
                        pos2symbol(Class[ptr->Hinshi][0].id,
                                   Class[ptr->Hinshi][ptr->Bunrui].id));
            }
        }
        if (ptr->para_type == PARA_NORMAL) fprintf(Outfp, "<P>");
        else if (ptr->para_type == PARA_INCOMP) fprintf(Outfp, "<I>");
        if (ptr->to_para_p == TRUE) fprintf(Outfp, "(D)");
    }
}
/*==================================================================*/
void print_bnst(BNST_DATA *ptr, char *cp)
/*==================================================================*/
{
    /* Two modes:
       cp != NULL : write the bunsetsu's surface (or "PARA" for a
                    coordination head) into cp.
       cp == NULL : print the surface of each morpheme (plus optional POS
                    symbols and <P>/<I>/(D) coordination markers) to Outfp.
       Fixes: collapsed two if/else pairs whose OPT_TABLE and non-table
       branches were byte-identical; removed undefined behavior in the
       table-label fprintf (see below). */
    int i;
    int cur;

    if (cp && ptr) {
        if (ptr->para_type == PARA_NORMAL) strcpy(cp, "<P>");
        else if (ptr->para_type == PARA_INCOMP) strcpy(cp, "<I>");
        else cp[0] = '\0';
        if (ptr->para_top_p == TRUE)
            strcat(cp, "PARA");
        else {
            /* NOTE(review): this strcpy overwrites the <P>/<I> marker
               written just above — confirm this is intended. */
            strcpy(cp, ptr->mrph_ptr->Goi2);
            for (i = 1; i < ptr->mrph_num; i++)
                strcat(cp, (ptr->mrph_ptr + i)->Goi2);
        }
    } else if (cp == NULL && ptr) {
        if (ptr->para_top_p == TRUE) {
            fprintf(Outfp, "PARA");
        } else {
            if (OptExpress == OPT_TABLE) {
                /* Bug fix: the original passed Tag_Num++ and Tag_Num - 1
                   in the same argument list — an unsequenced modification
                   and read (undefined behavior).  Evaluate once; the
                   chosen values match the right-to-left argument
                   evaluation the old code effectively relied on. */
                cur = Tag_Num++;
                fprintf(Outfp, "%%%% %d %d 1 LABEL=%d_%db align=right style=white-space:nowrap\n",
                        Sen_Num, cur, Sen_Num, cur - 1);
            }
            for (i = 0; i < ptr->mrph_num; i++) {
                fprintf(Outfp, "%s", (ptr->mrph_ptr + i)->Goi2);
                if (Language == JAPANESE && OptDisplay != OPT_NORMAL && OptDisplay != OPT_SIMPLE) {
                    fprintf(Outfp, "%c",
                            pos2symbol(Class[(ptr->mrph_ptr + i)->Hinshi]
                                       [0].id,
                                       Class[(ptr->mrph_ptr + i)->Hinshi]
                                       [(ptr->mrph_ptr + i)->Bunrui].id));
                }
            }
        }
        if (ptr->para_type == PARA_NORMAL) fprintf(Outfp, "<P>");
        else if (ptr->para_type == PARA_INCOMP) fprintf(Outfp, "<I>");
        if (ptr->to_para_p == TRUE) fprintf(Outfp, "(D)");
    }
}
/*==================================================================*/
void print_data2ipal_corr(BNST_DATA *b_ptr, CF_PRED_MGR *cpm_ptr)
/*==================================================================*/
{
    /* Print, for the predicate's own slot and for each child of b_ptr,
       the matched case-frame element as " N<k>" (or " *" when unmatched).
       offset shifts the element numbering for the active voice; for the
       passive/causative frames listed below numbering starts at 0.
       NOTE(review): exact numbering semantics inferred from the code —
       confirm against the case-frame documentation. */
    int i, j, elem_num = 0;
    int offset;
    int flag;

    switch (cpm_ptr->cmm[0].cf_ptr->voice) {
    case FRAME_PASSIVE_I:
    case FRAME_CAUSATIVE_WO_NI:
    case FRAME_CAUSATIVE_WO:
    case FRAME_CAUSATIVE_NI:
        offset = 0;
        break;
    default:
        offset = 1;
        break;
    }

    /* the predicate slot itself (elem_b_num[0] == -1 means unfilled) */
    flag = FALSE;
    if (cpm_ptr->elem_b_num[0] == -1) {
        elem_num = 0;
        flag = TRUE;
    }
    if (flag == TRUE) {
        flag = FALSE;
        for (j = 0; j < cpm_ptr->cmm[0].cf_ptr->element_num; j++)
            if (cpm_ptr->cmm[0].result_lists_p[0].flag[j] == elem_num) {
                fprintf(Outfp, " N%d", offset + j);
                flag = TRUE;
            }
    }
    if (flag == FALSE)
        fprintf(Outfp, " *");

    /* each dependent of the predicate bunsetsu */
    for (i = 0; b_ptr->child[i]; i++) {
        flag = FALSE;
        /* find which case element (if any) this child fills */
        for (j = 0; j < cpm_ptr->cf.element_num; j++)
            if (cpm_ptr->elem_b_num[j] == i) {
                elem_num = j;
                flag = TRUE;
                break;
            }
        if (flag == TRUE) {
            flag = FALSE;
            /* map the filled element to the frame element number(s) */
            for (j = 0; j < cpm_ptr->cmm[0].cf_ptr->element_num; j++)
                if (cpm_ptr->cmm[0].result_lists_p[0].flag[j] == elem_num) {
                    fprintf(Outfp, " N%d", offset + j);
                    flag = TRUE;
                }
        }
        if (flag == FALSE)
            fprintf(Outfp, " *");
    }
}
/*==================================================================*/
void print_bnst_detail(BNST_DATA *ptr)
/*==================================================================*/
{
    /* Print a bunsetsu as an S-expression: either a coordination node
       ("noun_para"/"pred_para"), or the bunsetsu number, dependency type,
       its morphemes (each with features), the bunsetsu features, and —
       for predicates after case analysis — the selected case frame,
       its voice, and the data-to-frame correspondence. */
    int i;
    MRPH_DATA *m_ptr;

    fputc('(', Outfp); /* bunsetsu open */
    if ( ptr->para_top_p == TRUE ) {
        if (ptr->child[1] &&
            ptr->child[1]->para_key_type == PARA_KEY_N)
            fprintf(Outfp, "noun_para");
        else
            fprintf(Outfp, "pred_para");
    }
    else {
        fprintf(Outfp, "%d ", ptr->num);
        /* dependency information (added 97/10/29) */
        fprintf(Outfp, "(type:%c) ", ptr->dpnd_type);
        fputc('(', Outfp);
        for (i=0, m_ptr=ptr->mrph_ptr; i < ptr->mrph_num; i++, m_ptr++) {
            fputc('(', Outfp);
            print_mrph(m_ptr);
            fprintf(Outfp, " ");
            print_feature2(m_ptr->f, Outfp);
            fputc(')', Outfp);
        }
        fputc(')', Outfp);
        fprintf(Outfp, " ");
        print_feature2(ptr->f, Outfp);
        if (OptAnalysis == OPT_DPND ||
            !check_feature(ptr->f, "用言") || /* not a predicate */
            ptr->cpm_ptr == NULL) { /* before case analysis */
            fprintf(Outfp, " NIL");
        }
        else {
            fprintf(Outfp, " (");
            if (ptr->cpm_ptr->cmm[0].cf_ptr == NULL)
                fprintf(Outfp, "-2"); /* no entry in the case-frame dictionary */
            else if ((ptr->cpm_ptr->cmm[0].cf_ptr)->cf_address == -1)
                fprintf(Outfp, "-1"); /* no case elements */
            else {
                fprintf(Outfp, "%s",
                        (ptr->cpm_ptr->cmm[0].cf_ptr)->cf_id);
                /* voice of the selected frame */
                switch (ptr->cpm_ptr->cmm[0].cf_ptr->voice) {
                case FRAME_ACTIVE:
                    fprintf(Outfp, " 能動"); break;
                case FRAME_PASSIVE_I:
                    fprintf(Outfp, " 間受"); break;
                case FRAME_PASSIVE_1:
                    fprintf(Outfp, " 直受1"); break;
                case FRAME_PASSIVE_2:
                    fprintf(Outfp, " 直受2"); break;
                case FRAME_CAUSATIVE_WO_NI:
                    fprintf(Outfp, " 使役ヲニ"); break;
                case FRAME_CAUSATIVE_WO:
                    fprintf(Outfp, " 使役ヲ"); break;
                case FRAME_CAUSATIVE_NI:
                    fprintf(Outfp, " 使役ニ"); break;
                case FRAME_CAUSATIVE_PASSIVE:
                    fprintf(Outfp, " 使役&受身"); break;
                case FRAME_POSSIBLE:
                    fprintf(Outfp, " 可能"); break;
                case FRAME_POLITE:
                    fprintf(Outfp, " 尊敬"); break;
                case FRAME_SPONTANE:
                    fprintf(Outfp, " 自発"); break;
                default: break;
                }
                fprintf(Outfp, " (");
                print_data2ipal_corr(ptr, ptr->cpm_ptr);
                fprintf(Outfp, ")");
            }
            fprintf(Outfp, ")");
            /* ------------ disabled: output predicate base and case forms ----
            if (ptr->cpm_ptr != NULL &&
                ptr->cpm_ptr->cmm[0].cf_ptr != NULL &&
                (ptr->cpm_ptr->cmm[0].cf_ptr)->cf_address != -1) {
                get_ipal_frame(i_ptr,
                               (ptr->cpm_ptr->cmm[0].cf_ptr)->cf_address);
                if (i_ptr->DATA[i_ptr->jyutugoso]) {
                    fprintf(Outfp, " 述語素 %s",
                            i_ptr->DATA+i_ptr->jyutugoso);
                } else {
                    fprintf(Outfp, " 述語素 nil");
                }
                fprintf(Outfp, " 格形式 (");
                for (j=0; *((i_ptr->DATA)+(i_ptr->kaku_keishiki[j]))
                         != NULL; j++){
                    fprintf(Outfp, " %s",
                            i_ptr->DATA+i_ptr->kaku_keishiki[j]);
                }
                fprintf(Outfp, ")");
            }
            ------------------------------------------------------- */
        }
    }
    fputc(')', Outfp); /* bunsetsu close */
}
/*==================================================================*/
void print_sentence_slim(SENTENCE_DATA *sp)
/*==================================================================*/
{
    /* Print the sentence as a single parenthesized line of bunsetsu
       surfaces (tree properties are initialized first). */
    int i;

    init_bnst_tree_property(sp);
    fputc('(', Outfp);
    for (i = 0; i < sp->Bnst_num; i++)
        print_bnst(sp->bnst_data + i, NULL);
    fputc(')', Outfp);
    fputc('\n', Outfp);
}
/*====================================================================
行列表示
====================================================================*/
/*==================================================================*/
void print_M_bnst(SENTENCE_DATA *sp, int b_num, int max_length, int *para_char)
/*==================================================================*/
{
    /* Print the row label (bunsetsu surface, right-aligned) for one row
       of the matrix display.  A trailing comma morpheme (読点) is shown
       as ','; coordination keys get an extra "a>", "b>", ... prefix and
       *para_char is advanced.  When the surface is wider than the
       available space, its left part is clipped (in whole display cells). */
    BNST_DATA *ptr = &(sp->bnst_data[b_num]);
    int i, len, space, comma_p;
    char tmp[BNST_LENGTH_MAX], *cp = tmp;

    if ( ptr->mrph_num == 1 ) {
        strcpy(tmp, ptr->mrph_ptr->Goi2);
        comma_p = FALSE;
    } else {
        strcpy(tmp, ptr->mrph_ptr->Goi2);
        for (i = 1; i < (ptr->mrph_num - 1); i++)
            strcat(tmp, (ptr->mrph_ptr + i)->Goi2);
        /* replace a final Japanese comma morpheme with ASCII ',' */
        if (!strcmp(Class[(ptr->mrph_ptr + ptr->mrph_num - 1)->Hinshi][0].id,
                    "特殊") &&
            !strcmp(Class[(ptr->mrph_ptr + ptr->mrph_num - 1)->Hinshi]
                    [(ptr->mrph_ptr + ptr->mrph_num - 1)->Bunrui].id,
                    "読点")) {
            strcat(tmp, ",");
            comma_p = TRUE;
        } else {
            strcat(tmp, (ptr->mrph_ptr + ptr->mrph_num - 1)->Goi2);
            comma_p = FALSE;
        }
    }
    /* columns available for this row (3 per remaining matrix column;
       2 more are reserved for the "x>" coordination-key prefix) */
    space = ptr->para_key_type ?
        max_length-(sp->Bnst_num-b_num-1)*3-2 : max_length-(sp->Bnst_num-b_num-1)*3;
    len = comma_p ?
        ptr->length - 1 : ptr->length;
    if ( len > space ) {
        /* clip from the left, keeping double-width alignment */
        if ( (space%2) != (len%2) ) {
            cp += len + 1 - space;
            fputc(' ', Outfp);
        } else
            cp += len - space;
    } else
        for ( i=0; i<space-len; i++ ) fputc(' ', Outfp);
    if ( ptr->para_key_type ) {
        fprintf(Outfp, "%c>", 'a'+ (*para_char));
        (*para_char)++;
    }
    fprintf(Outfp, "%s", cp);
}
/*==================================================================*/
void print_line(int length, int flag)
/*==================================================================*/
{
    /* Print a horizontal rule of the given length; the last character is
       ')' when flag is set (marks a truncated/overflowing matrix). */
    int i;

    for (i = 0; i + 1 < length; i++)
        fputc('-', Outfp);
    fputc(flag ? ')' : '-', Outfp);
    fputc('\n', Outfp);
}
/*==================================================================*/
void print_matrix(SENTENCE_DATA *sp, int type, int key_pos)
/*==================================================================*/
{
    /* Print one of the analysis matrices (PARA / DPND / MASK / QUOTE /
       RESTRICT variants) as an upper-triangular table with bunsetsu
       surfaces as row labels.  For PARA, coordination paths are overlaid
       on the cells ('a', 'b', ... per structure; '*' where paths overlap)
       and per-structure scores are printed below the table. */
    int i, j, length;
    int over_flag = 0;
    int max_length = 0;
    int para_char = 0;     /* next letter for labelling para keys */
    PARA_DATA *ptr;

    for ( i=0; i<sp->Bnst_num; i++ )
        for ( j=0; j<sp->Bnst_num; j++ )
            path_matrix[i][j] = 0;

    /* mark coordination paths (PARA only) */
    if (type == PRINT_PARA) {
        for ( i=0; i<sp->Para_num; i++ ) {
            ptr = &sp->para_data[i];
            if (ptr->max_score < 0.0) continue;
            /* even when status is 'x', show it for reference if it has a score */
            for ( j=ptr->key_pos+1; j<=ptr->jend_pos; j++ ) {
                if (Language != CHINESE) {
                    path_matrix[ptr->max_path[j-ptr->key_pos-1]][j] =
                        path_matrix[ptr->max_path[j-ptr->key_pos-1]][j] ?
                        -1 : 'a' + i;
                }
                else {
                    if (check_feature((sp->bnst_data + j)->f, "CC") || check_feature((sp->bnst_data + j)->f, "PU")) {
                        path_matrix[ptr->max_path[j-ptr->key_pos]][j] =
                            path_matrix[ptr->max_path[j-ptr->key_pos]][j] ?
                            -1 : 'a' + i;
                    }
                    else {
                        path_matrix[ptr->max_path[j-ptr->key_pos-1]][j] =
                            path_matrix[ptr->max_path[j-ptr->key_pos-1]][j] ?
                            -1 : 'a' + i;
                    }
                }
            }
        }
    }

    /* compute the label-column width */
    for ( i=0; i<sp->Bnst_num; i++ ) {
        length = sp->bnst_data[i].length + (sp->Bnst_num-i-1)*3;
        if ( sp->bnst_data[i].para_key_type ) length += 2;
        if ( max_length < length ) max_length = length;
    }

    /* width limiting for printing — currently disabled (if (0)) */
    if ( 0 ) {
        if ( PRINT_WIDTH < sp->Bnst_num*3 ) {
            over_flag = 1;
            sp->Bnst_num = PRINT_WIDTH/3;
            max_length = PRINT_WIDTH;
        } else if ( PRINT_WIDTH < max_length ) {
            max_length = PRINT_WIDTH;
        }
    }

    if (type == PRINT_PARA)
        fprintf(Outfp, "<< PARA MATRIX >>\n");
    else if (type == PRINT_DPND)
        fprintf(Outfp, "<< DPND MATRIX >>\n");
    else if (type == PRINT_MASK)
        fprintf(Outfp, "<< MASK MATRIX >>\n");
    else if (type == PRINT_QUOTE)
        fprintf(Outfp, "<< QUOTE MATRIX >>\n");
    else if (type == PRINT_RSTR)
        fprintf(Outfp, "<< RESTRICT MATRIX for PARA RELATION>>\n");
    else if (type == PRINT_RSTD)
        fprintf(Outfp, "<< RESTRICT MATRIX for DEPENDENCY STRUCTURE>>\n");
    else if (type == PRINT_RSTQ)
        fprintf(Outfp, "<< RESTRICT MATRIX for QUOTE SCOPE>>\n");

    /* header row: column indices */
    print_line(max_length, over_flag);
    for ( i=0; i<(max_length-sp->Bnst_num*3); i++ ) fputc(' ', Outfp);
    for ( i=0; i<sp->Bnst_num; i++ ) fprintf(Outfp, "%2d ", i);
    fputc('\n', Outfp);
    print_line(max_length, over_flag);

    /* body: one row per bunsetsu, upper triangle only */
    for ( i=0; i<sp->Bnst_num; i++ ) {
        print_M_bnst(sp, i, max_length, &para_char);
        for ( j=i+1; j<sp->Bnst_num; j++ ) {
            if (type == PRINT_PARA) {
                fprintf(Outfp, "%2d", match_matrix[i][j]);
            } else if (type == PRINT_DPND) {
                if (Dpnd_matrix[i][j] == 0)
                    fprintf(Outfp, " -");
                else
                    fprintf(Outfp, " %c", (char)Dpnd_matrix[i][j]);
            } else if (type == PRINT_MASK) {
                fprintf(Outfp, "%2d", Mask_matrix[i][j]);
            } else if (type == PRINT_QUOTE) {
                fprintf(Outfp, "%2d", Quote_matrix[i][j]);
            } else if (type == PRINT_RSTR ||
                       type == PRINT_RSTD ||
                       type == PRINT_RSTQ) {
                if (j <= key_pos)
                    fprintf(Outfp, "--");
                else if (key_pos < i)
                    fprintf(Outfp, " |");
                else
                    fprintf(Outfp, "%2d", restrict_matrix[i][j]);
            }
            /* overlay the coordination-path marker */
            switch(path_matrix[i][j]) {
            case 0: fputc(' ', Outfp); break;
            case -1: fputc('*', Outfp); break;
            default: fputc(path_matrix[i][j], Outfp); break;
            }
        }
        fputc('\n', Outfp);
    }
    print_line(max_length, over_flag);

    /* PARA footer: per-structure scores "char(status):max(pure)" */
    if (type == PRINT_PARA) {
        for (i = 0; i < sp->Para_num; i++) {
            fprintf(Outfp, "%c(%c):%4.1f(%4.1f) ",
                    sp->para_data[i].para_char,
                    sp->para_data[i].status,
                    sp->para_data[i].max_score,
                    sp->para_data[i].pure_score);
        }
        fputc('\n', Outfp);
    }
}
/*==================================================================*/
void assign_para_similarity_feature(SENTENCE_DATA *sp)
/*==================================================================*/
{
    /* Attach a "並列類似度:<score>" feature to the key bunsetsu of every
       coordination structure in the sentence. */
    int i;
    char feature_str[DATA_LEN];
    PARA_DATA *p_ptr;

    for (i = 0; i < sp->Para_num; i++) {
        p_ptr = sp->para_data + i;
        sprintf(feature_str, "並列類似度:%.3f", p_ptr->max_score);
        assign_cfeature(&(sp->bnst_data[p_ptr->key_pos].f), feature_str, FALSE);
    }
}
/*====================================================================
並列構造間の関係表示
====================================================================*/
/*==================================================================*/
void print_para_manager(SENTENCE_DATA *sp, PARA_MANAGER *m_ptr, int level)
/*==================================================================*/
{
    /* Recursively print one coordination-structure group: its member
       letters, then each part as "(start)" or "(start-end)", then its
       children indented 5 columns per nesting level. */
    int i;

    for (i = 0; i < level * 5; i++)
        fputc(' ', Outfp);
    for (i = 0; i < m_ptr->para_num; i++)
        fprintf(Outfp, " %c", sp->para_data[m_ptr->para_data_num[i]].para_char);
    fputc(':', Outfp);
    for (i = 0; i < m_ptr->part_num; i++) {
        fputc('(', Outfp);
        print_bnst(&sp->bnst_data[m_ptr->start[i]], NULL);
        if (m_ptr->start[i] != m_ptr->end[i]) {
            /* multi-bunsetsu part: print the span end as well */
            fputc('-', Outfp);
            print_bnst(&sp->bnst_data[m_ptr->end[i]], NULL);
        }
        fputc(')', Outfp);
    }
    fputc('\n', Outfp);
    for (i = 0; i < m_ptr->child_num; i++)
        print_para_manager(sp, m_ptr->child[i], level + 1);
}
/*==================================================================*/
void print_para_relation(SENTENCE_DATA *sp)
/*==================================================================*/
{
    /* Print every top-level (parentless) coordination-structure group;
       children are handled recursively by print_para_manager. */
    int i;
    PARA_MANAGER *m_ptr;

    for (i = 0; i < sp->Para_M_num; i++) {
        m_ptr = sp->para_manager + i;
        if (m_ptr->parent == NULL)
            print_para_manager(sp, m_ptr, 0);
    }
}
/*====================================================================
木構造表示(from JK)
====================================================================*/
static int max_width; /* 木の最大幅 */
/*==================================================================*/
int mylog(int n)
/*==================================================================*/
{
    /* Return 2^n for n >= 0 (despite the name, this is a power of two,
       not a logarithm); returns 1 for n <= 0, like the original loop. */
    int power = 1;

    while (n-- > 0)
        power *= 2;
    return power;
}
/*==================================================================*/
void calc_self_space(BNST_DATA *ptr, int depth2)
/*==================================================================*/
{
    /* Compute the horizontal space this node needs when the tree is
       drawn, storing the result in ptr->space.
       depth2 is the node's depth in the drawn tree (root = 1). */
    if (ptr->para_top_p == TRUE)
        /* parallel-structure head: fixed width for its marker */
        ptr->space = 4;
    else if (OptDisplay == OPT_NORMAL || OptDisplay == OPT_SIMPLE)
        ptr->space = ptr->length;
    else if (ptr->type == IS_MRPH_DATA)
        ptr->space = ptr->length + 1;
    else
        ptr->space = ptr->length + ptr->mrph_num; /* *4 */
    /* nodes attached to a parallel structure need one extra column */
    if (ptr->para_type == PARA_NORMAL ||
        ptr->para_type == PARA_INCOMP ||
        ptr->to_para_p == TRUE)
        ptr->space += 1;
    /* each additional depth level adds 8 columns of branch drawing */
    ptr->space += (depth2-1)*8;
}
/*==================================================================*/
void calc_tree_width(BNST_DATA *ptr, int depth2)
/*==================================================================*/
{
    /* Walk the tree under ptr and record, in the global max_width, the
       widest space requirement of any node.  depth2 is ptr's depth
       (root = 1). */
    int c;

    calc_self_space(ptr, depth2);
    if (max_width < ptr->space) {
        max_width = ptr->space;
    }
    for (c = 0; ptr->child[c]; c++) {
        calc_tree_width(ptr->child[c], depth2 + 1);
    }
}
/*==================================================================*/
void show_link(int depth, char *ans_flag, char para_type, char to_para_p)
/*==================================================================*/
{
    /* Draw the branch connecting the just-printed node to its parent,
       followed by continuation marks for ancestors that still have
       unprinted siblings.
       depth:    this node's depth in the tree (root = 1; the root draws
                 nothing at all).
       ans_flag: ans_flag[n-1] is '1' when the node at depth n is NOT the
                 last child of its parent (its branch must continue),
                 '0' otherwise.
       Output goes to the global Outfp; OPT_TABLE mode uses shorter
       horizontal strokes / narrower padding. */
    int i;
    if (depth != 1) {
        /* branch toward the parent (sibling-aware) */
        if (para_type == PARA_NORMAL ||
            para_type == PARA_INCOMP ||
            to_para_p == TRUE) {
            /* parallel node: one stroke shorter (its marker fills the gap) */
            if (OptExpress != OPT_TABLE)
                fprintf(Outfp, "─");
        }
        else {
            if (OptExpress == OPT_TABLE)
                fprintf(Outfp, "─");
            else
                fprintf(Outfp, "──");
        }
        if (ans_flag[depth-1] == '1')
            fprintf(Outfp, "┤");
        else
            fprintf(Outfp, "┐");
        if (OptExpress == OPT_TABLE)
            fprintf(Outfp, " ");
        else
            fprintf(Outfp, " ");
        /* continuation bars for ancestors' remaining siblings */
        for (i = depth - 1; i > 1; i--) {
            if (OptExpress == OPT_TABLE)
                fprintf(Outfp, " ");
            else
                fprintf(Outfp, " ");
            if (ans_flag[i-1] == '1')
                fprintf(Outfp, "│");
            else if (OptExpress == OPT_TABLE)
                fprintf(Outfp, " ");
            else
                fprintf(Outfp, " ");
            if (OptExpress == OPT_TABLE)
                fprintf(Outfp, " ");
            else
                fprintf(Outfp, " ");
        }
    }
}
/*==================================================================*/
void show_self(BNST_DATA *ptr, int depth, char *ans_flag_p, int flag)
/*==================================================================*/
{
    /*
      Recursively print the tree under ptr, children first.
      depth is this node's depth (root = 1).
      ans_flag records whether this node and each ancestor is the last
      child: ans_flag[n-1] is '0' when the node at depth n is the last
      child, '1' otherwise (a '1' means its branch line must keep being
      drawn by show_link).
      flag: 0 = ordinary node (finish the output line),
            1 = parallel head printed inline, followed by "─",
            2 = parallel head printed inline, followed by "-".
    */
    int i, j;
    char ans_flag[BNST_MAX];
    if (ans_flag_p) {
        /* NOTE(review): strncpy may leave ans_flag unterminated, but only
           indices below the current depth are ever read */
        strncpy(ans_flag, ans_flag_p, BNST_MAX);
    } else {
        ans_flag[0] = '0'; /* first (root) invocation */
    }
    if (ptr->child[0]) {
        /* count the children: i ends as the child count */
        for (i = 0; ptr->child[i]; i++);
        /* the last child gets ans_flag '0' */
        ans_flag[depth] = '0';
        show_self(ptr->child[i-1], depth+1, ans_flag, 0);
        if (i > 1) {
            /* the other children get ans_flag '1' */
            ans_flag[depth] = '1';
            for (j = i - 2; j > 0; j--) {
                show_self(ptr->child[j], depth+1, ans_flag, 0);
            }
            /* flag: 1: ─PARA 2: -<P>PARA */
            if (ptr->para_top_p == TRUE &&
                ptr->para_type == PARA_NIL &&
                ptr->to_para_p == FALSE) {
                show_self(ptr->child[0], depth+1, ans_flag, 1);
            } else if (ptr->para_top_p == TRUE) {
                show_self(ptr->child[0], depth+1, ans_flag, 2);
            } else {
                show_self(ptr->child[0], depth+1, ans_flag, 0);
            }
        }
    }
    calc_self_space(ptr, depth);
    if (OptExpress != OPT_TABLE) {
        /* right-align the node text so the branch columns line up */
        if ( ptr->para_top_p != TRUE ) {
            for (i = 0; i < max_width - ptr->space; i++)
                fputc(' ', Outfp);
        }
    }
    if (OptExpress & OPT_MRPH) {
        /* morpheme-tree mode: ptr actually points at MRPH_DATA */
        print_mrph_with_para((MRPH_DATA *)ptr, NULL);
    }
    else {
        print_bnst(ptr, NULL);
    }
    if (flag == 0) {
        show_link(depth, ans_flag, ptr->para_type, ptr->to_para_p);
        if (OptExpress == OPT_TREEF) {
            print_some_feature(ptr->f, Outfp);
        }
        fputc('\n', Outfp);
    } else if ( flag == 1 ) {
        if (OptExpress != OPT_TABLE)
            fprintf(Outfp, "─");
    } else if ( flag == 2 ) {
        if (OptExpress != OPT_TABLE)
            fprintf(Outfp, "-");
    }
}
/*==================================================================*/
void show_sexp(BNST_DATA *ptr, int depth, int pars)
/*==================================================================*/
{
    /* Print the dependency tree under ptr as an indented S-expression.
       depth: indentation in spaces.
       pars:  number of extra closing parentheses this subtree still owes
              to enclosing expressions (emitted at the leaves). */
    int i;
    for (i = 0; i < depth; i++) fputc(' ', Outfp);
    fprintf(Outfp, "(");
    if ( ptr->para_top_p == TRUE ) {
        /* head of a parallel structure: labelled by the conjuncts' key type */
        if (ptr->child[1] &&
            ptr->child[1]->para_key_type == PARA_KEY_N)
            fprintf(Outfp, "(noun_para");
        else
            fprintf(Outfp, "(pred_para");
        if (ptr->child[0]) {
            fputc('\n', Outfp);
            i = 0;
            while (ptr->child[i+1] && ptr->child[i+1]->para_type != PARA_NIL) {
                /* all <P> conjuncts except the last */
                /* UCHI fputc(',', Outfp); */
                show_sexp(ptr->child[i], depth + 3, 0); i ++;
            }
            if (ptr->child[i+1]) { /* further (non-conjunct) children exist */
                /* last <P> conjunct */
                /* UCHI fputc(',', Outfp); */
                show_sexp(ptr->child[i], depth + 3, 1); i ++;
                /* remaining children except the last */
                while (ptr->child[i+1]) {
                    /* UCHI fputc(',', Outfp); */
                    show_sexp(ptr->child[i], depth + 3, 0); i ++;
                }
                /* the last remaining child also closes this level */
                /* UCHI fputc(',', Outfp); */
                show_sexp(ptr->child[i], depth + 3, pars + 1);
            }
            else {
                /* last <P> conjunct closes the conjunct list AND this level */
                /* UCHI fputc(',', Outfp); */
                show_sexp(ptr->child[i], depth + 3, pars + 1 + 1);
            }
        }
    }
    else {
        print_bnst_detail(ptr);
        if (ptr->child[0]) {
            fputc('\n', Outfp);
            for ( i=0; ptr->child[i+1]; i++ ) {
                /* UCHI fputc(',', Outfp); */
                show_sexp(ptr->child[i], depth + 3, 0);
            }
            /* the last child emits this node's closing parenthesis */
            /* UCHI fputc(',', Outfp); */
            show_sexp(ptr->child[i], depth + 3, pars + 1);
        } else {
            /* leaf: close itself plus every parenthesis it owes */
            for (i = 0; i < pars + 1; i++) fputc(')', Outfp);
            fputc('\n', Outfp);
        }
    }
}
/*==================================================================*/
void print_kakari(SENTENCE_DATA *sp, int type, int eos_flag)
/*==================================================================*/
{
    /* Print the dependency structure rooted at the sentence's real last
       unit, then finish with EOS (or advance the table counters).
       type selects the unit: OPT_SEXP (S-expression), OPT_NOTAG
       (bunsetsu tree), OPT_MRPH (morpheme tree), otherwise tag tree. */
    int i, last_b_offset = 1, last_t_offset = 1;
    /* The final bunsetsu / tag may have been merged away by
       postprocessing (num == -1), so locate the real last ones. */
    if (OptPostProcess) {
        for (i = sp->Bnst_num - 1; i >= 0; i--) {
            if ((sp->bnst_data + i)->num != -1) {
                last_b_offset = sp->Bnst_num - i;
                break;
            }
        }
        for (i = sp->Tag_num - 1; i >= 0; i--) {
            if ((sp->tag_data + i)->num != -1) {
                last_t_offset = sp->Tag_num - i;
                break;
            }
        }
    }
    /* display of the dependency tree */
    if (type == OPT_SEXP) {
        show_sexp((sp->bnst_data + sp->Bnst_num - last_b_offset), 0, 0);
    }
    /* drawing the bunsetsu tree */
    else if (type & OPT_NOTAG) {
        max_width = 0;
        calc_tree_width((sp->bnst_data + sp->Bnst_num - last_b_offset), 1);
        show_self((sp->bnst_data + sp->Bnst_num - last_b_offset), 1, NULL, 0);
    }
    /* drawing the morpheme tree */
    else if (type & OPT_MRPH) {
        max_width = 0;
        calc_tree_width((BNST_DATA *)(sp->mrph_data + sp->Mrph_num - 1), 1);
        show_self((BNST_DATA *)(sp->mrph_data + sp->Mrph_num - 1), 1, NULL, 0);
    }
    /* drawing the tag-unit tree */
    else {
        max_width = 0;
        calc_tree_width((BNST_DATA *)(sp->tag_data + sp->Tag_num - last_t_offset), 1);
        show_self((BNST_DATA *)(sp->tag_data + sp->Tag_num - last_t_offset), 1, NULL, 0);
    }
    if (OptExpress == OPT_TABLE) {
        /* table mode: bump the global counters instead of printing EOS */
        Tag_Num = 1;
        Sen_Num++;
    }
    else {
        print_eos(eos_flag);
    }
}
/*====================================================================
チェック用
====================================================================*/
/*==================================================================*/
void check_bnst(SENTENCE_DATA *sp)
/*==================================================================*/
{
    /* Debug dump: for every bunsetsu print its morphemes (the head
       morpheme bracketed), its features, and - for predicates - which
       surface cases were observed.
       The surface-case listing is table-driven, replacing eleven
       copy-pasted if-blocks with identical output and order. */
    static const char *scase_names[] = {
        "ガ格", "ヲ格", "ニ格", "デ格", "カラ格", "ト格",
        "ヨリ格", "ヘ格", "マデ格", "ノ格", "ガ2"
    };
    static const char *scase_labels[] = {
        "ガ", "ヲ", "ニ", "デ", "カラ", "ト",
        "ヨリ", "ヘ", "マデ", "ノ", "ガ2"
    };
    int scase_num = sizeof(scase_names) / sizeof(scase_names[0]);
    int i, j, k;
    BNST_DATA *ptr;
    char b_buffer[BNST_LENGTH_MAX];

    for (i = 0; i < sp->Bnst_num; i++) {
        ptr = &sp->bnst_data[i];
        b_buffer[0] = '\0';
        for (j = 0; j < ptr->mrph_num; j++) {
            /* stop before overflowing b_buffer: the worst case adds the
               morpheme plus "[", "]", " " and the terminator */
            if (strlen(b_buffer) + strlen((ptr->mrph_ptr + j)->Goi2) + 4 > BNST_LENGTH_MAX) {
                break;
            }
            if (ptr->mrph_ptr + j == ptr->head_ptr) {
                /* bracket the head morpheme */
                strcat(b_buffer, "[");
                strcat(b_buffer, (ptr->mrph_ptr + j)->Goi2);
                strcat(b_buffer, "]");
            }
            else {
                strcat(b_buffer, (ptr->mrph_ptr + j)->Goi2);
            }
            strcat(b_buffer, " ");
        }
        fprintf(Outfp, "%-20s", b_buffer);
        print_feature(ptr->f, Outfp);
        if (check_feature(ptr->f, "用言") ||
            check_feature(ptr->f, "準用言")) {
            /* predicate (or quasi-predicate): list its surface cases */
            fprintf(Outfp, " <表層格:");
            for (k = 0; k < scase_num; k++) {
                if (ptr->SCASE_code[case2num((char *)scase_names[k])])
                    fprintf(Outfp, "%s,", scase_labels[k]);
            }
            fprintf(Outfp, ">");
        }
        fputc('\n', Outfp);
    }
}
/*==================================================================*/
void print_case_for_table(SENTENCE_DATA *sp)
/*==================================================================*/
{
int i;
char *cp, *next, buf1[SMALL_DATA_LEN2], buf2[SMALL_DATA_LEN2], buf3[SMALL_DATA_LEN2];
for (i = 0; i < sp->Tag_num; i++) {
if ((cp = check_feature((sp->tag_data + i)->f, "格解析結果"))) {
/* OPT_TABLE */
if (OptExpress == OPT_TABLE) {
fprintf(Outfp, "%%%% %d %d 2 LABEL=%d_%dd style=white-space:nowrap\n",
Sen_Num - 1, i + 2, Sen_Num - 1, i + 1);
fprintf(Outfp, "*\n");
}
/* O */
cp = check_feature((sp->tag_data + i)->f, "格解析結果");
while (next = strstr(cp, "/O/")) {
cp = next;
while (cp[0] != ';' && cp[0] != ':') cp--;
if (sscanf(cp, "%*[:;]%[^/]%*[/]%[^/]%*[/]%[^/]%*[/]", buf1, buf2, buf3)) {
fprintf(Outfp, "%%%% %d %d 2 style=white-space:nowrap\n", Sen_Num - 1, i + 2);
fprintf(Outfp, " %s:%s \n", buf1, buf3);
cp = strstr(cp, buf2) + 1;
}
}
/* C */
cp = check_feature((sp->tag_data + i)->f, "格解析結果");
while (next = strstr(cp, "/C/")) {
cp = next;
while (cp[0] != ';' && cp[0] != ':') cp--;
if (sscanf(cp, "%*[:;]%[^/]%*[/]%[^/]%*[/]%[^/]%*[/]", buf1, buf2, buf3)) {
fprintf(Outfp, "%%%% %d %d 2 style=white-space:nowrap\n", Sen_Num - 1, i + 2);
fprintf(Outfp, " [%s:%s] \n", buf1, buf3);
cp = strstr(cp, buf2) + 1;
}
}
/* N */
cp = check_feature((sp->tag_data + i)->f, "格解析結果");
while (next = strstr(cp, "/N/")) {
cp = next;
while (cp[0] != ';' && cp[0] != ':') cp--;
if (sscanf(cp, "%*[:;]%[^/]%*[/]%[^/]%*[/]%[^/]%*[/]", buf1, buf2, buf3)) {
fprintf(Outfp, "%%%% %d %d 2 style=white-space:nowrap\n", Sen_Num - 1, i + 2);
fprintf(Outfp, " [%s:%s] \n", buf1, buf3);
cp = strstr(cp, buf2) + 1;
}
}
}
}
}
/*==================================================================*/
void print_corefer_for_table(SENTENCE_DATA *sp)
/*==================================================================*/
{
    /* Emit coreference table cells: for every tag carrying a COREFER_ID
       feature print an ID cell; if the tag also carries REFERRED, print
       the same ID on the referred mention's row (s_num sentences back,
       tag t_num). */
    int i, s_num, t_num;
    char *cp;
    for (i = 0; i < sp->Tag_num; i++) {
        if ((cp = check_feature((sp->tag_data + i)->f, "COREFER_ID"))) {
            fprintf(Outfp, "%%%% %d %d 2 style=white-space:nowrap\n", Sen_Num - 1, i + 2);
            /* cp + 11 skips the "COREFER_ID:" prefix */
            fprintf(Outfp, " ID=%s \n", cp + 11);
            if (check_feature((sp->tag_data + i)->f, "REFERRED")) {
                /* feature format: REFERRED:<sentence distance>-<tag index> */
                sscanf(check_feature((sp->tag_data + i)->f, "REFERRED"),
                       "REFERRED:%d-%d", &s_num, &t_num);
                fprintf(Outfp, "%%%% %d %d 2 style=white-space:nowrap\n", Sen_Num - s_num - 1, t_num + 2);
                fprintf(Outfp, " ID=%s \n", cp + 11);
            }
        }
    }
}
/*==================================================================*/
void print_ne_for_table(SENTENCE_DATA *sp)
/*==================================================================*/
{
    /* Emit one named-entity table cell for every tag carrying an "NE"
       feature; feature + 3 skips the "NE:" prefix. */
    int tag;
    char *feature;

    for (tag = 0; tag < sp->Tag_num; tag++) {
        feature = check_feature((sp->tag_data + tag)->f, "NE");
        if (feature == NULL)
            continue;
        fprintf(Outfp, "%%%% %d %d 2 style=white-space:nowrap\n", Sen_Num - 1, tag + 2);
        fprintf(Outfp, " %s \n", feature + 3);
    }
}
/*==================================================================*/
void print_result(SENTENCE_DATA *sp, int case_print_flag, int eos_flag)
/*==================================================================*/
{
    /* Top-level result printer: emits the header comment line (S-ID,
       version/date, score, error/warning, memo), then the analysis in
       the format selected by OptExpress, then optional case-analysis
       output.  case_print_flag: also print the case-analysis result
       (when the relevant options are active). */
    char *date_p, time_string[64];
    time_t t;
    struct tm *tms;
    TOTAL_MGR *tm = sp->Best_mgr;
    /* current date for the DATE: header field */
    t = time(NULL);
    tms = localtime(&t);
    if (!strftime(time_string, 64, "%Y/%m/%d", tms))
        time_string[0] = '\0';
    /* for PS output
        dpnd_info_to_bnst(&(tm->dpnd));
        make_dpnd_tree();
        print_kakari2ps();
        return;
    */
    /* when pattern-matching against existing analyses, skip output if
       nothing matched
        if (OptAnalysis == OPT_AssignF && !PM_Memo[0]) return;
    */
    /* header */
    if (OptExpress == OPT_TABLE) {
        if (OptAnalysis == OPT_CASE || OptAnalysis == OPT_CASE2 || OptNE) {
            fprintf(Outfp, "%%%% %d %d 2\n", Sen_Num, Tag_Num);
            fprintf(Outfp, "解析結果\n");
        }
        fprintf(Outfp, "%%%% %d %d 1 style=white-space:nowrap\n", Sen_Num, Tag_Num++);
    }
    /* S-ID */
    if (sp->KNPSID) {
        fprintf(Outfp, "# %s", sp->KNPSID);
    }
    else {
        fprintf(Outfp, "# S-ID:%d", sp->Sen_num);
    }
    /* comment */
    if (sp->Comment) {
        fprintf(Outfp, " %s", sp->Comment);
    }
    if (OptInput == OPT_RAW) {
        fprintf(Outfp, " KNP:%s-%s", REVISION_VERSION, REVISION_ID);
        /* the DATE environment variable overrides the current date */
        if ((date_p = (char *)getenv("DATE")))
            fprintf(Outfp, " DATE:%s", date_p);
        else if (time_string[0])
            fprintf(Outfp, " DATE:%s", time_string);
    }
    /* print the score (CKY parsing, normal input) */
    if (OptCKY && !(OptInput & OPT_INPUT_PARSED)) {
        fprintf(Outfp, " SCORE:%.5f", sp->score);
    }
    /* error details, if any */
    if (ErrorComment) {
        fprintf(Outfp, " ERROR:%s", ErrorComment);
        free(ErrorComment);
        ErrorComment = NULL;
    }
    /* warning details, if any */
    if (WarningComment) {
        fprintf(Outfp, " WARNING:%s", WarningComment);
        free(WarningComment);
        WarningComment = NULL;
    }
    if (PM_Memo[0]) {
        /* avoid a duplicate "MEMO:" label when the comment has one */
        if (sp->Comment && strstr(sp->Comment, "MEMO")) {
            fprintf(Outfp, "%s", PM_Memo);
        } else {
            fprintf(Outfp, " MEMO:%s", PM_Memo);
        }
    }
    fprintf(Outfp, "\n");
    /* main output of the analysis result */
    if (OptExpress == OPT_MRPH) {
        print_mrphs(sp, eos_flag);
    }
    else if (OptExpress == OPT_TAB) {
        print_tags(sp, 1, eos_flag);
    }
    else if (OptExpress == OPT_NOTAG) {
        print_bnst_with_mrphs(sp, 1, eos_flag);
    }
    else if (OptExpress == OPT_PA) {
        /* FIXME: must be made consistent with the case-analysis result */
        print_pa_structure(sp, eos_flag);
    }
    else if (OptExpress == OPT_BNSTTREE) {
        /* bunsetsu tree output */
        if (make_dpnd_tree(sp)) {
            print_kakari(sp, OptExpress, eos_flag);
        }
        else {
            print_eos(eos_flag);
        }
    }
    else if (OptExpress == OPT_MRPHTREE) {
        /* morpheme tree output */
        if (make_dpnd_tree(sp)) {
            bnst_to_mrph_tree(sp); /* to the morpheme tree */
            print_kakari(sp, OptExpress, eos_flag);
        }
        else {
            print_eos(eos_flag);
        }
    }
    else {
        /* tag-unit tree output */
        if (make_dpnd_tree(sp)) {
            bnst_to_tag_tree(sp); /* to the tag-unit tree */
            print_kakari(sp, OptExpress, eos_flag); /* OPT_TREE */
        }
        else {
            print_eos(eos_flag);
        }
    }
    if (OptExpress == OPT_TABLE) {
        print_tags(sp, 1, eos_flag);
        if (OptAnalysis == OPT_CASE || OptAnalysis == OPT_CASE2)
            print_case_for_table(sp);
        if (OptNE)
            print_ne_for_table(sp);
        if (OptEllipsis & OPT_COREFER)
            print_corefer_for_table(sp);
    }
    /* with options such as nbest this function runs more than once per
       sentence, so undo the postprocessing for the next call */
    if (OptPostProcess) {
        undo_tag_bnst_postprocess(sp);
    }
    /* output when case analysis was performed */
    if (case_print_flag &&
        !OptArticle && /* Best_mgr of past articles is not kept, so this would segfault */
        (((OptAnalysis == OPT_CASE ||
           OptAnalysis == OPT_CASE2) &&
          (OptDisplay == OPT_DETAIL ||
           OptDisplay == OPT_DEBUG ||
           OptExpress == OPT_TABLE)) ||
         (OptEllipsis &&
          VerboseLevel >= VERBOSE1))) {
        print_case_result(sp, Sen_Num);
        /* reset for the next analysis */
        tm->pred_num = 0;
    }
}
/*==================================================================*/
void do_postprocess(SENTENCE_DATA *sp)
/*==================================================================*/
{
    /* Post-processing: build the dependency tree, convert it to tag
       units, then merge tags/bunsetsu.  Flat output formats renumber the
       merged units; tree formats keep the original num / dpnd_head
       numbering so the tree can still be drawn. */
    if (make_dpnd_tree(sp)) {
        bnst_to_tag_tree(sp); /* to the tag-unit tree */
        if (OptExpress == OPT_TAB ||
            OptExpress == OPT_NOTAG) {
            tag_bnst_postprocess(sp, 1);
        }
        else {
            tag_bnst_postprocess(sp, 0); /* for tree output, do not renumber num and dpnd_head */
        }
    }
}
/*==================================================================*/
void push_entity(char ***list, char *key, int count, int *max)
/*==================================================================*/
{
    /* Append key at position count of *list, growing the array on
       demand: the first allocation is ALLOCATION_STEP slots; afterwards
       the capacity (*max) doubles whenever count reaches it. */
    if (*max == 0) {
        *max = ALLOCATION_STEP;
        *list = (char **)malloc_data(sizeof(char *) * (*max), "push_entity");
    }
    else if (count >= *max) {
        *max <<= 1;
        *list = (char **)realloc_data(*list, sizeof(char *) * (*max), "push_entity");
    }
    (*list)[count] = key;
}
/*==================================================================*/
void prepare_entity(BNST_DATA *bp)
/*==================================================================*/
{
    /* Collect the modality features present on the bunsetsu's morphemes
       (each kind at most once) and attach them to the bunsetsu as a
       single "C:<feat> <feat> ..." feature.
       Table-driven rewrite of sixteen copy-pasted if-blocks: the bit
       mask still guarantees every modality is collected only once, and
       the per-morpheme check order is unchanged. */
    static const char *modality_features[] = {
        "Modality-意思-依頼",
        "Modality-意思-意志",
        "Modality-意思-勧誘",
        "Modality-意思-願望",
        "Modality-意思-禁止",
        "Modality-意思-三人称意志",
        "Modality-意思-申し出",
        "Modality-意思-推量",
        "Modality-意思-命令",
        "Modality-当為",
        "Modality-当為-許可",
        "Modality-判断-可能性",
        "Modality-判断-可能性-不可能",
        "Modality-判断-推量",
        "Modality-判断-伝聞",
        "Modality-判断-様態"
    };
    int feature_num = sizeof(modality_features) / sizeof(modality_features[0]);
    int count = 0, max = 0, i, j;
    unsigned int flag = 0;	/* bit j set: feature j already collected */
    char *cp, **list = NULL, *str;

    for (i = 0; i < bp->mrph_num; i++) {
        for (j = 0; j < feature_num; j++) {
            if (!(flag & (1u << j)) &&
                (cp = check_feature((bp->mrph_ptr + i)->f, (char *)modality_features[j]))) {
                push_entity(&list, cp, count++, &max);
                flag |= (1u << j);
            }
        }
    }
    /* if any modality was collected, emit the combined feature */
    if (count) {
        int len = 0;
        for (i = 0; i < count; i++) {
            len += strlen(list[i]) + 1;	/* entry + separator/terminator */
        }
        str = (char *)malloc_data(sizeof(char) * (len + 2), "print_entity");
        strcpy(str, "C:");
        for (i = 0; i < count; i++) {
            if (i != 0) strcat(str, " ");
            strcat(str, list[i]);
        }
        assign_cfeature(&(bp->f), str, FALSE);
        free(str);
        free(list);
    }
}
/*==================================================================*/
void prepare_all_entity(SENTENCE_DATA *sp)
/*==================================================================*/
{
    /* Attach the modality summary feature to every bunsetsu. */
    int b;

    for (b = 0; b < sp->Bnst_num; b++)
        prepare_entity(&sp->bnst_data[b]);
}
/*==================================================================*/
void print_tree_for_chinese(SENTENCE_DATA *sp)
/*==================================================================*/
{
    /* Draw the dependency tree using box-drawing corners, one cell row
       per bunsetsu labelled "word/pos", printed column-by-column.
       Fix over the original: cells are compared by POINTER identity, so
       the blank / empty / filler markers are now single named constants.
       The old code compared against fresh string literals ("" , "***",
       " ") and only worked because the compiler pooled identical
       literals - behavior the C standard does not guarantee. */
    int i, j, k, max_len, len, max_inverse_len;
    char* up_corner = "┌─";
    char* down_corner = "└─";
    char* middle_corner = "├─";
    char* link = "│";
    char* para = "<P>";
    char* para_head = "<PARA><P>";
    char* blank = " ";          /* spacer cell; may be turned into a link */
    char* empty_mark = "";      /* cell not yet used */
    char* filler_mark = "***";  /* padding cell, rendered as a space */
    BNST_DATA *b_ptr;

    /* initialization: mark every cell unused */
    for (i = 0; i < sp->Bnst_num; i++) {
        for (j = 0; j < TREE_WIDTH_MAX; j++) {
            bnst_tree[i][j] = empty_mark;
        }
    }
    /* read data from the bunsetsu array into the parallel work arrays */
    for (i = 0, b_ptr = sp->bnst_data; i < sp->Bnst_num; i++, b_ptr++) {
        bnst_word[i] = b_ptr->head_ptr->Goi;
        bnst_pos[i] = b_ptr->head_ptr->Pos;
        bnst_dpnd[i] = b_ptr->dpnd_head;
        bnst_level[i] = -1;
    }
    /* compute each node's depth (root = 0): walk toward the root until a
       node with a known level is reached */
    for (i = 0; i < sp->Bnst_num; i++) {
        while (bnst_level[i] == -1) {
            j = i;
            while (bnst_dpnd[j] != -1 && bnst_level[bnst_dpnd[j]] == -1) {
                j = bnst_dpnd[j];
            }
            if (bnst_dpnd[j] == -1) {
                bnst_level[j] = 0;
            }
            else {
                bnst_level[j] = bnst_level[bnst_dpnd[j]] + 1;
            }
        }
    }
    /* build one cell row per bunsetsu: spacers up to the head's column,
       a corner toward the head, optional parallel marker, then word/pos */
    max_len = -1;
    for (i = 0; i < sp->Bnst_num; i++) {
        len = 0;
        for (j = 0; j < (bnst_level[i] * 4); j++) {
            if (bnst_dpnd[i] != -1 && j < (bnst_level[bnst_dpnd[i]] * 4)) {
                if (len >= TREE_WIDTH_MAX) {
                    fprintf(Outfp, ">>>tree width exceeds maximum length\n");
                    return;
                }
                bnst_tree[i][len] = blank;
                len++;
            }
            else if (bnst_dpnd[i] != -1 && j == (bnst_level[bnst_dpnd[i]] * 4)) {
                if (bnst_dpnd[i] < i) {
                    /* head is earlier in the sentence */
                    if (len >= TREE_WIDTH_MAX) {
                        fprintf(Outfp, ">>>tree width exceeds maximum length\n");
                        return;
                    }
                    bnst_tree[i][len] = down_corner;
                    len++;
                }
                else if (bnst_dpnd[i] > i) {
                    /* head is later in the sentence */
                    if (len >= TREE_WIDTH_MAX) {
                        fprintf(Outfp, ">>>tree width exceeds maximum length\n");
                        return;
                    }
                    bnst_tree[i][len] = up_corner;
                    len++;
                }
            }
        }
        if (len >= TREE_WIDTH_MAX) {
            fprintf(Outfp, ">>>tree width exceeds maximum length\n");
            return;
        }
        if ((sp->bnst_data + i)->is_para == 1) {
            bnst_tree[i][len] = para;
            len++;
        }
        else if ((sp->bnst_data + i)->is_para == 2) {
            bnst_tree[i][len] = para_head;
            len++;
        }
        /* NOTE(review): the three cells below were not bounds-checked in
           the original either; TREE_WIDTH_MAX is assumed large enough */
        bnst_tree[i][len] = bnst_word[i];
        len++;
        bnst_tree[i][len] = "/";
        len++;
        bnst_tree[i][len] = bnst_pos[i];
        len++;
        if (len > max_len) {
            max_len = len;
        }
    }
    /* pad every row out to max_len with the filler marker */
    for (i = 0; i < sp->Bnst_num; i++) {
        for (j = 0; j < max_len; j++) {
            if (bnst_tree[i][j] == empty_mark) {
                bnst_tree[i][j] = filler_mark;
            }
        }
    }
    /* inverse the tree: column i becomes row i, bunsetsu order reversed */
    max_inverse_len = -1;
    for (i = 0; i < max_len; i++) {
        len = 0;
        for (j = sp->Bnst_num - 1; j > -1; j--) {
            bnst_inverse_tree[i][len] = bnst_tree[j][i];
            len++;
        }
        if (len > max_inverse_len) {
            max_inverse_len = len;
        }
    }
    /* connect corners sharing a column: runs between two corners become
       middle corners, blanks in between become vertical links */
    for (i = 0; i < max_len; i++) {
        for (j = 0; j < sp->Bnst_num; j++) {
            if (bnst_inverse_tree[i][j] == down_corner) {
                for (k = j + 1; k < sp->Bnst_num; k++) {
                    if (bnst_inverse_tree[i][k] == down_corner) {
                        bnst_inverse_tree[i][k] = middle_corner;
                    }
                    else if (bnst_inverse_tree[i][k] == blank) {
                        bnst_inverse_tree[i][k] = link;
                    }
                    else {
                        break;
                    }
                }
            }
            else if (bnst_inverse_tree[i][j] == up_corner) {
                for (k = j - 1; k > -1; k--) {
                    if (bnst_inverse_tree[i][k] == up_corner) {
                        bnst_inverse_tree[i][k] = middle_corner;
                    }
                    else if (bnst_inverse_tree[i][k] == blank) {
                        bnst_inverse_tree[i][k] = link;
                    }
                    else {
                        break;
                    }
                }
            }
        }
    }
    /* inverse again and print, with a numeric row label.  The original
       printed the label with the identical format in all three magnitude
       branches, so a single fprintf is equivalent. */
    for (i = max_inverse_len - 1; i > -1; i--) {
        fprintf(Outfp, "%d ", max_inverse_len - 1 - i);
        for (j = 0; j < max_len; j++) {
            if (bnst_inverse_tree[j][i] != filler_mark) {
                fprintf(Outfp, "%s", bnst_inverse_tree[j][i]);
            }
            else {
                fprintf(Outfp, " ");
            }
        }
        fprintf(Outfp, "\n");
    }
}
/*====================================================================
END
====================================================================*/
|
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE RankNTypes #-}
module General.TestAPI where
import Test.Tasty.HUnit (assertFailure)
import Control.Monad ((>=>))
import Control.Monad.Catch (MonadThrow, displayException,
throwM)
import Control.Monad.IO.Class (MonadIO, liftIO)
import Servant.API.Generic
import qualified Servant.Client as C
import Servant.Client.Generic (genericClientHoist)
import Protocol.Webdriver.ClientAPI (WDCore (..))
import qualified Protocol.Webdriver.ClientAPI as W
-- | Run a 'C.ClientM' action in @m@, rethrowing any client error
-- through 'throwM'.
liftThrow :: (MonadThrow m, MonadIO m) => C.ClientEnv -> C.ClientM a -> m a
liftThrow env action = do
  result <- liftIO (C.runClientM action env)
  either throwM pure result
-- | Run a 'C.ClientM' action in 'IO', failing the current HUnit test
-- (with the rendered exception) on a client error.
liftTest :: C.ClientEnv -> C.ClientM a -> IO a
liftTest env action =
  C.runClientM action env >>= either (assertFailure . displayException) pure
-- | Build a 'WDCore' from a natural transformation @nt@ that interprets
-- 'C.ClientM' requests in @m@ (e.g. 'liftTest' or 'liftThrow') and the
-- client environment to run them against.
mkWDCore :: (forall a. C.ClientEnv -> C.ClientM a -> m a) -> C.ClientEnv -> WDCore m
mkWDCore nt env =
  let
    -- generic webdriver client hoisted into m via nt
    wdcore = genericClientHoist (nt env)
  in
    WDCore wdcore
      (fromServant . W.withSession wdcore)
      (fromServant . W.withWindow)
      (\sess -> fromServant . W.withElement sess)
-- | A 'WDCore' whose calls fail the current HUnit test on client errors.
mkWDCoreTest :: C.ClientEnv -> WDCore IO
mkWDCoreTest = mkWDCore liftTest
-- | A 'WDCore' whose calls rethrow client errors via 'throwM'.
mkWDCoreThrow :: (MonadThrow m, MonadIO m) => C.ClientEnv -> WDCore m
mkWDCoreThrow = mkWDCore liftThrow
|
-- copied from https://github.com/QuickChick/Luck/blob/master/luck/examples-template/C.hs
-- MIT License
--
-- Copyright (c) 2016 QuickChick
--
-- Permission is hereby granted, free of charge, to any person obtaining a copy
-- of this software and associated documentation files (the "Software"), to deal
-- in the Software without restriction, including without limitation the rights
-- to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-- copies of the Software, and to permit persons to whom the Software is
-- furnished to do so, subject to the following conditions:
--
-- The above copyright notice and this permission notice shall be included in all
-- copies or substantial portions of the Software.
--
-- THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-- IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-- FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-- AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-- LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-- OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-- SOFTWARE.
{-# LANGUAGE TemplateHaskell, RecordWildCards, DeriveDataTypeable #-}
import Control.Monad
import Control.Applicative
import Control.Arrow hiding ((<+>))
import System.IO
import System.Directory
import System.Process
import Control.Concurrent
import Control.Exception
import System.Exit
import System.IO
import System.IO.Error
import System.Posix.Signals
import System.Process.Internals
import System.Environment
import System.Random
import System.Console.CmdArgs
import System.Exit
import Luck.Template
import Test.QuickCheck
import Data.Data
import Data.Maybe
import Data.List
import System.Directory
import System.Process
import Data.Data
import Text.PrettyPrint (Doc, (<+>), (<>), ($$))
import qualified Text.PrettyPrint as PP
-- | Expressions of the generated mini C-like language.
data Exp = Var Int     -- ^ variable reference, printed as @varN@
         | Int Int     -- ^ integer literal
         | Add Exp Exp -- ^ addition
         | Eq Exp Exp  -- ^ equality comparison
  deriving (Show, Data)
-- | Statements of the generated language.  Each constructor's final
-- 'Stmt' is the continuation (the rest of the block); 'Empty' ends it.
data Stmt = Declare Int Stmt          -- ^ declare varN
          | Asgn Int Exp Stmt         -- ^ varN = e
          | If Exp Stmt Stmt Stmt     -- ^ condition, then, else, continuation
          | For Int Int Int Stmt Stmt -- ^ counter iN, low, high, body, continuation
          | PrintVar Int Stmt         -- ^ printf the value of varN
          | FunCall Int [Exp] Stmt    -- ^ call function aN with arguments
          | Empty
  deriving (Show, Data)
-- | Types that can be pretty-printed as C source text.
class PP a where
  pp :: a -> Doc
-- | Integers print as plain literals.
instance PP Int where
  pp = PP.int
-- | Render expressions; compound expressions are always parenthesized.
instance PP Exp where
  pp (Var x)     = PP.text ("var" ++ show x)
  pp (Int n)     = pp n
  pp (Add e1 e2) = PP.parens (pp e1 <+> PP.char '+' <+> pp e2)
  pp (Eq e1 e2)  = PP.parens (pp e1 <+> PP.text "==" <+> pp e2)
-- | Loop counters print as @iN@, kept distinct from the @varN@ namespace.
ppForVar :: Int -> Doc
ppForVar i = PP.text ("i" ++ show i)
-- | Render statements as C source.  Every constructor's trailing
-- continuation statement is printed below with ($$).
instance PP Stmt where
  -- variable declaration
  pp (Declare x s) = PP.text "int" <+> pp (Var x) <+> PP.char ';' $$ pp s
  -- assignment
  pp (Asgn x e s) = pp (Var x) <+> PP.char '=' <+> pp e <+> PP.char ';' $$ pp s
  -- if/else with both branches braced and indented
  pp (If e s1 s2 s') = PP.text "if" <+> PP.parens (pp e) <+> PP.char '{'
                       $$ PP.nest 2 (pp s1)
                       $$ PP.char '}'
                       $$ PP.text "else {"
                       $$ PP.nest 2 (pp s2)
                       $$ PP.char '}'
                       $$ pp s'
  -- printf of a single variable
  pp (PrintVar n s') = PP.text "printf(\"%d\\n\", " <+> pp (Var n) <+> PP.text ");" $$ pp s'
  -- ids -2 and -1 emit calls to the fixed helpers empty() / loop()
  pp (FunCall (-2) [] s') = PP.text "empty();" $$ pp s'
  pp (FunCall (-1) [] s') = PP.text "loop();" $$ pp s'
  pp (FunCall fid es s') =
    PP.char 'a' <> PP.int fid <> PP.char '('
    <> PP.hcat (intersperse (PP.char ',') (map pp es))
    <> PP.text ");" $$ pp s'
  pp Empty = PP.empty
  -- counted for-loop over a dedicated iN counter variable
  pp (For i low high sfor s') =
    PP.text "for (int" <+> ppForVar i <+> PP.char '=' <+> PP.int low <> PP.char ';'
    <+> ppForVar i <+> PP.char '<' <+> PP.int high <> PP.char ';'
    <+> ppForVar i <> PP.text "++) {"
    $$ PP.nest 2 (pp sfor)
    $$ PP.text "}"
    $$ pp s'
  -- pp x = error $ "out"
-- | A generated function: its name plus the statements of its body.
data Fun = Fun String [Stmt] deriving (Data, Show)

-- | Luck-driven generator for 'Fun', compiled from the Luck program at
-- template-expansion time; 'Nothing' signals a failed generation run.
stmtGen :: Gen (Maybe Fun)
stmtGen = $(mkGenQ "minus-minus-solidity.luck") defFlags{_maxUnroll=2} TProxy1
-- | A minimal contract, identified only by a number.
data Contract = Contract Int deriving (Data, Show)

-- | Contracts render as empty @contract CN {}@ declarations.
instance PP Contract where
  pp (Contract i) =
    PP.text "contract C" <> pp i <+> PP.text " {}"

-- | Luck-driven generator for 'Contract'; 'Nothing' signals a failed run.
stringGen :: Gen (Maybe Contract)
stringGen = $(mkGenQ "minus-minus-solidity.luck") defFlags{_maxUnroll=2} TProxy1
-- dump :: Fun -> IO ()
-- dump (Fun _ (t:ts)) = do
-- let tDoc = pp t
-- tsDoc = PP.vcat $ PP.text "#include <stdio.h>"
-- : (PP.text "void loop() { while (1) { printf(\"1\"); } }")
-- : (PP.text "void empty() { }")
-- : map (\(i,t) ->
-- PP.vcat [ PP.text "void a" <> PP.int i <> PP.text "(int var0, int var1, int var2) {"
-- , PP.nest 2 $ pp t
-- , PP.text "}" ]
-- ) (reverse $ zip [1..] $ ts)
-- putStrLn (PP.render tDoc)
-- | Generate one random 'Contract' and print its rendered source;
-- abort with an error if the Luck generator failed to produce a value.
main :: IO ()
main = do
  (mts : _) <- sample' stringGen
  case mts of
    Just c  -> putStrLn $ PP.render $ pp c
    Nothing -> error "Unsuccessful generation"  -- typo fixed ("Unsuccesful")
|
#
# Cookbook Name:: pdftk
# Recipe:: default
# Install the pdftk command-line PDF toolkit via the platform's
# package manager.
package "pdftk" do
  action :install
end
|
package book
import (
"context"
"time"
"github.com/calmato/gran-book/api/server/book/pkg/database"
)
// Repository defines the persistence operations for books, bookshelves,
// reviews and authors (Book repository).
type Repository interface {
	// Listing and counting.
	List(ctx context.Context, q *database.ListQuery) (Books, error)
	ListBookshelf(ctx context.Context, q *database.ListQuery) (Bookshelves, error)
	ListReview(ctx context.Context, q *database.ListQuery) (Reviews, error)
	Count(ctx context.Context, q *database.ListQuery) (int, error)
	CountBookshelf(ctx context.Context, q *database.ListQuery) (int, error)
	CountReview(ctx context.Context, q *database.ListQuery) (int, error)
	// Single / multi record lookups; the *ID* variants return only the
	// record's integer ID.
	MultiGet(ctx context.Context, bookIDs []int) (Books, error)
	Get(ctx context.Context, bookID int) (*Book, error)
	GetByIsbn(ctx context.Context, isbn string) (*Book, error)
	GetBookIDByIsbn(ctx context.Context, isbn string) (int, error)
	GetBookshelfByUserIDAndBookID(ctx context.Context, userID string, bookID int) (*Bookshelf, error)
	GetBookshelfIDByUserIDAndBookID(ctx context.Context, userID string, bookID int) (int, error)
	GetReview(ctx context.Context, reviewID int) (*Review, error)
	GetReviewByUserIDAndBookID(ctx context.Context, userID string, bookID int) (*Review, error)
	GetReviewIDByUserIDAndBookID(ctx context.Context, userID string, bookID int) (int, error)
	GetAuthorByName(ctx context.Context, name string) (*Author, error)
	GetAuthorIDByName(ctx context.Context, name string) (int, error)
	// Mutations.
	Create(ctx context.Context, b *Book) error
	CreateBookshelf(ctx context.Context, b *Bookshelf) error
	Update(ctx context.Context, b *Book) error
	UpdateBookshelf(ctx context.Context, b *Bookshelf) error
	MultipleCreate(ctx context.Context, bs Books) error
	MultipleUpdate(ctx context.Context, bs Books) error
	Delete(ctx context.Context, bookID int) error
	DeleteBookshelf(ctx context.Context, bookshelfID int) error
	// Aggregation over the [since, until] period.
	AggregateReadTotal(ctx context.Context, userID string, since, until time.Time) (MonthlyResults, error)
}
|
#pragma once
#include <cstdint>
#include <string>
#include <vector>
namespace beluga
{
// A single TLS hello extension: a 16-bit extension type code plus its
// raw payload bytes.
class tls_extension
{
public:
	using extension_type = std::uint16_t;

	// Well-known TLS extension type codes.  UNKNOWN (implicitly 0xff02,
	// following RENEGOTIATION_INFO) is the sentinel for anything else.
	enum
	{
		SERVER_NAME = 0x0000,
		ELLIPTIC_CURVES = 0x000a,
		EC_POINT_FORMATS = 0x000b,
		RENEGOTIATION_INFO = 0xff01,
		UNKNOWN,
	};

	using data_type = std::vector<std::uint8_t>;

	// Defaults to an UNKNOWN extension with an empty payload.
	tls_extension(extension_type type = UNKNOWN, const data_type& data = data_type());

	void set_type(extension_type type);
	extension_type get_type() const;

	void set_data(const data_type& data);
	const data_type& get_data() const;
	data_type& get_data();

	// Extracts the host name carried by a SERVER_NAME payload.
	// NOTE(review): implementation not visible here - confirm what is
	// returned for non-SERVER_NAME extensions.
	std::string get_server_name() const;

private:
	extension_type type;
	data_type data;
};
}
|
/* Copyright (c) 2021 DeflatedPickle under the MIT license */
package com.deflatedpickle.mmf.util
import com.deflatedpickle.bellatrix.Vector4
import com.deflatedpickle.mmf.util.serializer.VectorDoubleSerializer
import kotlinx.serialization.ExperimentalSerializationApi
import kotlinx.serialization.SerialName
import kotlinx.serialization.Serializable
/**
 * One face of a model element, as deserialized from the model format.
 *
 * @property uv Texture coordinates as a 4-vector of doubles (presumably
 *   the (x1, y1, x2, y2) UV rectangle — TODO confirm); defaults to zeros.
 * @property texture Pointer to the texture applied to this face.
 * @property rotation Texture rotation value; defaults to 0.
 * @property tintIndex Serialized as "tintindex"; defaults to 0.
 * @property cullface Optional direction (presumably Minecraft-style face
 *   culling — TODO confirm); null when absent.
 * @property comment Free-form annotation, serialized as "__comment".
 */
@ExperimentalSerializationApi
@Serializable
@Suppress("unused", "SpellCheckingInspection")
data class ElementFace(
    val uv: @Serializable(with = VectorDoubleSerializer::class) Vector4<Double> = Vector4(0.0, 0.0, 0.0, 0.0),
    val texture: TexturePointer,
    val rotation: Int = 0,
    @SerialName("tintindex")
    val tintIndex: Int = 0,
    val cullface: Direction? = null,
    @SerialName("__comment")
    val comment: String = ""
)
|
package dev.vini2003.hammer.gui.api.common.util
import dev.vini2003.hammer.core.api.client.util.InstanceUtils
import dev.vini2003.hammer.gui.api.client.util.extension.*
import net.minecraft.client.gui.hud.InGameHud
import net.minecraft.entity.player.PlayerEntity
/**
 * Static helpers around [InGameHud]: exposes the current client's HUD
 * instance and Java-friendly wrappers for the HUD bar position
 * extension functions from the gui client utilities.
 */
object InGameHudUtils {
    // The running client's in-game HUD, or null when no client instance
    // is available (InstanceUtils.CLIENT is null).
    val INSTANCE: InGameHud?
        get() {
            val client = InstanceUtils.CLIENT ?: return null
            return client.inGameHud
        }

    // Each wrapper below delegates to the same-named InGameHud extension
    // function so that Java callers can invoke it statically.
    @JvmStatic
    fun getLeftBarPos(hud: InGameHud, playerEntity: PlayerEntity) = hud.getLeftBarPos(playerEntity)

    @JvmStatic
    fun getRightBarPos(hud: InGameHud, playerEntity: PlayerEntity) = hud.getRightBarPos(playerEntity)

    @JvmStatic
    fun getHeartBarPos(hud: InGameHud, playerEntity: PlayerEntity) = hud.getHeartBarPos(playerEntity)

    @JvmStatic
    fun getArmorBarPos(hud: InGameHud, playerEntity: PlayerEntity) = hud.getArmorBarPos(playerEntity)

    @JvmStatic
    fun getAirBarPos(hud: InGameHud, playerEntity: PlayerEntity) = hud.getAirBarPos(playerEntity)

    @JvmStatic
    fun getHungerBarPos(hud: InGameHud, playerEntity: PlayerEntity) = hud.getHungerBarPos(playerEntity)
}
|
{--
9. Pack consecutive duplicates of list elements into sublists. If a list
contains repeated elements they should be placed in separate sublists.
--}
module Exercises.Lists1.Exercise9
  ( pack
  ) where

import Data.List(group)

-- | Pack consecutive duplicates of list elements into sublists.
-- Behaviourally identical to 'Data.List.group', but spelled out explicitly:
-- each run of equal elements is split off with 'span' and becomes one sublist.
pack :: (Eq a) => [a] -> [[a]]
pack []       = []
pack (x : xs) = (x : run) : pack rest
  where
    (run, rest) = span (== x) xs
|
using Bridge.Test.NUnit;
namespace Bridge.ClientTest.Batch3.BridgeIssues
{
public struct Bridge608A
{
public readonly string field;
public Bridge608A(string field)
{
this.field = field;
}
public override bool Equals(object obj)
{
return Equals(obj.ToString());
}
public bool Equals(string other)
{
return other == field;
}
public override int GetHashCode()
{
return this.field.GetHashCode();
}
}
// Bridge[#608]
[Category(Constants.MODULE_ISSUES)]
[TestFixture(TestNameFormat = "#608 - {0}")]
public class Bridge608
{
[Test(ExpectedCount = 2)]
public static void TestUseCase()
{
var s = new Bridge608A("test");
object o = "test";
Assert.True(s.Equals(o), "Bridge608 Object");
Assert.True(s.Equals("test"), "Bridge608 String");
}
}
}
|
// Barrel module: re-exports every feature module so consumers can import
// them all from this directory's index in a single statement.
export * from './admin'
export * from './info'
export * from './autorole'
export * from './etc'
export * from './help-channels'
export * from './reminders'
export * from './unfurling'
|
/**
 * Builds this screen's style map from the active theme.
 * Behaviour-identical restyle: the destructuring is moved into the body and
 * the object is returned with an explicit return statement.
 */
export default (theme) => {
  const { colors } = theme;
  return {
    errorText: {
      color: colors.RED,
    },
  };
};
|
import 'dart:async';

import 'package:astra/bloc/auth/auth_bloc.dart';
import 'package:astra/data/model/auth.dart';
import 'package:astra/data/auth_repository.dart';
import 'package:bloc/bloc.dart';
import 'package:meta/meta.dart';
import 'package:equatable/equatable.dart';

part 'login_state.dart';
part 'login_event.dart';

/// Bloc driving the login form.
///
/// On [LoginButtonPressed] it emits [LoginLoading], attempts the login via
/// [authRepository], and on success notifies the app-level [authBloc] with a
/// [LoggedIn] event before returning to [LoginInitial]. Any thrown error is
/// surfaced as [LoginFailure] carrying the error's string form.
///
/// NOTE(review): written against the pre-6.x bloc API (initialState +
/// mapEventToState) and pre-null-safety Dart (@required) — confirm the
/// project's SDK/bloc versions before migrating.
class LoginBloc extends Bloc<LoginEvent, LoginState> {
  final AuthRepository authRepository;
  // App-level auth bloc, informed after a successful login.
  final AuthBloc authBloc;

  LoginBloc({
    @required this.authRepository,
    @required this.authBloc,
  }): assert(authRepository != null),
      assert(authBloc != null);

  @override
  LoginState get initialState => LoginInitial();

  @override
  Stream<LoginState> mapEventToState(
    LoginEvent event,
  ) async* {
    if (event is LoginButtonPressed) {
      yield LoginLoading();
      try {
        final Auth auth = await authRepository.login(
          email: event.email,
          password: event.password,
        );
        // Notify the app-wide auth bloc first, then reset the form state.
        authBloc.add(LoggedIn(auth: auth));
        yield LoginInitial();
      } catch(e) {
        // Broad catch by design: any failure (network, parsing, ...) becomes
        // a user-visible LoginFailure.
        yield LoginFailure(errorMsg: e.toString());
      }
    }
  }
}
|
const path = require('path')
const StudyData = require('./StudyData')
function ScanStudy(options) {
const {directoryName, deduplicatedRoot, deduplicatedInstancesRoot} = options;
return async function (dir,studyInstanceUid) {
const studyPath = path.join(directoryName, 'studies', studyInstanceUid)
const deduplicatedPath = path.join(deduplicatedRoot, studyInstanceUid);
const deduplicatedInstancesPath = path.join(deduplicatedInstancesRoot,studyInstanceUid);
console.log('Scanning', dir, studyInstanceUid);
await this.completeStudy.getCurrentStudyData(this,{
studyPath,
deduplicatedPath,
deduplicatedInstancesPath,
studyInstanceUid,
});
}
}
module.exports = ScanStudy;
|
package typingsSlinky.azdata.mod
import org.scalablytyped.runtime.StObject
import scala.scalajs.js
import scala.scalajs.js.`|`
import scala.scalajs.js.annotation.{JSGlobalScope, JSGlobal, JSImport, JSName, JSBracketAccess}
/**
 * Describes a clickable action attached to a UI element in the azdata API.
 *
 * NOTE(review): this facade lives under `typingsSlinky` and appears to be
 * generated by ScalablyTyped from the azdata typings — prefer regenerating
 * the typings over hand-editing.
 */
@js.native
trait ActionDescriptor extends StObject {

  /**
    * Name of the clickable action. If not defined then no action will be shown
    */
  var actionTitle: js.UndefOr[String] = js.native

  /**
    * Data sent on callback being run.
    */
  var callbackData: js.UndefOr[js.Any] = js.native

  /**
    * User-visible label to display
    */
  var label: String = js.native
}
object ActionDescriptor {

  /** Creates a descriptor with only the required `label` field set. */
  @scala.inline
  def apply(label: String): ActionDescriptor = {
    val __obj = js.Dynamic.literal(label = label.asInstanceOf[js.Any])
    __obj.asInstanceOf[ActionDescriptor]
  }

  /** Fluent mutators for the optional fields (ScalablyTyped builder pattern). */
  @scala.inline
  implicit class ActionDescriptorMutableBuilder[Self <: ActionDescriptor] (val x: Self) extends AnyVal {

    @scala.inline
    def setActionTitle(value: String): Self = StObject.set(x, "actionTitle", value.asInstanceOf[js.Any])

    @scala.inline
    def setActionTitleUndefined: Self = StObject.set(x, "actionTitle", js.undefined)

    @scala.inline
    def setCallbackData(value: js.Any): Self = StObject.set(x, "callbackData", value.asInstanceOf[js.Any])

    @scala.inline
    def setCallbackDataUndefined: Self = StObject.set(x, "callbackData", js.undefined)

    @scala.inline
    def setLabel(value: String): Self = StObject.set(x, "label", value.asInstanceOf[js.Any])
  }
}
|
package de.htwg.se.menschaergerdichnicht.controller.controllerComponent
import de.htwg.se.menschaergerdichnicht.aview.gui.SwingGui
import de.htwg.se.menschaergerdichnicht.aview.tui.Tui
import scala.util._
import de.htwg.se.menschaergerdichnicht.controller.controllerComponent.GameState._
import de.htwg.se.menschaergerdichnicht.model.fieldComponent.PlayingInterface
import de.htwg.se.menschaergerdichnicht.model.fieldComponent.fieldBaseImpl.PlayingField
import de.htwg.se.menschaergerdichnicht.model.playerComponent.playerBaseImpl.Players
import de.htwg.se.menschaergerdichnicht.util.Observable
/**
  * Created by Anastasia on 25.06.17.
  */
/**
  * Controller contract for the game: holds the mutable game state (players,
  * board, status message, phase) and the actions the views invoke. Extends
  * [[Observable]] so views can subscribe to state changes.
  *
  * NOTE(review): `tui` and `gui` are initialised eagerly in the trait body,
  * so every implementor constructs a Tui and a Swing GUI during its own
  * construction and hands out a not-yet-fully-initialised `this` — confirm
  * whether lazy or injected views were intended.
  */
trait ControllerInterface extends Observable{
  var players: Players               // participating players
  var playingField: PlayingInterface // board/field state
  var message: String                // status line shown to the user
  var gameState: GameState           // current phase of the game
  var tui = new Tui(this)
  var gui = new SwingGui(this)

  def addPlayer(name: String): Try[_]   // register a player by name
  def startGame(): Try[_]               // start the game once players joined
  def chooseToken(tokenId: Int): Try[_] // select a token to move
  def gameStatus: GameState             // read accessor for the current phase
}
|
#!/bin/bash
# Append one reorg-notification line to the archive log.
# Line format: <epoch-milliseconds> <arg1> <arg2> <arg3> <arg4>
# $(date +%s%N) yields nanoseconds; dividing by 1000000 converts to ms.
# NOTE(review): $1..$4 are unquoted, so arguments containing whitespace split
# into multiple log fields (and empty args collapse) — confirm callers'
# argument shape before quoting.
echo $(($(date +%s%N)/1000000)) $1 $2 $3 $4 >> /opt/monerodarchive/reorgnotify.log
exit 0
|
#!/bin/zsh
#quick utility to get git status
# gbst [git-status-options...]
# Runs `git status`, now forwarding any arguments (e.g. `gbst -s` for the
# short format). Backward compatible: `gbst` with no arguments behaves
# exactly as before.
gbst() {
  git status "$@"
}
|
using System.Linq;
using CoreDdd.Nhibernate.TestHelpers;
using IntegrationTestsShared;
using IntegrationTestsShared.TestEntities;
using NUnit.Framework;
using Shouldly;
namespace CoreDdd.Nhibernate.Tests.Conventions
{
[TestFixture]
public class when_persisting_parent_entity_with_child_entity_not_referencing_parent : BasePersistenceTest
{
[Test]
public void child_entity_is_persisted_linked_with_the_parent()
{
var parentEntity = new ParentEntity();
parentEntity.AddChildEntityNotReferencingParentEntity();
UnitOfWork.Save(parentEntity);
UnitOfWork.Clear();
parentEntity = UnitOfWork.Get<ParentEntity>(parentEntity.Id);
parentEntity.ChildrenNotReferencingParent.Count().ShouldBe(1);
}
}
}
|
package com.gitlab.daring.image.command.transform
import com.gitlab.daring.image.command.Command
import com.gitlab.daring.image.command.CommandRegistry
import com.gitlab.daring.image.command.SimpleCommand
import com.gitlab.daring.image.util.ImageUtils.smat
import org.bytedeco.javacpp.opencv_core.inRange
import org.bytedeco.javacpp.opencv_imgproc.adaptiveThreshold
import org.bytedeco.javacpp.opencv_imgproc.threshold
/**
 * Command registrations for the thresholding family of image operations.
 *
 * The order in which the `*Param` helpers are invoked defines the positional
 * order of each command's arguments, so it is preserved exactly here.
 */
internal object ThresholdCommands {

    /** Registers threshold, adaptiveThreshold and inRange with [r]. */
    fun register(r: CommandRegistry) {
        r.register("threshold", this::thresholdCommand)
        r.register("adaptiveThreshold", this::adaptiveThresholdCommand)
        r.register("inRange", this::inRangeCommand)
    }

    /** Global threshold: threshold value, max value, type, optional OTSU/Triangle flag. */
    fun thresholdCommand(args: Array<String>): Command {
        val cmd = SimpleCommand(args)
        val thresh = cmd.doubleParam(128.0, "0-255")
        val maxVal = cmd.doubleParam(255.0, "0-255")
        val type = cmd.enumParam(ThresholdType.Bin)
        val flag = cmd.enumParam(ThresholdFlag.None)
        // flag.vi * 8 lines up with OpenCV's THRESH_OTSU (8) / THRESH_TRIANGLE (16)
        // bit flags, added onto the base threshold type.
        return cmd.withFunc { mat -> threshold(mat, mat, thresh.v, maxVal.v, type.vi + flag.vi * 8) }
    }

    /** Locally adaptive threshold (mean or gaussian neighbourhood). */
    fun adaptiveThresholdCommand(args: Array<String>): Command {
        val cmd = SimpleCommand(args)
        val maxVal = cmd.doubleParam(255.0, "0-255")
        val method = cmd.enumParam(AdaptiveMethod.Mean)
        val type = cmd.enumParam(ThresholdType.Bin)
        val blockSize = cmd.intParam(1, "0-50")
        val offset = cmd.intParam(0, "0-100")
        // blockSize.v * 2 + 1 guarantees the odd window size OpenCV requires.
        return cmd.withFunc { mat ->
            adaptiveThreshold(mat, mat, maxVal.v, method.vi, type.vi, blockSize.v * 2 + 1, offset.dv)
        }
    }

    /** Keeps pixels whose value lies within [lower, upper]. */
    fun inRangeCommand(args: Array<String>): Command {
        val cmd = SimpleCommand(args)
        val lower = cmd.intParam(0, "0-255")
        val upper = cmd.intParam(255, "0-255")
        return cmd.withFunc { mat -> inRange(mat, smat(lower.v), smat(upper.v), mat) }
    }

    /** Ordinal order mirrors OpenCV's THRESH_* constants — do not reorder. */
    enum class ThresholdType {
        Bin, BinInv, Trunc, ToZero, ToZeroInv
    }

    enum class ThresholdFlag {
        None, OTSU, Triangle
    }

    enum class AdaptiveMethod {
        Mean, Gaussian
    }
}
|
%%%----------------------------------------------------------------------
%%% File : yaws.erl
%%% Author : Claes Wikstrom <klacke@bluetail.com>
%%% Purpose :
%%% Created : 16 Jan 2002 by Claes Wikstrom <klacke@bluetail.com>
%%%----------------------------------------------------------------------
-module(yaws).
-author('klacke@bluetail.com').
-include("../include/yaws.hrl").
-include("../include/yaws_api.hrl").
-include("yaws_appdeps.hrl").
-include("yaws_debug.hrl").
-include_lib("kernel/include/file.hrl").
-export([start/0, stop/0, hup/0, hup/1, restart/0, modules/0, load/0]).
-export([start_embedded/1, start_embedded/2, start_embedded/3, start_embedded/4,
add_server/2, create_gconf/2, create_sconf/2, setup_sconf/2]).
-export([gconf_yaws_dir/1, gconf_trace/1, gconf_flags/1, gconf_logdir/1,
gconf_ebin_dir/1, gconf_src_dir/1, gconf_runmods/1,
gconf_keepalive_timeout/1, gconf_keepalive_maxuses/1,
gconf_max_num_cached_files/1, gconf_max_num_cached_bytes/1,
gconf_max_size_cached_file/1, gconf_max_connections/1,
gconf_process_options/1, gconf_large_file_chunk_size/1,
gconf_mnesia_dir/1, gconf_log_wrap_size/1, gconf_cache_refresh_secs/1,
gconf_include_dir/1, gconf_phpexe/1, gconf_yaws/1, gconf_id/1,
gconf_enable_soap/1, gconf_soap_srv_mods/1, gconf_ysession_mod/1,
gconf_acceptor_pool_size/1, gconf_mime_types_info/1,
gconf_nslookup_pref/1]).
-export([sconf_port/1, sconf_flags/1, sconf_redirect_map/1, sconf_rhost/1,
sconf_rmethod/1, sconf_docroot/1, sconf_xtra_docroots/1,
sconf_listen/1, sconf_servername/1, sconf_serveralias/1, sconf_yaws/1,
sconf_ets/1, sconf_ssl/1, sconf_authdirs/1, sconf_partial_post_size/1,
sconf_appmods/1, sconf_expires/1, sconf_errormod_401/1,
sconf_errormod_404/1, sconf_arg_rewrite_mode/1, sconf_logger_mod/1,
sconf_opaque/1, sconf_start_mod/1, sconf_allowed_scripts/1,
sconf_tilde_allowed_scripts/1, sconf_index_files/1, sconf_revproxy/1,
sconf_spotions/1, sconf_extra_cgi_vars/1, sconf_stats/1,
sconf_fcgi_app_server/1, sconf_php_handler/1, sconf_shaper/1,
sconf_deflate_options/1, sconf_mime_types_info/1,
sconf_dispatch_mod/1]).
-export([new_auth/0,
auth_dir/1, auth_dir/2,
auth_docroot/1, auth_docroot/2,
auth_files/1, auth_files/2,
auth_realm/1, auth_realm/2,
auth_type/1, auth_type/2,
auth_headers/1, auth_headers/2,
auth_users/1, auth_users/2,
auth_acl/1, auth_acl/2,
auth_mod/1, auth_mod/2,
auth_outmod/1, auth_outmod/2,
auth_pam/1, auth_pam/2]).
-export([new_ssl/0,
ssl_keyfile/1, ssl_keyfile/2,
ssl_certfile/1, ssl_certfile/2,
ssl_verify/1, ssl_verify/2,
ssl_fail_if_no_peer_cert/1, ssl_fail_if_no_peer_cert/2,
ssl_depth/1, ssl_depth/2,
ssl_password/1, ssl_password/2,
ssl_cacertfile/1, ssl_cacertfile/2,
ssl_ciphers/1, ssl_ciphers/2,
ssl_cachetimeout/1, ssl_cachetimeout/2,
ssl_secure_renegotiate/1, ssl_secure_renegotiate/2,
ssl_honor_cipher_order/1, ssl_honor_cipher_order/2]).
-export([new_deflate/0,
deflate_min_compress_size/1, deflate_min_compress_size/2,
deflate_compression_level/1, deflate_compression_level/2,
deflate_window_size/1, deflate_window_size/2,
deflate_mem_level/1, deflate_mem_level/2,
deflate_strategy/1, deflate_strategy/2,
deflate_use_gzip_static/1, deflate_use_gzip_static/2,
deflate_mime_types/1, deflate_mime_types/2]).
-export([new_mime_types_info/0,
mime_types_info_mime_types_file/1, mime_types_info_mime_types_file/2,
mime_types_info_types/1, mime_types_info_types/2,
mime_types_info_charsets/1, mime_types_info_charsets/2,
mime_types_info_default_type/1, mime_types_info_default_type/2,
mime_types_info_default_charset/1, mime_types_info_default_charset/2]).
-export([first/2, elog/2, filesize/1, upto/2, to_string/1, to_list/1,
integer_to_hex/1, hex_to_integer/1, string_to_hex/1, hex_to_string/1,
is_modified_p/2, flag/3, dohup/1, is_ssl/1, address/0, is_space/1,
setopts/3, eat_crnl/2, get_chunk_num/2, get_chunk_header/2,
get_chunk/4, get_chunk_trailer/2, list_to_uue/1, uue_to_list/1,
printversion/0, strip_spaces/1, strip_spaces/2,
month/1, mk2/1, home/0, arg_rewrite/1, to_lowerchar/1, to_lower/1,
funreverse/2, is_prefix/2, split_sep/2, join_sep/2, accepts_gzip/2,
upto_char/2, deepmap/2, ticker/2, ticker/3,
parse_qvalue/1, parse_auth/1]).
-export([outh_set_status_code/1,
outh_set_non_cacheable/1,
outh_set_content_type/1,
outh_set_content_encoding/1,
outh_set_cookie/1,
outh_set_static_headers/3, outh_set_static_headers/4,
outh_set_304_headers/3,
outh_set_dyn_headers/3,
outh_set_connection/1,
outh_set_content_length/1,
outh_set_dcc/2,
outh_set_transfer_encoding_off/0,
outh_set_auth/1,
outh_set_vary/1,
outh_clear_headers/0,
outh_fix_doclose/0,
dcc/2]).
-export([make_allow_header/0, make_allow_header/1,
make_server_header/0,
make_last_modified_header/1,
make_location_header/1,
make_etag_header/1,
make_content_range_header/1,
make_content_length_header/1,
make_content_encoding_header/1,
make_connection_close_header/1,
make_transfer_encoding_chunked_header/1,
make_www_authenticate_header/1,
make_etag/1,
make_content_type_header/1,
make_date_header/0,
make_vary_header/1]).
-export([outh_get_status_code/0,
outh_get_contlen/0,
outh_get_act_contlen/0,
outh_inc_act_contlen/1,
outh_get_doclose/0,
outh_get_chunked/0,
outh_get_content_encoding/0,
outh_get_content_encoding_header/0,
outh_get_content_type/0,
outh_get_vary_fields/0,
outh_serialize/0]).
-export([accumulate_header/1, headers_to_str/1,
getuid/0,
user_to_home/1,
uid_to_name/1,
exists/1,
mkdir/1]).
-export([tcp_connect/3, tcp_connect/4, ssl_connect/3, ssl_connect/4]).
-export([do_recv/3, do_recv/4, cli_recv/3,
gen_tcp_send/2,
http_get_headers/2]).
-export([sconf_to_srvstr/1,
redirect_host/2, redirect_port/1,
redirect_scheme_port/1, redirect_scheme/1,
tmpdir/0, tmpdir/1, mktemp/1, split_at/2, insert_at/3,
id_dir/1, ctl_file/1]).
-export([parse_ipmask/1, match_ipmask/2]).
-export([get_app_dir/0, get_ebin_dir/0, get_priv_dir/0,
get_inc_dir/0]).
%% Internal
-export([local_time_as_gmt_string/1, universal_time_as_string/1,
stringdate_to_datetime/1]).
%% Start yaws as a permanent OTP application, first ensuring all
%% applications listed in ?YAWS_APPDEPS are running.
start() ->
    ok = start_app_deps(),
    application:start(yaws, permanent).

%% Stop the yaws application (dependency applications are left running).
stop() ->
    application:stop(yaws).
%%% Quick and easy way of starting Yaws in embedded mode. No need for any
%%% start-script switches and no dependencies to Yaws header files. Just call
%%% start_embedded/N and you are in the air.
%%% DocRoot: document root directory. SL: per-server (sconf) proplist.
%%% GL: global (gconf) proplist. Id: configuration id (defaults to "default").
start_embedded(DocRoot) ->
    start_embedded(DocRoot, []).

start_embedded(DocRoot, SL) when is_list(DocRoot),is_list(SL) ->
    start_embedded(DocRoot, SL, []).

start_embedded(DocRoot, SL, GL) when is_list(DocRoot),is_list(SL),is_list(GL) ->
    start_embedded(DocRoot, SL, GL, "default").

%% Builds the embedded configuration via yaws_api:embedded_start_conf/4,
%% starts the application, then installs the config with yaws_api:setconf/2.
start_embedded(DocRoot, SL, GL, Id)
  when is_list(DocRoot), is_list(SL), is_list(GL) ->
    ok = start_app_deps(),
    {ok, SCList, GC, _} = yaws_api:embedded_start_conf(DocRoot, SL, GL, Id),
    ok = application:start(yaws, permanent),
    yaws_config:add_yaws_soap_srv(GC),
    yaws_api:setconf(GC, SCList),
    ok.
%% Add one more virtual server to a running yaws: builds an #sconf{} from
%% DocRoot and the proplist SL, normalises its authdirs, and registers it
%% with yaws_config.
add_server(DocRoot, SL) when is_list(DocRoot),is_list(SL) ->
    SC = create_sconf(DocRoot, SL),
    %% Change #auth in authdirs to {Dir, #auth} if needed
    Fun = fun
              (A = #auth{dir = [Dir]}, Acc) -> [{Dir, A}| Acc];
              (A, Acc) -> [A| Acc]
          end,
    Authdirs = lists:foldr(Fun, [], SC#sconf.authdirs),
    SC1 = yaws_config:add_yaws_auth(SC#sconf{authdirs = Authdirs}),
    yaws_config:add_sconf(SC1).
%% Build a #gconf{} from property list GL, starting from the default global
%% conf for configuration id Id.
create_gconf(GL, Id) when is_list(GL) ->
    setup_gconf(GL, yaws_config:make_default_gconf(false, Id)).

%% Build an #sconf{} from property list SL; a 'port' property (if present)
%% seeds the default server conf.
create_sconf(DocRoot, SL) when is_list(DocRoot), is_list(SL) ->
    SC = yaws_config:make_default_sconf(DocRoot, lkup(port, SL, undefined)),
    setup_sconf(SL, SC).
%% Start every application named in the ?YAWS_APPDEPS macro (a comma-
%% separated string). Already-running deps are accepted; any other failure
%% is thrown, caught by the outer 'catch', and returned instead of 'ok' —
%% callers pattern-match 'ok' and so crash with badmatch on failure.
start_app_deps() ->
    Deps = split_sep(?YAWS_APPDEPS, $,),
    catch lists:foldl(fun(App0, Acc) ->
                              App = list_to_existing_atom(App0),
                              case application:start(App, permanent) of
                                  ok -> Acc;
                                  {error,{already_started,App}} -> Acc;
                                  Else -> throw(Else)
                              end
                      end, ok, Deps).
%% Access functions for the GCONF and SCONF records.
%% One exported getter per #gconf{} field so embedding applications can read
%% global-configuration values without including yaws.hrl.
gconf_yaws_dir (#gconf{yaws_dir = X}) -> X.
gconf_trace (#gconf{trace = X}) -> X.
gconf_flags (#gconf{flags = X}) -> X.
gconf_logdir (#gconf{logdir = X}) -> X.
gconf_ebin_dir (#gconf{ebin_dir = X}) -> X.
gconf_src_dir (#gconf{src_dir = X}) -> X.
gconf_runmods (#gconf{runmods = X}) -> X.
gconf_keepalive_timeout (#gconf{keepalive_timeout = X}) -> X.
gconf_keepalive_maxuses (#gconf{keepalive_maxuses = X}) -> X.
gconf_max_num_cached_files (#gconf{max_num_cached_files = X}) -> X.
gconf_max_num_cached_bytes (#gconf{max_num_cached_bytes = X}) -> X.
gconf_max_size_cached_file (#gconf{max_size_cached_file = X}) -> X.
gconf_max_connections (#gconf{max_connections = X}) -> X.
gconf_process_options (#gconf{process_options = X}) -> X.
gconf_large_file_chunk_size(#gconf{large_file_chunk_size = X}) -> X.
gconf_mnesia_dir (#gconf{mnesia_dir = X}) -> X.
gconf_log_wrap_size (#gconf{log_wrap_size = X}) -> X.
gconf_cache_refresh_secs (#gconf{cache_refresh_secs = X}) -> X.
gconf_include_dir (#gconf{include_dir = X}) -> X.
gconf_phpexe (#gconf{phpexe = X}) -> X.
gconf_yaws (#gconf{yaws = X}) -> X.
gconf_id (#gconf{id = X}) -> X.
gconf_enable_soap (#gconf{enable_soap = X}) -> X.
gconf_soap_srv_mods (#gconf{soap_srv_mods = X}) -> X.
gconf_ysession_mod (#gconf{ysession_mod = X}) -> X.
gconf_acceptor_pool_size (#gconf{acceptor_pool_size = X}) -> X.
gconf_mime_types_info (#gconf{mime_types_info = X}) -> X.
gconf_nslookup_pref (#gconf{nslookup_pref = X}) -> X.
%% One exported getter per #sconf{} (virtual server) field.
%% NOTE(review): sconf_arg_rewrite_mode/1 (reads field arg_rewrite_mod) and
%% sconf_spotions/1 (reads field soptions) carry historical misspellings in
%% their exported names; renaming them would break the public API, so they
%% are kept as-is.
sconf_port (#sconf{port = X}) -> X.
sconf_flags (#sconf{flags = X}) -> X.
sconf_redirect_map (#sconf{redirect_map = X}) -> X.
sconf_rhost (#sconf{rhost = X}) -> X.
sconf_rmethod (#sconf{rmethod = X}) -> X.
sconf_docroot (#sconf{docroot = X}) -> X.
sconf_xtra_docroots (#sconf{xtra_docroots = X}) -> X.
sconf_listen (#sconf{listen = X}) -> X.
sconf_servername (#sconf{servername = X}) -> X.
sconf_serveralias (#sconf{serveralias = X}) -> X.
sconf_yaws (#sconf{yaws = X}) -> X.
sconf_ets (#sconf{ets = X}) -> X.
sconf_ssl (#sconf{ssl = X}) -> X.
sconf_authdirs (#sconf{authdirs = X}) -> X.
sconf_partial_post_size (#sconf{partial_post_size = X}) -> X.
sconf_appmods (#sconf{appmods = X}) -> X.
sconf_expires (#sconf{expires = X}) -> X.
sconf_errormod_401 (#sconf{errormod_401 = X}) -> X.
sconf_errormod_404 (#sconf{errormod_404 = X}) -> X.
%% exported name says "mode" but it reads the arg_rewrite_mod field
sconf_arg_rewrite_mode (#sconf{arg_rewrite_mod = X}) -> X.
sconf_logger_mod (#sconf{logger_mod = X}) -> X.
sconf_opaque (#sconf{opaque = X}) -> X.
sconf_start_mod (#sconf{start_mod = X}) -> X.
sconf_allowed_scripts (#sconf{allowed_scripts = X}) -> X.
sconf_tilde_allowed_scripts(#sconf{tilde_allowed_scripts = X}) -> X.
sconf_index_files (#sconf{index_files = X}) -> X.
sconf_revproxy (#sconf{revproxy = X}) -> X.
%% exported name is a typo for "soptions"
sconf_spotions (#sconf{soptions = X}) -> X.
sconf_extra_cgi_vars (#sconf{extra_cgi_vars = X}) -> X.
sconf_stats (#sconf{stats = X}) -> X.
sconf_fcgi_app_server (#sconf{fcgi_app_server = X}) -> X.
sconf_php_handler (#sconf{php_handler = X}) -> X.
sconf_shaper (#sconf{shaper = X}) -> X.
sconf_deflate_options (#sconf{deflate_options = X}) -> X.
sconf_mime_types_info (#sconf{mime_types_info = X}) -> X.
sconf_dispatch_mod (#sconf{dispatch_mod = X}) -> X.
%% Access functions for the AUTH record.
%% new_auth/0 builds a default #auth{}; the arity-1 functions are field
%% getters and the arity-2 functions are functional setters returning an
%% updated copy of the record.
new_auth() -> #auth{}.
auth_dir (#auth{dir = X}) -> X.
auth_docroot(#auth{docroot = X}) -> X.
auth_files (#auth{files = X}) -> X.
auth_realm (#auth{realm = X}) -> X.
auth_type (#auth{type = X}) -> X.
auth_headers(#auth{headers = X}) -> X.
auth_users (#auth{users = X}) -> X.
auth_acl (#auth{acl = X}) -> X.
auth_mod (#auth{mod = X}) -> X.
auth_outmod (#auth{outmod = X}) -> X.
auth_pam (#auth{pam = X}) -> X.
auth_dir (A, Dir) -> A#auth{dir = Dir}.
auth_docroot(A, DocRoot) -> A#auth{docroot = DocRoot}.
auth_files (A, Files) -> A#auth{files = Files}.
auth_realm (A, Realm) -> A#auth{realm = Realm}.
auth_type (A, Type) -> A#auth{type = Type}.
auth_headers(A, Headers) -> A#auth{headers = Headers}.
auth_users (A, Users) -> A#auth{users = Users}.
auth_acl (A, Acl) -> A#auth{acl = Acl}.
auth_mod (A, Mod) -> A#auth{mod = Mod}.
auth_outmod (A, Outmod) -> A#auth{outmod = Outmod}.
auth_pam (A, Pam) -> A#auth{pam = Pam}.
%% Build the authdirs list for a server conf: every {auth, A} tuple found in
%% the proplist SL is normalised via setup_auth/1; when none are present the
%% supplied defaults are kept unchanged.
setup_authdirs(SL, DefaultAuthDirs) ->
    case [A || {auth, A} <- SL] of
        []    -> DefaultAuthDirs;
        Auths -> lists:map(fun setup_auth/1, Auths)
    end.
%% Normalise an auth specification: a ready-made #auth{} passes through
%% unchanged; a property list is folded onto the record defaults field by
%% field (lkup/3 falls back to the default when a key is absent).
setup_auth(#auth{}=Auth) ->
    Auth;
setup_auth(AuthProps) ->
    Auth = #auth{},
    #auth{dir = lkup(dir, AuthProps, Auth#auth.dir),
          docroot = lkup(docroot, AuthProps, Auth#auth.docroot),
          files = lkup(files, AuthProps, Auth#auth.files),
          realm = lkup(realm, AuthProps, Auth#auth.realm),
          type = lkup(type, AuthProps, Auth#auth.type),
          headers = lkup(headers, AuthProps, Auth#auth.headers),
          users = lkup(users, AuthProps, Auth#auth.users),
          acl = lkup(acl, AuthProps, Auth#auth.acl),
          mod = lkup(mod, AuthProps, Auth#auth.mod),
          outmod = lkup(outmod, AuthProps, Auth#auth.outmod),
          pam = lkup(pam, AuthProps, Auth#auth.pam)}.
%% Access functions for the SSL record.
%% Arity-1 functions are getters, arity-2 functions are functional setters.
new_ssl() -> #ssl{}.
ssl_keyfile (#ssl{keyfile = X}) -> X.
ssl_certfile (#ssl{certfile = X}) -> X.
ssl_verify (#ssl{verify = X}) -> X.
ssl_fail_if_no_peer_cert(#ssl{fail_if_no_peer_cert = X}) -> X.
ssl_depth (#ssl{depth = X}) -> X.
ssl_password (#ssl{password = X}) -> X.
ssl_cacertfile (#ssl{cacertfile = X}) -> X.
ssl_ciphers (#ssl{ciphers = X}) -> X.
ssl_cachetimeout (#ssl{cachetimeout = X}) -> X.
ssl_secure_renegotiate (#ssl{secure_renegotiate = X}) -> X.
ssl_honor_cipher_order (#ssl{honor_cipher_order = X}) -> X.
ssl_keyfile (S, File) -> S#ssl{keyfile = File}.
ssl_certfile (S, File) -> S#ssl{certfile = File}.
ssl_verify (S, Verify) -> S#ssl{verify = Verify}.
ssl_fail_if_no_peer_cert(S, Bool) -> S#ssl{fail_if_no_peer_cert = Bool}.
ssl_depth (S, Depth) -> S#ssl{depth = Depth}.
ssl_password (S, Pass) -> S#ssl{password = Pass}.
ssl_cacertfile (S, File) -> S#ssl{cacertfile = File}.
ssl_ciphers (S, Ciphers) -> S#ssl{ciphers = Ciphers}.
ssl_cachetimeout (S, Timeout) -> S#ssl{cachetimeout = Timeout}.
ssl_secure_renegotiate (S, Bool) -> S#ssl{secure_renegotiate = Bool}.
%% The honor_cipher_order setter only takes effect when the build detected
%% support for the option (HAVE_SSL_HONOR_CIPHER_ORDER); otherwise it is a
%% no-op that returns the record unchanged.
-ifdef(HAVE_SSL_HONOR_CIPHER_ORDER).
ssl_honor_cipher_order (S, Bool) -> S#ssl{honor_cipher_order = Bool}.
-else.
ssl_honor_cipher_order (S, _) -> S.
-endif.
%% Resolve the SSL configuration for a server conf. Accepts a ready-made
%% #ssl{} record or a property list (folded onto the record defaults);
%% returns DefaultSSL when no 'ssl' key is present in SL.
%% Side effect: a configured protocol_version is installed node-globally
%% via application:set_env(ssl, ...), so it affects every SSL listener in
%% the node, not only this server.
setup_ssl(SL, DefaultSSL) ->
    case lkup(ssl, SL, undefined) of
        undefined ->
            DefaultSSL;
        SSL when is_record(SSL, ssl) ->
            #ssl{protocol_version=ProtocolVersion} = SSL,
            case ProtocolVersion of
                undefined -> ok;
                _ ->
                    ok = application:set_env(ssl, protocol_version, ProtocolVersion)
            end,
            SSL;
        SSLProps when is_list(SSLProps) ->
            ProtocolVersion = case lkup(protocol_version, SSLProps, undefined) of
                                  undefined -> undefined;
                                  PVList ->
                                      ok = application:set_env(ssl, protocol_version,
                                                               PVList),
                                      PVList
                              end,
            SSL = #ssl{},
            #ssl{keyfile = lkup(keyfile, SSLProps,
                                SSL#ssl.keyfile),
                 certfile = lkup(certfile, SSLProps,
                                 SSL#ssl.certfile),
                 verify = lkup(verify, SSLProps, SSL#ssl.verify),
                 fail_if_no_peer_cert = lkup(fail_if_no_peer_cert, SSLProps,
                                             SSL#ssl.fail_if_no_peer_cert),
                 depth = lkup(depth, SSLProps, SSL#ssl.depth),
                 password = lkup(password, SSLProps,
                                 SSL#ssl.password),
                 cacertfile = lkup(cacertfile, SSLProps,
                                   SSL#ssl.cacertfile),
                 ciphers = lkup(ciphers, SSLProps,
                                SSL#ssl.ciphers),
                 cachetimeout = lkup(cachetimeout, SSLProps,
                                     SSL#ssl.cachetimeout),
                 secure_renegotiate = lkup(secure_renegotiate, SSLProps,
                                           SSL#ssl.secure_renegotiate),
                 honor_cipher_order = lkup(honor_cipher_order, SSLProps,
                                           SSL#ssl.honor_cipher_order),
                 protocol_version = ProtocolVersion}
    end.
%% Access functions for the DEFLATE record.
%% Arity-1 functions are getters, arity-2 functions are functional setters.
new_deflate() -> #deflate{}.
deflate_min_compress_size(#deflate{min_compress_size = X}) -> X.
deflate_compression_level(#deflate{compression_level = X}) -> X.
deflate_window_size (#deflate{window_size = X}) -> X.
deflate_mem_level (#deflate{mem_level = X}) -> X.
deflate_strategy (#deflate{strategy = X}) -> X.
deflate_use_gzip_static (#deflate{use_gzip_static = X}) -> X.
deflate_mime_types (#deflate{mime_types = X}) -> X.
deflate_min_compress_size(D, Min) -> D#deflate{min_compress_size = Min}.
deflate_compression_level(D, Level) -> D#deflate{compression_level = Level}.
deflate_window_size (D, Size) -> D#deflate{window_size = Size}.
deflate_mem_level (D, Level) -> D#deflate{mem_level = Level}.
deflate_strategy (D, Strat) -> D#deflate{strategy = Strat}.
deflate_use_gzip_static (D, Bool) -> D#deflate{use_gzip_static = Bool}.
deflate_mime_types (D, Types) -> D#deflate{mime_types = Types}.
%% Resolve the deflate options for a server conf: a ready-made #deflate{}
%% record passes through, a property list is folded onto the record
%% defaults, and an absent key yields DefaultDeflate.
setup_deflate(SL, DefaultDeflate) ->
    case lkup(deflate_options, SL, undefined) of
        undefined ->
            DefaultDeflate;
        D when is_record(D, deflate) ->
            D;
        DProps when is_list(DProps) ->
            D = #deflate{},
            #deflate{min_compress_size = lkup(min_compress_size, DProps,
                                              D#deflate.min_compress_size),
                     compression_level = lkup(compression_level, DProps,
                                              D#deflate.compression_level),
                     window_size = lkup(window_size, DProps,
                                        D#deflate.window_size),
                     mem_level = lkup(mem_level, DProps,
                                      D#deflate.mem_level),
                     strategy = lkup(strategy, DProps,
                                     D#deflate.strategy),
                     use_gzip_static = lkup(use_gzip_static, DProps,
                                            D#deflate.use_gzip_static),
                     mime_types = lkup(mime_types, DProps,
                                       D#deflate.mime_types)}
    end.
%% Access functions to MIME_TYPES_INFO record.
%% Arity-1 functions are getters, arity-2 functions are functional setters.
new_mime_types_info() -> #mime_types_info{}.
mime_types_info_mime_types_file(#mime_types_info{mime_types_file = X}) -> X.
mime_types_info_types (#mime_types_info{types = X}) -> X.
mime_types_info_charsets (#mime_types_info{charsets = X}) -> X.
mime_types_info_default_type (#mime_types_info{default_type = X}) -> X.
mime_types_info_default_charset(#mime_types_info{default_charset = X}) -> X.
mime_types_info_mime_types_file(M, File) ->
    M#mime_types_info{mime_types_file = File}.
mime_types_info_types(M, Types) ->
    M#mime_types_info{types = Types}.
mime_types_info_charsets(M, Charsets) ->
    M#mime_types_info{charsets = Charsets}.
mime_types_info_default_type(M, Type) ->
    M#mime_types_info{default_type = Type}.
mime_types_info_default_charset(M, Charset) ->
    M#mime_types_info{default_charset = Charset}.
%% Resolve the mime_types_info for a gconf or sconf: a ready-made
%% #mime_types_info{} record passes through, a property list is folded onto
%% the record defaults, and an absent key yields DefaultMTI.
setup_mime_types_info(SL, DefaultMTI) ->
    case lkup(mime_types_info, SL, undefined) of
        undefined ->
            DefaultMTI;
        M when is_record(M, mime_types_info) ->
            M;
        MProps when is_list(MProps) ->
            M = #mime_types_info{},
            #mime_types_info{mime_types_file =
                                 lkup(mime_types_file, MProps,
                                      M#mime_types_info.mime_types_file),
                             types = lkup(types, MProps,
                                          M#mime_types_info.types),
                             charsets = lkup(charsets, MProps,
                                             M#mime_types_info.charsets),
                             default_type =
                                 lkup(default_type, MProps,
                                      M#mime_types_info.default_type),
                             default_charset =
                                 lkup(default_charset, MProps,
                                      M#mime_types_info.default_charset)}
    end.
%% Setup global configuration
%% Fold the property list GL over the #gconf{} record GC: every listed
%% property overrides the corresponding field, everything else keeps the
%% value already in GC. The empty list returns GC untouched.
setup_gconf([], GC) -> GC;
setup_gconf(GL, GC) ->
    #gconf{yaws_dir = lkup(yaws_dir, GL, GC#gconf.yaws_dir),
           trace = lkup(trace, GL, GC#gconf.trace),
           flags = set_gc_flags(lkup(flags, GL, []),
                                GC#gconf.flags),
           logdir = lkup(logdir, GL, GC#gconf.logdir),
           ebin_dir = lkup(ebin_dir, GL, GC#gconf.ebin_dir),
           src_dir = lkup(src_dir, GL, GC#gconf.src_dir),
           runmods = lkup(runmods, GL, GC#gconf.runmods),
           keepalive_timeout = lkup(keepalive_timeout, GL,
                                    GC#gconf.keepalive_timeout),
           keepalive_maxuses = lkup(keepalive_maxuses, GL,
                                    GC#gconf.keepalive_maxuses),
           max_num_cached_files = lkup(max_num_cached_files, GL,
                                       GC#gconf.max_num_cached_files),
           max_num_cached_bytes = lkup(max_num_cached_bytes, GL,
                                       GC#gconf.max_num_cached_bytes),
           max_size_cached_file = lkup(max_size_cached_file, GL,
                                       GC#gconf.max_size_cached_file),
           max_connections = lkup(max_connections, GL,
                                  GC#gconf.max_connections),
           process_options = lkup(process_options, GL,
                                  GC#gconf.process_options),
           large_file_chunk_size = lkup(large_file_chunk_size, GL,
                                        GC#gconf.large_file_chunk_size),
           mnesia_dir = lkup(mnesia_dir, GL, GC#gconf.mnesia_dir),
           log_wrap_size = lkup(log_wrap_size, GL,
                                GC#gconf.log_wrap_size),
           cache_refresh_secs = lkup(cache_refresh_secs, GL,
                                     GC#gconf.cache_refresh_secs),
           include_dir = lkup(include_dir, GL, GC#gconf.include_dir),
           phpexe = lkup(phpexe, GL, GC#gconf.phpexe),
           yaws = lkup(yaws, GL, GC#gconf.yaws),
           id = lkup(id, GL, GC#gconf.id),
           enable_soap = lkup(enable_soap, GL, GC#gconf.enable_soap),
           soap_srv_mods = lkup(soap_srv_mods, GL,
                                GC#gconf.soap_srv_mods),
           ysession_mod = lkup(ysession_mod, GL,
                               GC#gconf.ysession_mod),
           acceptor_pool_size = lkup(acceptor_pool_size, GL,
                                     GC#gconf.acceptor_pool_size),
           mime_types_info = setup_mime_types_info(
                               GL, GC#gconf.mime_types_info
                              ),
           nslookup_pref = lkup(nslookup_pref, GL,
                                GC#gconf.nslookup_pref)
          }.
%% Fold a [{FlagName, Bool}] list into the packed integer flag word kept in
%% #gconf.flags. copy_errlog and copy_error_log are accepted as aliases for
%% the same bit (?GC_COPY_ERRLOG); unrecognised entries are silently skipped.
set_gc_flags([{tty_trace, Bool}|T], Flags) ->
    set_gc_flags(T, flag(Flags,?GC_TTY_TRACE, Bool));
set_gc_flags([{debug, Bool}|T], Flags) ->
    set_gc_flags(T, flag(Flags, ?GC_DEBUG, Bool));
set_gc_flags([{copy_errlog, Bool}|T], Flags) ->
    set_gc_flags(T, flag(Flags, ?GC_COPY_ERRLOG, Bool));
set_gc_flags([{copy_error_log, Bool}|T], Flags) ->
    set_gc_flags(T, flag(Flags, ?GC_COPY_ERRLOG, Bool));
set_gc_flags([{backwards_compat_parse, Bool}|T], Flags) ->
    set_gc_flags(T, flag(Flags, ?GC_BACKWARDS_COMPAT_PARSE, Bool));
set_gc_flags([{log_resolve_hostname, Bool}|T], Flags) ->
    set_gc_flags(T, flag(Flags, ?GC_LOG_RESOLVE_HOSTNAME, Bool));
set_gc_flags([{fail_on_bind_err, Bool}|T], Flags) ->
    set_gc_flags(T, flag(Flags,?GC_FAIL_ON_BIND_ERR,Bool));
set_gc_flags([{pick_first_virthost_on_nomatch, Bool}|T], Flags) ->
    set_gc_flags(T, flag(Flags, ?GC_PICK_FIRST_VIRTHOST_ON_NOMATCH,Bool));
set_gc_flags([{use_old_ssl, Bool}|T], Flags) ->
    set_gc_flags(T, flag(Flags,?GC_USE_OLD_SSL,Bool));
set_gc_flags([{use_erlang_sendfile, Bool}|T], Flags) ->
    set_gc_flags(T, flag(Flags,?GC_USE_ERLANG_SENDFILE,Bool));
set_gc_flags([{use_yaws_sendfile, Bool}|T], Flags) ->
    set_gc_flags(T, flag(Flags,?GC_USE_YAWS_SENDFILE,Bool));
%% unknown flag tuples are ignored on purpose
set_gc_flags([_|T], Flags) ->
    set_gc_flags(T, Flags);
set_gc_flags([], Flags) ->
    Flags.
%% Setup vhost configuration
%% setup_sconf(Props, DefaultSC) -> #sconf{}
%% Build a per-virtual-host #sconf{} record: each field is taken from the
%% Props list when present, otherwise inherited from the supplied default
%% record.  Nested structures (flags, ssl, authdirs, deflate, mime info)
%% are merged by their own setup_*/set_* helpers.
setup_sconf(SL, SC) ->
    #sconf{port = lkup(port, SL, SC#sconf.port),
           flags = set_sc_flags(lkup(flags, SL, []),
                                SC#sconf.flags),
           redirect_map = lkup(redirect_map, SL,
                               SC#sconf.redirect_map),
           rhost = lkup(rhost, SL, SC#sconf.rhost),
           rmethod = lkup(rmethod, SL, SC#sconf.rmethod),
           docroot = lkup(docroot, SL, SC#sconf.docroot),
           xtra_docroots = lkup(xtra_docroots, SL,
                                SC#sconf.xtra_docroots),
           listen = lkup(listen, SL, SC#sconf.listen),
           servername = lkup(servername, SL, SC#sconf.servername),
           serveralias = lkup(serveralias, SL, SC#sconf.serveralias),
           yaws = lkup(yaws, SL, SC#sconf.yaws),
           ets = lkup(ets, SL, SC#sconf.ets),
           ssl = setup_ssl(SL, SC#sconf.ssl),
           authdirs = setup_authdirs(SL, SC#sconf.authdirs),
           partial_post_size = lkup(partial_post_size, SL,
                                    SC#sconf.partial_post_size),
           appmods = lkup(appmods, SL, SC#sconf.appmods),
           expires = lkup(expires, SL, SC#sconf.expires),
           errormod_401 = lkup(errormod_401, SL,
                               SC#sconf.errormod_401),
           errormod_404 = lkup(errormod_404, SL,
                               SC#sconf.errormod_404),
           errormod_crash = lkup(errormod_crash, SL,
                                 SC#sconf.errormod_crash),
           arg_rewrite_mod = lkup(arg_rewrite_mod, SL,
                                  SC#sconf.arg_rewrite_mod),
           logger_mod = lkup(logger_mod, SL, SC#sconf.logger_mod),
           opaque = lkup(opaque, SL, SC#sconf.opaque),
           start_mod = lkup(start_mod, SL, SC#sconf.start_mod),
           allowed_scripts = lkup(allowed_scripts, SL,
                                  SC#sconf.allowed_scripts),
           tilde_allowed_scripts = lkup(tilde_allowed_scripts, SL,
                                        SC#sconf.tilde_allowed_scripts),
           index_files = lkup(index_files, SL, SC#sconf.index_files),
           revproxy = lkup(revproxy, SL, SC#sconf.revproxy),
           soptions = lkup(soptions, SL, SC#sconf.soptions),
           extra_cgi_vars = lkup(extra_cgi_vars, SL,
                                 SC#sconf.extra_cgi_vars),
           stats = lkup(stats, SL, SC#sconf.stats),
           fcgi_app_server = lkup(fcgi_app_server, SL,
                                  SC#sconf.fcgi_app_server),
           php_handler = lkup(php_handler, SL, SC#sconf.php_handler),
           shaper = lkup(shaper, SL, SC#sconf.shaper),
           deflate_options = setup_deflate(SL, SC#sconf.deflate_options),
           mime_types_info = setup_mime_types_info(
                               SL, SC#sconf.mime_types_info
                              ),
           %% NOTE(review): the property key here is 'dispatchmod' (no
           %% underscore) while the record field is dispatch_mod —
           %% presumably matching the config parser; confirm before renaming.
           dispatch_mod = lkup(dispatchmod, SL, SC#sconf.dispatch_mod)
          }.
%% set_sc_flags(Props, Flags) -> Flags'
%% Fold {FlagName, boolean()} options into the per-server flag bitmask.
%% Unlike set_gc_flags/2, unknown flags are logged before being skipped.
set_sc_flags([{access_log, Bool}|T], Flags) ->
    set_sc_flags(T, flag(Flags, ?SC_ACCESS_LOG, Bool));
set_sc_flags([{auth_log, Bool}|T], Flags) ->
    set_sc_flags(T, flag(Flags, ?SC_AUTH_LOG, Bool));
set_sc_flags([{add_port, Bool}|T], Flags) ->
    set_sc_flags(T, flag(Flags, ?SC_ADD_PORT, Bool));
set_sc_flags([{statistics, Bool}|T], Flags) ->
    set_sc_flags(T, flag(Flags, ?SC_STATISTICS, Bool));
set_sc_flags([{tilde_expand, Bool}|T], Flags) ->
    set_sc_flags(T, flag(Flags, ?SC_TILDE_EXPAND, Bool));
set_sc_flags([{dir_listings, Bool}|T], Flags) ->
    set_sc_flags(T, flag(Flags, ?SC_DIR_LISTINGS, Bool));
set_sc_flags([{deflate, Bool}|T], Flags) ->
    set_sc_flags(T, flag(Flags, ?SC_DEFLATE, Bool));
set_sc_flags([{dir_all_zip, Bool}|T], Flags) ->
    set_sc_flags(T, flag(Flags, ?SC_DIR_ALL_ZIP, Bool));
set_sc_flags([{dav, Bool}|T], Flags) ->
    set_sc_flags(T, flag(Flags, ?SC_DAV, Bool));
set_sc_flags([{fcgi_trace_protocol, Bool}|T], Flags) ->
    set_sc_flags(T, flag(Flags, ?SC_FCGI_TRACE_PROTOCOL, Bool));
set_sc_flags([{fcgi_log_app_error, Bool}|T], Flags) ->
    set_sc_flags(T, flag(Flags, ?SC_FCGI_LOG_APP_ERROR, Bool));
set_sc_flags([{forward_proxy, Bool}|T], Flags) ->
    set_sc_flags(T, flag(Flags, ?SC_FORWARD_PROXY, Bool));
set_sc_flags([{auth_skip_docroot, Bool}|T], Flags) ->
    set_sc_flags(T, flag(Flags, ?SC_AUTH_SKIP_DOCROOT, Bool));
set_sc_flags([_Unknown|T], Flags) ->
    error_logger:format("Unknown and unhandled flag ~p~n", [_Unknown]),
    set_sc_flags(T, Flags);
set_sc_flags([], Flags) ->
    Flags.
%% Look up Key in a list of {Key, Value} pairs, returning Def when the
%% key is absent or the matching element is not a 2-tuple.
lkup(Key, List, Def) ->
    case lists:keysearch(Key, 1, List) of
        {value, {_, Value}} -> Value;
        _                   -> Def
    end.
%% hup() -> no socket variant; reload synchronously in the calling process.
hup() ->
    dohup(undefined).
%% hup(Sock) -> pid()
%% Reload in a fresh process (group leader redirected to 'user' so any
%% output goes to the console) and report the result on Sock.
hup(Sock) ->
    spawn(fun() ->
                  group_leader(whereis(user), self()),
                  dohup(Sock)
          end).
%% dohup(Sock | undefined)
%% Re-read the Yaws configuration and apply it via yaws_api:setconf/2.
%% The result (ok/error) is broadcast as a yaws_hupped event, the logs
%% are rotated, and when a control socket is given the result is written
%% to it and the socket closed.
dohup(Sock) ->
    Env = yaws_sup:get_app_args(),
    Res = try yaws_config:load(Env) of
              {ok, Gconf, Sconfs} -> yaws_api:setconf(Gconf, Sconfs);
              Err -> Err
          catch
              _:X ->
                  X
          end,
    gen_event:notify(yaws_event_manager, {yaws_hupped, Res}),
    yaws_log:rotate(Res),
    case Sock of
        undefined ->
            {yaws_hupped, Res};
        _ ->
            gen_tcp:send(Sock, io_lib:format("hupped: ~p~n", [Res])),
            gen_tcp:close(Sock)
    end.
%%% misc funcs
%% Return {ok, Value, Elem} for the first element whose predicate call
%% yields {ok, Value} or plain ok; false when no element matches.
first(_Pred, []) ->
    false;
first(Pred, [Head | Rest]) ->
    case Pred(Head) of
        {ok, Val} -> {ok, Val, Head};
        ok        -> {ok, ok, Head};
        _Other    -> first(Pred, Rest)
    end.
%% Convenience wrapper around error_logger:format/2.
elog(F, As) ->
    error_logger:format(F, As).
%% filesize(Fname) -> {ok, Bytes} | {error, FileType} | {error, Reason}
%% Size of a regular file; non-regular entries (directory, device, ...)
%% report their type as the error term.
filesize(Fname) ->
    case file:read_file_info(Fname) of
        {ok, FI} when FI#file_info.type == regular ->
            {ok, FI#file_info.size};
        {ok, FI} ->
            {error, FI#file_info.type};
        Err ->
            Err
    end.
%% upto(I, Str) -> Str'
%% Keep at most I characters of Str, appending " ...." when truncated
%% either by length or at an embedded NUL byte.
upto(_I, []) -> [];
upto(0, _) -> " ....";
upto(_I, [0|_]) -> " ....";
upto(I, [H|T]) -> [H|upto(I-1, T)].
%% Render a scalar as a string: floats with two decimals, integers and
%% atoms via their standard conversions, anything else through
%% lists:concat/1.
to_string(X) ->
    if
        is_float(X)   -> io_lib:format("~.2.0f", [X]);
        is_integer(X) -> erlang:integer_to_list(X);
        is_atom(X)    -> atom_to_list(X);
        true          -> lists:concat([X])
    end.
%% Coerce a list or atom to a list; any other type is a function_clause.
to_list(L) when is_list(L) -> L;
to_list(A) when is_atom(A) -> atom_to_list(A).
%% integer_to_hex(I) -> string()
%% Uppercase hex rendering; falls back to the hand-rolled converter if
%% erlang:integer_to_list/2 is unavailable (very old runtimes).
integer_to_hex(I) ->
    case catch erlang:integer_to_list(I, 16) of
        {'EXIT', _} -> old_integer_to_hex(I);
        Int -> Int
    end.
%% Fallback hex converter (uppercase digits); meaningful only for
%% non-negative integers.
old_integer_to_hex(I) when I < 10 ->
    integer_to_list(I);
old_integer_to_hex(I) when I < 16 ->
    [I - 10 + $A];
old_integer_to_hex(I) ->
    old_integer_to_hex(I div 16) ++ old_integer_to_hex(I rem 16).
%% Parse a hex string (either case) into an integer.
hex_to_integer(Hex) ->
    erlang:list_to_integer(Hex, 16).
%% Render a byte list as a lowercase hex string, two digits per byte.
string_to_hex(Bytes) ->
    Digit = fun(N) when N > 9 -> N - 10 + $a;
               (N)            -> N + $0
            end,
    lists:flatmap(fun(B) -> [Digit(B div 16), Digit(B rem 16)] end, Bytes).
%% hex_to_string(Hex) -> Bytes
%% Inverse of string_to_hex/1: pair up hex digits (right to left) into
%% bytes.  The foldr accumulator carries 'nolow' until a low nibble has
%% been seen, so an odd-length input drops its leading nibble.
hex_to_string(Hex) ->
    DEHEX = fun (H) when H >= $a -> H - $a + 10;
                (H) when H >= $A -> H - $A + 10;
                (H) -> H - $0
            end,
    {String, _} =
        lists:foldr(fun (E, {Acc, nolow}) -> {Acc, DEHEX(E)};
                        (E, {Acc, LO}) -> {[DEHEX(E)*16+LO|Acc], nolow}
                    end, {[], nolow}, Hex),
    String.
%% RFC 1123 style date formatting ("Mon, 01 Jan 2020 12:00:00 GMT").
universal_time_as_string() ->
    universal_time_as_string(calendar:universal_time()).
universal_time_as_string(UTime) ->
    time_to_string(UTime, "GMT").
%% Convert a local calendar time to its GMT string form.
local_time_as_gmt_string(LocalTime) ->
    time_to_string(erlang:localtime_to_universaltime(LocalTime), "GMT").
%% Format a calendar datetime with an explicit zone label; returns an
%% iolist.
time_to_string({{Year, Month, Day}, {Hour, Min, Sec}}, Zone) ->
    [day(Year, Month, Day), ", ",
     mk2(Day), " ", month(Month), " ", erlang:integer_to_list(Year), " ",
     mk2(Hour), ":", mk2(Min), ":", mk2(Sec), " ", Zone].
%% Two-digit zero-padded integer rendering.
mk2(I) when I < 10 -> [$0 | erlang:integer_to_list(I)];
mk2(I) -> erlang:integer_to_list(I).
%% Three-letter weekday name for a date.
day(Year, Month, Day) ->
    int_to_wd(calendar:day_of_the_week(Year, Month, Day)).
%% calendar:day_of_the_week/3 numbering: 1 = Monday .. 7 = Sunday.
int_to_wd(1) -> "Mon";
int_to_wd(2) -> "Tue";
int_to_wd(3) -> "Wed";
int_to_wd(4) -> "Thu";
int_to_wd(5) -> "Fri";
int_to_wd(6) -> "Sat";
int_to_wd(7) -> "Sun".
%% Month number (1..12) to its three-letter English abbreviation.
month(1) -> "Jan";
month(2) -> "Feb";
month(3) -> "Mar";
month(4) -> "Apr";
month(5) -> "May";
month(6) -> "Jun";
month(7) -> "Jul";
month(8) -> "Aug";
month(9) -> "Sep";
month(10) -> "Oct";
month(11) -> "Nov";
month(12) -> "Dec".
%% Inverse of month/1; any other string is a function_clause error.
month_str_to_int("Jan") -> 1;
month_str_to_int("Feb") -> 2;
month_str_to_int("Mar") -> 3;
month_str_to_int("Apr") -> 4;
month_str_to_int("May") -> 5;
month_str_to_int("Jun") -> 6;
month_str_to_int("Jul") -> 7;
month_str_to_int("Aug") -> 8;
month_str_to_int("Sep") -> 9;
month_str_to_int("Oct") -> 10;
month_str_to_int("Nov") -> 11;
month_str_to_int("Dec") -> 12.
%% Parse an RFC 1123 date string ("Tue, 01 Jan 2020 12:00:00 GMT") into
%% a calendar datetime tuple.  The parse is strictly positional; any
%% deviation raises (callers wrap in catch, see is_modified_p/2).
stringdate_to_datetime([$ |T]) ->
    stringdate_to_datetime(T);
stringdate_to_datetime([_D1, _D2, _D3, $\,, $ |Tail]) ->
    %% Skip the weekday name and the following ", ".
    stringdate_to_datetime1(Tail).
%% Day of month: two digits or one.
stringdate_to_datetime1([A, B, $\s |T]) ->
    stringdate_to_datetime2(T, erlang:list_to_integer([A,B]));
stringdate_to_datetime1([A, $\s |T]) ->
    stringdate_to_datetime2(T, erlang:list_to_integer([A])).
%% "Mon YYYY HH:MM:SS GMT" tail, matched character by character.
stringdate_to_datetime2([M1, M2, M3, $\s , Y1, Y2, Y3, Y4, $\s,
                         H1, H2, $:, Min1, Min2,$:,
                         S1, S2,$\s ,$G, $M, $T|_], Day) ->
    {{erlang:list_to_integer([Y1,Y2,Y3,Y4]),
      month_str_to_int([M1, M2, M3]), Day},
     {erlang:list_to_integer([H1, H2]),
      erlang:list_to_integer([Min1, Min2]),
      erlang:list_to_integer([S1, S2])}}.
%% used by If-Modified-Since header code
%% is_modified_p(FileInfo, IfModSinceStr) -> boolean()
%% True when the file's mtime is newer than the client-supplied date.
%% An unparsable date is treated as modified (always resend).
is_modified_p(FI, UTC_string) ->
    case catch stringdate_to_datetime(UTC_string) of
        {'EXIT', _ } ->
            true;
        UTC ->
            MtimeUTC = erlang:localtime_to_universaltime(FI#file_info.mtime),
            (MtimeUTC > UTC)
    end.
%% Spawn a linked process that sends Msg to the caller every Time ms.
ticker(Time, Msg) ->
    ticker(Time, self(), Msg).
%% Same, with an explicit destination pid.  trap_exit lets the ticker
%% notice when the receiver dies.
ticker(Time, To, Msg ) ->
    spawn_link(fun() ->
                       process_flag(trap_exit, true),
                       yaws_ticker:ticker(Time, To, Msg)
               end).
%% HTML <address> footer for generated pages; the server signature comes
%% from the vhost (#sconf.yaws) or, if unset, the global config.
%% Reads sc/gc from the process dictionary.
address() ->
    Sc = get(sc),
    ?F("<address> ~s Server at ~s </address>",
       [case Sc#sconf.yaws of
            undefined -> (get(gc))#gconf.yaws;
            Signature -> Signature
        end, Sc#sconf.servername]).
%% True for the four whitespace characters Yaws strips: space, CR, LF
%% and TAB.
is_space(C) ->
    lists:member(C, [$\s, $\r, $\n, $\t]).
%% Strip whitespace from both ends of String.
strip_spaces(String) ->
    strip_spaces(String, both).
%% Directional stripping: left / right / both.
strip_spaces(String, left) ->
    drop_spaces(String);
strip_spaces(String, right) ->
    %% Reverse, strip the (now leading) spaces, reverse back.
    lists:reverse(drop_spaces(lists:reverse(String)));
strip_spaces(String, both) ->
    strip_spaces(drop_spaces(String), right).
%% Drop leading whitespace (space, CR, LF, TAB) from a string.
drop_spaces(Str) ->
    lists:dropwhile(fun(C) -> lists:member(C, [$\s, $\r, $\n, $\t]) end,
                    Str).
%%% basic uuencode and decode functionality
%% uuencode a byte list; the terminating line is enc(0) ("`") + newline.
list_to_uue(L) -> list_to_uue(L, []).
list_to_uue([], Out) ->
    lists:reverse([$\n,enc(0)|Out]);
list_to_uue(L, Out) ->
    %% Encode in classic 45-byte uuencode chunks, one output line each.
    {L45, L1} = get_45(L),
    Encoded = encode_line(L45),
    list_to_uue(L1, lists:reverse(Encoded, Out)).
%% uudecode back to the original byte list.
uue_to_list(L) ->
    uue_to_list(L, []).
uue_to_list([], Out) ->
    lists:reverse(Out);
uue_to_list(L, Out) ->
    {Decoded, L1} = decode_line(L),
    uue_to_list(L1, lists:reverse(Decoded, Out)).
%% One uuencoded output line: length character followed by the payload.
encode_line(L) ->
    [enc(length(L))|encode_line1(L)].
%% Pack bytes 3-at-a-time into four 6-bit printable characters; short
%% tails are zero-padded.  Octal masks select the classic uuencode bit
%% slices.
encode_line1([C0, C1, C2|T]) ->
    Char1 = enc(C0 bsr 2),
    Char2 = enc((C0 bsl 4) band 8#60 bor (C1 bsr 4) band 8#17),
    Char3 = enc((C1 bsl 2) band 8#74 bor (C2 bsr 6) band 8#3),
    Char4 = enc(C2 band 8#77),
    [Char1,Char2,Char3,Char4|encode_line1(T)];
encode_line1([C1, C2]) ->
    encode_line1([C1, C2, 0]);
encode_line1([C]) ->
    encode_line1([C,0,0]);
encode_line1([]) ->
    [$\n].
%% Decode one line; a zero length character marks the terminator line.
decode_line([H|T]) ->
    case dec(H) of
        0 -> {[], []};
        Len -> decode_line(T, Len, [])
    end.
%% Unpack groups of four encoded chars back into up to three bytes,
%% honoring the declared byte count N for the final partial group.
decode_line([P0,P1,P2,P3|T], N, Out) when N >= 3->
    Char1 = 16#FF band ((dec(P0) bsl 2) bor (dec(P1) bsr 4)),
    Char2 = 16#FF band ((dec(P1) bsl 4) bor (dec(P2) bsr 2)),
    Char3 = 16#FF band ((dec(P2) bsl 6) bor dec(P3)),
    decode_line(T, N-3, [Char3,Char2,Char1|Out]);
decode_line([P0,P1,P2,_|T], 2, Out) ->
    Char1 = 16#FF band ((dec(P0) bsl 2) bor (dec(P1) bsr 4)),
    Char2 = 16#FF band ((dec(P1) bsl 4) bor (dec(P2) bsr 2)),
    {lists:reverse([Char2,Char1|Out]), tl(T)};
decode_line([P0,P1,_,_|T], 1, Out) ->
    Char1 = 16#FF band ((dec(P0) bsl 2) bor (dec(P1) bsr 4)),
    {lists:reverse([Char1|Out]), tl(T)};
decode_line(T, 0, Out) ->
    %% tl/1 skips the trailing newline of the line just decoded.
    {lists:reverse(Out), tl(T)}.
%% Split a list after at most 45 elements (the uuencode line payload
%% size): {FirstChunk, Rest}.
get_45(L) when length(L) =< 45 ->
    {L, []};
get_45(L) ->
    lists:split(45, L).

%% Explicit tail-recursive worker retained for callers of the /3 arity.
get_45(Rest, 0, Acc)     -> {lists:reverse(Acc), Rest};
get_45([], _Left, Acc)   -> {lists:reverse(Acc), []};
get_45([H|T], Left, Acc) -> get_45(T, Left - 1, [H|Acc]).
%% enc/1 is the basic 1 character encoding function to make a char printing
%% dec/1 is the inverse
%% Zero encodes as backtick (historical uuencode convention, avoiding a
%% bare space); otherwise the 6-bit value is offset into printable ASCII.
enc(0) -> $`;
enc(C) -> (C band 8#77) + $ .
%% Inverse mapping back to a 6-bit value (backtick decodes to 0 too).
dec(Char) -> (Char - $ ) band 8#77.
%% Print the Yaws version banner and stop the node (CLI helper).
printversion() ->
    io:format("Yaws ~s~n", [yaws_generated:version()]),
    init:stop().
%% our default arg rewriter does of course nothing
arg_rewrite(A) ->
    A.
%% Classify a vhost config as ssl/nossl from its #sconf.ssl field.
is_ssl(#sconf{ssl = undefined}) -> nossl;
is_ssl(#sconf{ssl = S}) when is_record(S, ssl) -> ssl.
%% Lower-case a single ASCII character; anything outside A..Z is
%% returned unchanged.
to_lowerchar(Ch) ->
    if
        Ch >= $A, Ch =< $Z -> Ch + ($a - $A);
        true               -> Ch
    end.
%% ASCII lower-casing for strings; atoms are first converted to their
%% textual form.
to_lower(A) when is_atom(A) ->
    to_lower(atom_to_list(A));
to_lower(Str) ->
    [if C >= $A, C =< $Z -> C + ($a - $A); true -> C end || C <- Str].
%% Map Fun over List and return the results in reverse order, i.e.
%% equivalent to lists:reverse(lists:map(Fun, List)).
funreverse(List, Fun) ->
    lists:foldl(fun(Elem, Acc) -> [Fun(Elem) | Acc] end, [], List).

%% Tail-recursive worker retained for callers of the /3 arity.
funreverse([H|T], Fun, Acc) -> funreverse(T, Fun, [Fun(H) | Acc]);
funreverse([], _Fun, Acc)   -> Acc.
%% is arg1 a prefix of arg2?  Returns {true, Remainder} on success,
%% false otherwise.
is_prefix([], Rest)       -> {true, Rest};
is_prefix([H|T1], [H|T2]) -> is_prefix(T1, T2);
is_prefix(_, _)           -> false.
%% Split a string of words separated by Sep into a list of words and
%% strip off white space.
%%
%% HTML semantics are used, such that empty words are omitted.
%% split_sep/2 is the entry point; /3 skips runs of separators between
%% words and /4 accumulates the current word (AccW) and finished words
%% (AccL), both kept reversed until emission.
split_sep(undefined, _Sep) ->
    [];
split_sep(L, Sep) ->
    case drop_spaces(L) of
        [] -> [];
        [Sep|T] -> split_sep(T, Sep);
        [C|T] -> split_sep(T, Sep, [C], [])
    end.
%% Between words: consume separators, start a new word at the first
%% other character.
split_sep([], _Sep, AccL) ->
    lists:reverse(AccL);
split_sep([Sep|T], Sep, AccL) ->
    split_sep(T, Sep, AccL);
split_sep([C|T], Sep, AccL) ->
    split_sep(T, Sep, [C], AccL).
%% Inside a word: a separator (or end of input) finishes the word, with
%% trailing whitespace stripped via the reverse/drop_spaces trick.
split_sep([], _Sep, AccW, AccL) ->
    lists:reverse([lists:reverse(drop_spaces(AccW))|AccL]);
split_sep([Sep|Tail], Sep, AccW, AccL) ->
    split_sep(drop_spaces(Tail), Sep, [lists:reverse(drop_spaces(AccW))|AccL]);
split_sep([C|Tail], Sep, AccW, AccL) ->
    split_sep(Tail, Sep, [C|AccW], AccL).
%% Join strings with separator. Same as string:join in later
%% versions of Erlang. Separator is expected to be a list.
join_sep([], Sep) when is_list(Sep) ->
    [];
join_sep([First | Rest], Sep) ->
    lists:foldl(fun(Word, Acc) -> Acc ++ Sep ++ Word end, First, Rest).
%% header parsing
%% Split "value;q=0.5" into {"value", QValue} where QValue is the
%% quality scaled by 1000; a missing q parameter means 1000 (q=1).
parse_qval(S) ->
    parse_qval([], S).
parse_qval(A, ";q="++Q) -> {lists:reverse(A), parse_qvalue(Q)};
parse_qval(A, "") -> {lists:reverse(A), 1000};
parse_qval(A, [C|T]) -> parse_qval([C|A], T).
%% Parse an RFC quality value ("0", "1", "0.75", ...) into 0..1000;
%% malformed input yields 0 (i.e. "not acceptable").
parse_qvalue("0") -> 0;
parse_qvalue("0.") -> 0;
parse_qvalue("1") -> 1000;
parse_qvalue("1.") -> 1000;
parse_qvalue("1.0") -> 1000;
parse_qvalue("1.00") -> 1000;
parse_qvalue("1.000") -> 1000;
parse_qvalue("0."++[D1]) -> three_digits_to_integer(D1,$0,$0);
parse_qvalue("0."++[D1,D2]) -> three_digits_to_integer(D1,D2,$0);
parse_qvalue("0."++[D1,D2,D3]) -> three_digits_to_integer(D1,D2,D3);
parse_qvalue(_) -> 0. %% error
%% Convert three ASCII digit characters into the integer they spell,
%% e.g. $1,$2,$3 -> 123.
three_digits_to_integer(D1, D2, D3) ->
    (D1 - $0) * 100 + (D2 - $0) * 10 + (D3 - $0).
%% Gzip encoding
%% accepts_gzip(Headers, Mime) -> boolean()
%% Decide whether the client accepts gzip for this mime type: join all
%% Accept-Encoding headers, parse their q-values, and require q > 0.1
%% for "gzip" or "*", excluding user agents with known-broken gzip.
accepts_gzip(H, Mime) ->
    %% Accept-Encoding is not a dedicated #headers field; scan 'other'.
    case [Val || {_,_,'Accept-Encoding',_,Val}<- H#headers.other] of
        [] ->
            false;
        [_|_]=AcceptEncoding0 ->
            AcceptEncoding = join_sep(AcceptEncoding0, ","),
            EncList = [parse_qval(X) || X <- split_sep(AcceptEncoding, $,)],
            case [Q || {"gzip",Q} <- EncList] ++ [Q || {"*",Q} <- EncList] of
                [] ->
                    false;
                [Q|_] ->
                    (Q > 100) %% just for fun
                        and not has_buggy_gzip(H#headers.user_agent, Mime)
            end
    end.
%%% Advice partly taken from Apache's documentation of `mod_deflate'.
%% has_buggy_gzip(UserAgent, Mime) -> boolean()
%% Heuristics for user agents whose gzip handling is broken for the
%% given mime type.
%% Only Netscape 4.06-4.08 is really broken.
has_buggy_gzip("Mozilla/4.06"++_, _) ->
    true;
has_buggy_gzip("Mozilla/4.07"++_, _) ->
    true;
has_buggy_gzip("Mozilla/4.08"++_, _) ->
    true;
%% Everything else handles at least HTML.
has_buggy_gzip(_, "text/html") ->
    false;
has_buggy_gzip(UserAgent, Mime) ->
    %% Tokenize the UA string and inspect its product/comment structure.
    UA = parse_ua(UserAgent),
    in_ua(fun("Mozilla/4"++_) ->
                  %% Netscape 4.x may choke on anything not HTML.
                  case Mime of
                      %% IE doesn't, but some versions are said to have issues
                      %% with plugins.
                      "application/pdf" ->
                          true;
                      _ -> not in_comment(
                             fun("MSIE"++_) -> true;
                                (_) -> false
                             end, UA)
                  end;
             ("w3m"++_) ->
                  %% W3m does not decompress when saving.
                  true;
             ("Opera") ->
                  %% Opera 6 does not uncompress downloads.
                  in_ua(fun("6."++_) -> true;
                           (_) -> false
                        end, UA);
             ("Opera/6."++_) ->
                  true;
             (_) ->
                  false
          end, UA).
%%% Parsing of User-Agent header.
%% Tokenize a User-Agent string: parens become the atoms popen/pclose,
%% a backslash escapes the following character, everything else passes
%% through.  Acc is accumulated reversed and flipped at the end.
tokenize_ua(Str, Acc) ->
    case Str of
        []            -> lists:reverse(Acc);
        [$\\, C | T]  -> tokenize_ua(T, [C | Acc]);
        [$( | T]      -> tokenize_ua(T, [popen | Acc]);
        [$) | T]      -> tokenize_ua(T, [pclose | Acc]);
        [C | T]       -> tokenize_ua(T, [C | Acc])
    end.
%% Parse a User-Agent header into a list of {ua, String} product tokens
%% and {comment, [...]} groups; any parse error yields [].
parse_ua(Line) ->
    case catch parse_ua_l(tokenize_ua(Line, [])) of
        {'EXIT', _} -> [];
        Res -> Res
    end.
%% Walk the token list, alternating between comment groups (popen..pclose)
%% and plain product tokens.
parse_ua_l(Line) ->
    case drop_spaces(Line) of
        [] ->
            [];
        [popen|T] ->
            {Comment, Tail} = parse_comment(T),
            [Comment | parse_ua_l(Tail)];
        [pclose|T] ->
            %% Error, ignore
            parse_ua_l(T);
        L ->
            {UA, Tail} = parse_ua1(L),
            [UA | parse_ua_l(Tail)]
    end.
%% Parse a parenthesized UA comment (tokens after popen) into
%% {{comment, Parts}, RemainingTokens}.  Parts are the ';'-separated
%% strings and nested {comment, ...} groups.
parse_comment(L) ->
    parse_comment(L, [], []).
%% CAcc accumulates the current part (reversed); CsAcc the finished parts.
parse_comment([], _, _) ->
    %% Error
    {error, []};
parse_comment([pclose|T], CAcc, CsAcc) ->
    {{comment, lists:reverse([lists:reverse(CAcc)|CsAcc])}, T};
parse_comment([popen|T], CAcc, CsAcc) ->
    %% Nested comment group.
    {Comment, Tail} = parse_comment(T),
    parse_comment(drop_spaces(Tail), [], [Comment, lists:reverse(CAcc)|CsAcc]);
parse_comment([$;|T], CAcc, CsAcc) ->
    parse_comment(drop_spaces(T), [], [lists:reverse(CAcc)|CsAcc]);
parse_comment([C|T], CAcc, CsAcc) ->
    parse_comment(T, [C|CAcc], CsAcc).
%% Parse one product token (e.g. "Mozilla/5.0") up to a space, paren or
%% end of input, returning {{ua, String}, RemainingTokens}.
parse_ua1(L) ->
    parse_ua1(L, []).
parse_ua1([], Acc) ->
    {{ua,lists:reverse(Acc)}, []};
parse_ua1([popen|T], Acc) ->
    %% A comment starts here; leave popen for the caller to consume.
    {{ua, lists:reverse(Acc)}, [popen|T]};
parse_ua1([pclose|T], _Acc) ->
    {error, T};
parse_ua1([$ |T], Acc) ->
    {{ua, lists:reverse(Acc)}, T};
parse_ua1([C|T], Acc) ->
    parse_ua1(T, [C|Acc]).
%% Does any {ua, String} product token in a parsed User-Agent list
%% satisfy Pred?
in_ua(Pred, Tokens) ->
    lists:any(fun(Token) ->
                      case Token of
                          {ua, UA} -> Pred(UA);
                          _        -> false
                      end
              end, Tokens).
%% Does any string inside any {comment, ...} group (at any nesting
%% depth) satisfy Pred?
in_comment(_Pred, []) ->
    false;
in_comment(Pred, [{comment, Cs}|T]) ->
    case in_comment_l(Pred, Cs) of
        true -> true;
        false -> in_comment(Pred, T)
    end;
in_comment(Pred, [_|T]) ->
    in_comment(Pred, T).
%% Check the parts of one comment group, recursing into nested groups.
in_comment_l(Pred, Cs) ->
    lists:any(fun({comment, Cs1}) -> in_comment_l(Pred, Cs1);
                 (error) -> false;
                 (L) -> Pred(L)
              end, Cs).
%% imperative out header management
%% All outh_* functions mutate the #outh{} response-header record kept
%% under the 'outh' key in the process dictionary.
outh_set_status_code(Code) ->
    put(outh, (get(outh))#outh{status = Code}),
    ok.
%% The HTTP version argument is currently ignored; no-cache is always used.
outh_set_non_cacheable(_Version) ->
    put(outh, (get(outh))#outh{cache_control = "Cache-Control: no-cache\r\n"}),
    ok.
outh_set_content_type(Mime) ->
    put(outh, (get(outh))#outh{content_type = make_content_type_header(Mime)}),
    ok.
%% identity clears the header; only 'deflate' produces one.
outh_set_content_encoding(Encoding) ->
    ContentEncoding = case Encoding of
                          identity -> undefined;
                          deflate  -> make_content_encoding_header(Encoding)
                      end,
    put(outh, (get(outh))#outh{encoding = Encoding,
                               content_encoding = ContentEncoding}),
    ok.
%% Overwrites (does not append to) any previous Set-Cookie value;
%% accumulate_header({set_cookie, ...}) appends instead.
outh_set_cookie(C) ->
    put(outh, (get(outh))#outh{set_cookie = ["Set-Cookie: ", C, "\r\n"]}),
    ok.
%% Reset all headers except the status code and force a closing,
%% non-chunked response (used when emitting error responses).
outh_clear_headers() ->
    H = get(outh),
    put(outh, #outh{status = H#outh.status,
                    doclose = true,
                    chunked = false,
                    connection = make_connection_close_header(true)}),
    ok.
%% Default to serving the whole file (no Range request).
outh_set_static_headers(Req, UT, Headers) ->
    outh_set_static_headers(Req, UT, Headers, all).
%% outh_set_static_headers(Req, UrlType, Headers, Range)
%% Fill the process-dictionary #outh{} for a static file response:
%% decides gzip (from cache, dynamically, or not at all), content length,
%% chunking, connection persistence, and all standard headers.
outh_set_static_headers(Req, UT, Headers, Range) ->
    H = get(outh),
    FIL = (UT#urltype.finfo)#file_info.size,
    {DoClose0, Chunked0} = dcc(Req, Headers),
    {DoDeflate, Length}
        = case Range of
              all ->
                  case UT#urltype.deflate of
                      DB when is_binary(DB) -> % cached
                          %% Remove charset
                          [Mime|_] = yaws:split_sep(UT#urltype.mime, $;),
                          case accepts_gzip(Headers, Mime) of
                              true -> {true, size(DB)};
                              false -> {false, FIL}
                          end;
                      undefined ->
                          {false, FIL};
                      dynamic ->
                          %% Compressed on the fly: length unknown up front.
                          %% Remove charset
                          [Mime|_] = yaws:split_sep(UT#urltype.mime, $;),
                          case accepts_gzip(Headers, Mime) of
                              true -> {true, undefined};
                              false -> {false, FIL}
                          end
                  end;
              {fromto, From, To, _} ->
                  %% Byte-range replies are never deflated here.
                  {false, To - From + 1}
          end,
    Encoding = case DoDeflate of
                   true -> decide;
                   false -> identity
               end,
    %% Chunk only when the client allows it AND the length is unknown.
    Chunked = Chunked0 and (Length == undefined),
    DoClose = if
                  DoClose0 == true ->
                      true;
                  ((Length == undefined) and not Chunked) ->
                      %% We cannot keep the connection alive, because the client
                      %% has no way of knowing the end of the content data.
                      true;
                  DoClose0 == keep_alive ->
                      keep_alive;
                  true ->
                      DoClose0
              end,
    H2 = H#outh{
           status = case Range of
                        all -> 200;
                        {fromto, _, _, _} -> 206
                    end,
           chunked = Chunked,
           encoding = Encoding,
           date = make_date_header(),
           server = make_server_header(),
           last_modified = make_last_modified_header(UT#urltype.finfo),
           etag = make_etag_header(UT#urltype.finfo),
           content_range = make_content_range_header(Range),
           content_length = make_content_length_header(Length),
           content_type = make_content_type_header(UT#urltype.mime),
           content_encoding = make_content_encoding_header(Encoding),
           transfer_encoding = make_transfer_encoding_chunked_header(Chunked),
           connection = make_connection_close_header(DoClose),
           doclose = DoClose,
           contlen = Length
          },
    %% store finfo to set last_modified, expires and cache_control headers
    %% during #outh{} serialization.
    put(file_info, UT#urltype.finfo),
    put(outh, H2).
%% Fill #outh{} for a 304 Not Modified reply: no body (contlen 0, not
%% chunked) but with validators (Last-Modified, Etag) so caches can
%% refresh their metadata.
outh_set_304_headers(Req, UT, Headers) ->
    H = get(outh),
    {DoClose, _Chunked} = dcc(Req, Headers),
    H2 = H#outh{
           status = 304,
           chunked = false,
           date = make_date_header(),
           server = make_server_header(),
           last_modified = make_last_modified_header(UT#urltype.finfo),
           etag = make_etag_header(UT#urltype.finfo),
           content_length = make_content_length_header(0),
           connection = make_connection_close_header(DoClose),
           doclose = DoClose,
           contlen = 0
          },
    %% store finfo to set last_modified, expires and cache_control headers
    %% during #outh{} serialization.
    put(file_info, UT#urltype.finfo),
    put(outh, H2).
%% Fill #outh{} for dynamically generated content (scripts/appmods):
%% status 200, chunked when the protocol allows it, length unknown.
outh_set_dyn_headers(Req, Headers, UT) ->
    H = get(outh),
    {DoClose, Chunked} = dcc(Req, Headers),
    H2 = H#outh{
           status = 200,
           date = make_date_header(),
           server = make_server_header(),
           connection = make_connection_close_header(DoClose),
           content_type = make_content_type_header(UT#urltype.mime),
           doclose = DoClose,
           chunked = Chunked,
           transfer_encoding = make_transfer_encoding_chunked_header(Chunked)},
    %% store finfo to set last_modified, expires and cache_control headers
    %% during #outh{} serialization.
    put(file_info, UT#urltype.finfo),
    put(outh, H2).
%% Force the Connection header/doclose state to What
%% (true | false | keep_alive).
outh_set_connection(What) ->
    H = get(outh),
    H2 = H#outh{connection = make_connection_close_header(What),
                doclose = What},
    put(outh, H2),
    ok.
%% Set an explicit Content-Length (both header text and numeric contlen).
outh_set_content_length(Int) ->
    H = get(outh),
    H2 = H#outh{
           content_length = make_content_length_header(Int),
           contlen = Int
          },
    put(outh, H2).
%% Recompute doclose/chunked from the request version and headers and
%% store the matching Connection/Transfer-Encoding headers.
outh_set_dcc(Req, Headers) ->
    H = get(outh),
    {DoClose, Chunked} = dcc(Req, Headers),
    H2 = H#outh{
           connection = make_connection_close_header(DoClose),
           doclose = DoClose,
           chunked = Chunked,
           transfer_encoding = make_transfer_encoding_chunked_header(Chunked)
          },
    put(outh, H2).
%% can only turn if off, not on.
%% if it allready is off, it's off because the cli headers forced us.
outh_set_transfer_encoding_off() ->
    H = get(outh),
    H2 = H#outh{
           chunked = false,
           transfer_encoding = make_transfer_encoding_chunked_header(false)
          },
    put(outh, H2).
%% Append WWW-Authenticate challenge headers; [] is a no-op and multiple
%% calls accumulate.
outh_set_auth([]) ->
    ok;
outh_set_auth(Headers) ->
    H = get(outh),
    H2 = case H#outh.www_authenticate of
             undefined ->
                 H#outh{www_authenticate = Headers};
             _ ->
                 H#outh{www_authenticate = H#outh.www_authenticate ++ Headers}
         end,
    put(outh, H2).
%% Replace the Vary header with the given field list.
outh_set_vary(Fields) ->
    put(outh, (get(outh))#outh{vary = make_vary_header(Fields)}),
    ok.
%% If the response has neither a known length nor chunking, the client
%% can only detect the end of the body by connection close — force it.
outh_fix_doclose() ->
    H = get(outh),
    if
        (H#outh.doclose /= true) andalso
        (H#outh.contlen==undefined) andalso
        (H#outh.chunked == false) ->
            put(outh, H#outh{doclose = true,
                             connection = make_connection_close_header(true)});
        true ->
            ok
    end.
%% dcc(Req, Headers) -> {DoClose, Chunked}
%% Decide connection persistence and chunking from the HTTP version and
%% the client's Connection header.  exceedmaxuses (keepalive budget
%% spent) forces a close regardless of version.
dcc(Req, Headers) ->
    H = get(outh),
    DoClose = case Req#http_request.version of
                  _ when H#outh.exceedmaxuses == true ->
                      true; %% too many keepalives
                  {1, 0} ->
                      %% HTTP/1.0 defaults to close; only an explicit
                      %% "Keep-Alive" keeps the connection open.
                      case Headers#headers.connection of
                          "close" -> true;
                          "Keep-Alive" -> keep_alive;
                          _ -> true
                      end;
                  {1, 1} ->
                      %% HTTP/1.1 defaults to persistent connections.
                      Headers#headers.connection == "close";
                  {0,9} ->
                      true
              end,
    %% Chunked transfer encoding exists only in HTTP/1.1.
    Chunked = case Req#http_request.version of
                  {1, 0} -> false;
                  {1,1}  -> true;
                  {0,9} -> false
              end,
    {DoClose, Chunked}.
%%
%% The following all make_ function return an actual header string
%%
%% Allow header advertising the default method set.
make_allow_header() ->
    make_allow_header([]).
%% Build an Allow header from a list of method atoms, preserving their
%% order; an empty list advertises the fixed default set.
make_allow_header(Options) ->
    case Options of
        [] ->
            ["Allow: GET, POST, OPTIONS, HEAD\r\n"];
        _ ->
            Methods = [atom_to_list(M) || M <- Options],
            ["Allow: ", lists:flatten(lists:join(", ", Methods)), "\r\n"]
    end.
%% Server header from the vhost signature, falling back to the global
%% config signature.  Reads sc/gc from the process dictionary.
make_server_header() ->
    Sc = get(sc),
    Signature = case Sc#sconf.yaws of
                    undefined -> (get(gc))#gconf.yaws;
                    S -> S
                end,
    ["Server: ", Signature, "\r\n"].
%% Last-Modified from the file's (local) mtime, rendered as GMT.
make_last_modified_header(FI) ->
    Then = FI#file_info.mtime,
    ["Last-Modified: ", local_time_as_gmt_string(Then), "\r\n"].
%% make_expires_header(MimeType, FileInfo) ->
%%     {ExpiresHdr | undefined, CacheControlHdr | undefined}
%% Look up the mime type (charset suffix stripped) in the vhost's
%% configured expires rules.
make_expires_header(MimeType0, FI) ->
    SC = get(sc),
    %% Use split_sep to remove charset
    case yaws:split_sep(MimeType0, $;) of
        [] -> {undefined, undefined};
        [MimeType1|_] ->
            case lists:keyfind(MimeType1, 1, SC#sconf.expires) of
                {MimeType1, Type, TTL} -> make_expires_header(Type, TTL, FI);
                false -> {undefined, undefined}
            end
    end.
%% Expires/Cache-Control pair with TTL counted from access time ...
make_expires_header(access, TTL, _FI) ->
    Secs = calendar:datetime_to_gregorian_seconds(erlang:universaltime()),
    ExpireTime = calendar:gregorian_seconds_to_datetime(Secs+TTL),
    {["Expires: ", universal_time_as_string(ExpireTime), "\r\n"],
     ["Cache-Control: ", "max-age=", erlang:integer_to_list(TTL), "\r\n"]};
%% ... or from the file's modification time; max-age shrinks as the file
%% ages and is clamped at 0.
make_expires_header(modify, TTL, FI) ->
    %% mtime is local here
    Secs1 = calendar:datetime_to_gregorian_seconds(FI#file_info.mtime),
    Secs2 = calendar:datetime_to_gregorian_seconds(erlang:localtime()),
    ExpireTime = calendar:gregorian_seconds_to_datetime(Secs1+TTL),
    MaxAge = erlang:max(0, TTL - (Secs2 - Secs1)),
    {["Expires: ", local_time_as_gmt_string(ExpireTime), "\r\n"],
     ["Cache-Control: ", "max-age=", erlang:integer_to_list(MaxAge), "\r\n"]}.
%% Location header for redirects.
make_location_header(Where) ->
    ["Location: ", Where, "\r\n"].
%% Etag header for a static file.
make_etag_header(FI) ->
    ETag = make_etag(FI),
    ["Etag: ", ETag, "\r\n"].
%% Build a 66-bit fingerprint from mtime fields and the inode (year is
%% truncated to 8 bits) and render it as 11 base64-ish characters in
%% double quotes.
make_etag(FI) ->
    {{Y,M,D}, {H,Min, S}} = FI#file_info.mtime,
    Inode = FI#file_info.inode,
    pack_bin(<<0:6,(Y band 2#11111111):8,M:4,D:5,H:5,Min:6,S:6,Inode:32>>).
%% Emit the 11 six-bit groups of the fingerprint through pc/1.
pack_bin(<<_:6,A:6,B:6,C:6,D:6,E:6,F:6,G:6,H:6,I:6,J:6,K:6>>) ->
    [$", pc(A),pc(B),pc(C),pc(D),pc(E),pc(F),pc(G),pc(H),pc(I),pc(J),pc(K), $"].
%% Map a 6-bit value (0..63) onto a printable character using a
%% base64-style alphabet: A-Z, a-z, 0-9, '+', '/'.
pc(V) when V >= 0, V < 26  -> V + $A;
pc(V) when V >= 26, V < 52 -> V + $a - 26;
pc(V) when V >= 52, V < 62 -> V + $0 - 52;
pc(62) -> $+;
pc(63) -> $/.
%% Content-Type header; the no_content_type sentinel suppresses it.
%% NOTE: callers (outh_get_content_type/0) pattern-match this exact
%% three-element iolist shape — keep it.
make_content_type_header(no_content_type) ->
    undefined;
make_content_type_header(MimeType) ->
    ["Content-Type: ", MimeType, "\r\n"].
%% Content-Range for a {fromto, From, To, Total} byte range; 'all'
%% produces no header.
make_content_range_header(all) ->
    undefined;
make_content_range_header({fromto, From, To, Tot}) ->
    ["Content-Range: bytes ",
     erlang:integer_to_list(From), $-, erlang:integer_to_list(To),
     $/, erlang:integer_to_list(Tot), $\r, $\n].
%% Content-Length from an integer or a #file_info{}; anything else
%% (e.g. undefined length) yields no header.
make_content_length_header(Size) when is_integer(Size) ->
    ["Content-Length: ", erlang:integer_to_list(Size), "\r\n"];
make_content_length_header(FI) when is_record(FI, file_info) ->
    Size = FI#file_info.size,
    ["Content-Length: ", erlang:integer_to_list(Size), "\r\n"];
make_content_length_header(_) ->
    undefined.
%% The internal encoding atom is 'deflate' but the wire format
%% advertised is gzip.
make_content_encoding_header(deflate) ->
    "Content-Encoding: gzip\r\n";
make_content_encoding_header(_) ->
    undefined.
%% Connection header from the doclose state (true/false/keep_alive).
make_connection_close_header(true) ->
    "Connection: close\r\n";
make_connection_close_header(false) ->
    undefined;
make_connection_close_header(keep_alive) ->
    "Connection: Keep-Alive\r\n".
make_transfer_encoding_chunked_header(true) ->
    "Transfer-Encoding: chunked\r\n";
make_transfer_encoding_chunked_header(false) ->
    undefined.
%% WWW-Authenticate header: Basic challenge for {realm, R}, otherwise
%% the caller supplies the full challenge text.
make_www_authenticate_header({realm, Realm}) ->
    ["WWW-Authenticate: Basic realm=\"", Realm, ["\"\r\n"]];
make_www_authenticate_header(Method) ->
    ["WWW-Authenticate: ", Method, ["\r\n"]].
%% Date header, cached per process and refreshed at most every ~10s.
%% NOTE(review): the cache key is element(2, os:timestamp()), the
%% seconds-within-megasecond component, which wraps every 10^6 seconds;
%% at a wrap the stale cached value can be reused until the counter
%% catches up — presumably considered acceptable; confirm before relying
%% on header freshness across that boundary.
make_date_header() ->
    N = element(2, os:timestamp()),
    case get(date_header) of
        {_Str, Secs} when (Secs+10) < N ->
            H = ["Date: ", universal_time_as_string(), "\r\n"],
            put(date_header, {H, N}),
            H;
        {Str, _Secs} ->
            Str;
        undefined ->
            H = ["Date: ", universal_time_as_string(), "\r\n"],
            put(date_header, {H, N}),
            H
    end.
%% Vary header; a "*" member collapses the whole list to "*".
%% NOTE: outh_get_vary_fields/0 pattern-matches this exact
%% three-element iolist shape — keep it.
make_vary_header(Fields) ->
    case lists:member("*", Fields) of
        true -> ["Vary: ", "*", "\r\n"];
        false -> ["Vary: ", join_sep(Fields, ", "), "\r\n"]
    end.
%% access functions into the outh record
%% All read (and outh_inc_act_contlen/1 also writes) the #outh{} record
%% held under 'outh' in the process dictionary.
outh_get_status_code() ->
    (get(outh))#outh.status.
outh_get_contlen() ->
    (get(outh))#outh.contlen.
outh_get_act_contlen() ->
    (get(outh))#outh.act_contlen.
%% Add Int to the actual (delivered) content length counter and return
%% the new total; an unset counter starts at Int.
outh_inc_act_contlen(Int) ->
    O = get(outh),
    L = case O#outh.act_contlen of
            undefined -> Int;
            Len -> Len+Int
        end,
    put(outh, O#outh{act_contlen = L}),
    L.
outh_get_doclose() ->
    (get(outh))#outh.doclose.
outh_get_chunked() ->
    (get(outh))#outh.chunked.
outh_get_content_encoding() ->
    (get(outh))#outh.encoding.
outh_get_content_encoding_header() ->
    (get(outh))#outh.content_encoding.
%% Extract the bare mime type from the stored three-element
%% Content-Type iolist built by make_content_type_header/1.
outh_get_content_type() ->
    case (get(outh))#outh.content_type of
        undefined -> undefined;
        [_, Mime, _] -> Mime
    end.
%% Extract the Vary field list from the stored header iolist built by
%% make_vary_header/1.
outh_get_vary_fields() ->
    case (get(outh))#outh.vary of
        undefined -> [];
        [_, Fields, _] -> split_sep(Fields, $,)
    end.
%% outh_serialize() -> {StatusLine, Headers}
%% Render the accumulated #outh{} record into the HTTP/1.1 status line
%% and the header iolist.  Late decisions made here: default status 200,
%% Content-Encoding derived from the encoding atom if no explicit header
%% was set, Expires/Cache-Control computed from the stashed file_info,
%% and Accept-Encoding added to Vary when compression may apply.
outh_serialize() ->
    H = get(outh),
    Code = case H#outh.status of
               undefined -> 200;
               Int -> Int
           end,
    StatusLine = ["HTTP/1.1 ", erlang:integer_to_list(Code), " ",
                  yaws_api:code_to_phrase(Code), "\r\n"],
    GC=get(gc),
    if ?gc_has_debug(GC) -> yaws_debug:check_headers(H);
       true -> ok
    end,
    ContentEnc = case H#outh.content_encoding of
                     undefined -> make_content_encoding_header(H#outh.encoding);
                     CE -> CE
                 end,
    %% file_info was stashed by outh_set_*_headers; erase so it cannot
    %% leak into the next request on this process.
    {Expires, CacheControl} =
        case erase(file_info) of
            undefined ->
                {H#outh.expires, H#outh.cache_control};
            FI ->
                {E, CC} = case {H#outh.expires, H#outh.cache_control} of
                              {undefined, undefined} ->
                                  CT = outh_get_content_type(),
                                  make_expires_header(CT, FI);
                              _ ->
                                  {H#outh.expires, H#outh.cache_control}
                          end,
                {E, CC}
        end,
    %% Add 'Accept-Encoding' in the 'Vary:' header if the compression is enabled
    %% or if the response is compressed _AND_ if the response has a non-empty
    %% body.
    SC=get(sc),
    Vary = case (?sc_has_deflate(SC) orelse H#outh.encoding == deflate) of
               true when H#outh.contlen /= undefined, H#outh.contlen /= 0;
                         H#outh.act_contlen /= undefined,
                         H#outh.act_contlen /= 0 ->
                   Fields = outh_get_vary_fields(),
                   Fun = fun("*") -> true;
                            (F) -> (to_lower(F) == "accept-encoding")
                         end,
                   case lists:any(Fun, Fields) of
                       true -> H#outh.vary;
                       false -> make_vary_header(["Accept-Encoding"|Fields])
                   end;
               _ ->
                   H#outh.vary
           end,
    %% Unset headers serialize to [] via noundef/1 and so vanish.
    Headers = [noundef(H#outh.connection),
               noundef(H#outh.server),
               noundef(H#outh.location),
               noundef(H#outh.date),
               noundef(H#outh.allow),
               noundef(H#outh.last_modified),
               noundef(Expires),
               noundef(CacheControl),
               noundef(H#outh.etag),
               noundef(H#outh.content_range),
               noundef(H#outh.content_length),
               noundef(H#outh.content_type),
               noundef(ContentEnc),
               noundef(H#outh.set_cookie),
               noundef(H#outh.transfer_encoding),
               noundef(H#outh.www_authenticate),
               noundef(Vary),
               noundef(H#outh.other)],
    {StatusLine, Headers}.
%% Map 'undefined' to the empty iolist so unset headers vanish from the
%% serialized output; defined values pass through untouched.
noundef(Value) ->
    case Value of
        undefined -> [];
        Defined   -> Defined
    end.
%% accumulate_header({Header, Value}) -> term()
%%
%% Merge one response header into the #outh{} record held in the process
%% dictionary.  Headers known to Yaws (atom key, or the equivalent exact
%% string key) are stored in their dedicated #outh{} fields, several with
%% side effects on connection bookkeeping (doclose, chunked, contlen).
%% Unknown headers accumulate in #outh.other.  {Name, erase} with an atom
%% name removes a previously set special header via erase_header/1.
accumulate_header({X, erase}) when is_atom(X) ->
    erase_header(X);
%% special headers
accumulate_header({connection, What}) ->
    %% "close" also flips doclose so the transport is shut down after
    %% this response.
    DC = case What of
             "close" -> true;
             _ -> false
         end,
    H = get(outh),
    put(outh, H#outh{connection = ["Connection: ", What, "\r\n"],
                     doclose = DC});
accumulate_header({"Connection", What}) ->
    accumulate_header({connection, What});
accumulate_header({server, What}) ->
    put(outh, (get(outh))#outh{server = ["Server: ", What, "\r\n"]});
accumulate_header({"Server", What}) ->
    accumulate_header({server, What});
accumulate_header({location, What}) ->
    put(outh, (get(outh))#outh{location = ["Location: ", What, "\r\n"]});
accumulate_header({"Location", What}) ->
    accumulate_header({location, What});
accumulate_header({cache_control, What}) ->
    put(outh, (get(outh))#outh{cache_control = ["Cache-Control: ", What,
                                                "\r\n"]});
accumulate_header({"Cache-Control", What}) ->
    accumulate_header({cache_control, What});
accumulate_header({expires, What}) ->
    put(outh, (get(outh))#outh{expires = ["Expires: ", What, "\r\n"]});
accumulate_header({"Expires", What}) ->
    accumulate_header({expires, What});
accumulate_header({date, What}) ->
    put(outh, (get(outh))#outh{date = ["Date: ", What, "\r\n"]});
accumulate_header({"Date", What}) ->
    accumulate_header({date, What});
accumulate_header({allow, What}) ->
    %% Bug fix: this clause previously wrote the Allow value into the
    %% 'date' field, clobbering any Date header and leaving #outh.allow
    %% (the field outh_serialize/0 emits for Allow) unset.
    put(outh, (get(outh))#outh{allow = ["Allow: ", What, "\r\n"]});
accumulate_header({"Allow", What}) ->
    accumulate_header({allow, What});
accumulate_header({last_modified, What}) ->
    put(outh, (get(outh))#outh{last_modified = ["Last-Modified: ", What,
                                                "\r\n"]});
accumulate_header({"Last-Modified", What}) ->
    accumulate_header({last_modified, What});
accumulate_header({etag, What}) ->
    put(outh, (get(outh))#outh{etag = ["Etag: ", What, "\r\n"]});
accumulate_header({"Etag", What}) ->
    accumulate_header({etag, What});
accumulate_header({set_cookie, What}) ->
    %% Set-Cookie accumulates: multiple cookies yield multiple headers.
    O = get(outh),
    Old = case O#outh.set_cookie of
              undefined -> "";
              X -> X
          end,
    put(outh, O#outh{set_cookie = ["Set-Cookie: ", What, "\r\n"|Old]});
accumulate_header({"Set-Cookie", What}) ->
    accumulate_header({set_cookie, What});
accumulate_header({content_range, What}) ->
    put(outh, (get(outh))#outh{content_range = ["Content-Range: ", What,
                                                "\r\n"]});
accumulate_header({"Content-Range", What}) ->
    accumulate_header({content_range, What});
accumulate_header({content_type, What}) ->
    put(outh, (get(outh))#outh{content_type = ["Content-Type: ", What,
                                               "\r\n"]});
accumulate_header({"Content-Type", What}) ->
    accumulate_header({content_type, What});
accumulate_header({content_encoding, What}) ->
    %% "identity" clears the header; anything else is treated as a
    %% compressed (deflate-class) encoding.
    case What of
        "identity" ->
            put(outh, (get(outh))#outh{encoding = identity,
                                       content_encoding = undefined});
        _ ->
            put(outh, (get(outh))#outh{encoding = deflate,
                                       content_encoding = ["Content-Encoding: ",
                                                           What, "\r\n"]})
    end;
accumulate_header({"Content-Encoding", What}) ->
    accumulate_header({content_encoding, What});
accumulate_header({content_length, Len}) when is_integer(Len) ->
    %% An explicit length disables chunking and resets the delivered
    %% byte counter.
    H = get(outh),
    put(outh, H#outh{
                chunked = false,
                transfer_encoding = undefined,
                contlen = Len,
                act_contlen = 0,
                content_length = make_content_length_header(Len)});
accumulate_header({"Content-Length", Len}) ->
    case Len of
        I when is_integer(I) ->
            accumulate_header({content_length, I});
        L when is_list(L) ->
            accumulate_header({content_length, erlang:list_to_integer(L)})
    end;
accumulate_header({transfer_encoding, What}) ->
    put(outh, (get(outh))#outh{chunked = true,
                               contlen = 0,
                               transfer_encoding = ["Transfer-Encoding: ", What,
                                                    "\r\n"]});
accumulate_header({"Transfer-Encoding", What}) ->
    accumulate_header({transfer_encoding, What});
accumulate_header({www_authenticate, What}) ->
    put(outh, (get(outh))#outh{www_authenticate = ["WWW-Authenticate: ", What,
                                                   "\r\n"]});
accumulate_header({"WWW-Authenticate", What}) ->
    accumulate_header({www_authenticate, What});
accumulate_header({vary, What}) ->
    put(outh, (get(outh))#outh{vary = ["Vary: ", What, "\r\n"]});
accumulate_header({"Vary", What}) ->
    accumulate_header({vary, What});
%% non-special headers (which may be special in a future Yaws version)
accumulate_header({Name, What}) when is_list(Name) ->
    H = get(outh),
    Old = case H#outh.other of
              undefined -> [];
              V -> V
          end,
    H2 = H#outh{other = [Name, ": ", What, "\r\n", Old]},
    put(outh, H2);
%% backwards compatible clause
accumulate_header(Data) when is_list(Data) ->
    Str = lists:flatten(Data),
    accumulate_header(split_header(Str)).
%% Split a raw "Name: Value" header line into {Name, Value}.  A single
%% space after the colon is consumed; a missing colon yields {Line, ""}.
split_header(Line) ->
    split_header(Line, []).

split_header([], Name)          -> {lists:reverse(Name), ""};
split_header([$:, $  | V], Name) -> {lists:reverse(Name), V};
split_header([$: | V], Name)     -> {lists:reverse(Name), V};
split_header([Ch | Rest], Name)  -> split_header(Rest, [Ch | Name]).
%% erase_header/1 - reset a single header field in the #outh record held
%% in the process dictionary.  Some headers also reset related state:
%% connection clears doclose, content_length zeroes contlen,
%% content_encoding returns the encoder to 'decide', and
%% transfer_encoding leaves chunked mode.
erase_header(connection) ->
    put(outh, (get(outh))#outh{connection=undefined, doclose=false});
erase_header(server) ->
    put(outh, (get(outh))#outh{server=undefined});
erase_header(cache_control) ->
    put(outh, (get(outh))#outh{cache_control=undefined});
erase_header(expires) ->
    put(outh, (get(outh))#outh{expires=undefined});
erase_header(date) ->
    put(outh, (get(outh))#outh{date=undefined});
erase_header(allow) ->
    put(outh, (get(outh))#outh{allow=undefined});
erase_header(last_modified) ->
    put(outh, (get(outh))#outh{last_modified=undefined});
erase_header(etag) ->
    put(outh, (get(outh))#outh{etag=undefined});
erase_header(set_cookie) ->
    put(outh, (get(outh))#outh{set_cookie=undefined});
erase_header(content_range) ->
    put(outh, (get(outh))#outh{content_range=undefined});
erase_header(content_length) ->
    put(outh, (get(outh))#outh{contlen=0, content_length=undefined});
erase_header(content_type) ->
    put(outh, (get(outh))#outh{content_type=undefined});
erase_header(content_encoding) ->
    put(outh, (get(outh))#outh{encoding=decide, content_encoding=undefined});
erase_header(transfer_encoding) ->
    put(outh, (get(outh))#outh{chunked = false,
                               act_contlen = 0,
                               transfer_encoding = undefined});
erase_header(www_authenticate) ->
    put(outh, (get(outh))#outh{www_authenticate=undefined});
erase_header(location) ->
    put(outh, (get(outh))#outh{location=undefined});
erase_header(vary) ->
    put(outh, (get(outh))#outh{vary=undefined}).
%% getuid/0 - ask the setuid_drv port driver for the current uid.
%% On win32 there is no uid concept, so "0" is returned.  The driver
%% replies with "ok " followed by the uid as a decimal string.
getuid() ->
    case os:type() of
        {win32, _} ->
            {ok, "0"};
        _ ->
            load_setuid_drv(),
            P = open_port({spawn, "setuid_drv g"},[]),
            receive
                {P, {data, "ok " ++ IntList}} ->
                    {ok, IntList}
            end
    end.

%% user_to_home/1 - resolve User's home directory via the setuid_drv
%% driver ('h' command).  On win32 "." is used instead.
%% NOTE(review): blocks until the port replies; no timeout clause.
user_to_home(User) ->
    case os:type() of
        {win32, _} ->
            ".";
        _ ->
            load_setuid_drv(),
            P = open_port({spawn, "setuid_drv " ++ [$h|User]}, []),
            receive
                {P, {data, "ok " ++ Home}} ->
                    Home
            end
    end.

%% uid_to_name/1 - map a numeric Uid to a user name via the setuid_drv
%% driver ('n' command).  No win32 special case here, unlike the above.
uid_to_name(Uid) ->
    load_setuid_drv(),
    P = open_port({spawn, "setuid_drv " ++
                       [$n|erlang:integer_to_list(Uid)]}, []),
    receive
        {P, {data, "ok " ++ Name}} ->
            Name
    end.

%% load_setuid_drv/0 - load the setuid_drv shared library from the
%% application's priv/lib directory.  A load failure is fatal for the
%% calling process (exit(normal) after logging).
load_setuid_drv() ->
    Path = filename:join(get_priv_dir(), "lib"),
    case erl_ddll:load_driver(Path, "setuid_drv") of
        ok ->
            ok;
        {error, Reason} ->
            error_logger:format("Failed to load setuid_drv (from ~p) : ~p",
                                [Path, erl_ddll:format_error(Reason)]),
            exit(normal)
    end.
%% exists/1 - probe whether file F can be opened for reading.
%% NOTE: asymmetric return values, 'ok' on success but 'false' on
%% failure - callers match on exactly these values, so keep it.
exists(F) ->
    case file:open(F, [read, raw]) of
        {ok, Fd} ->
            file:close(Fd),
            ok;
        _ ->
            false
    end.

%% mkdir/1 - create Path and all missing parent directories
%% (mkdir -p behaviour), one path component at a time.
mkdir(Path) ->
    [Hd|Parts] = filename:split(Path),
    mkdir([Hd], Parts).

mkdir(Ack, []) ->
    ensure_exist(filename:join(Ack));
mkdir(Ack, [H|T]) ->
    ensure_exist(filename:join(Ack ++ [H])),
    mkdir(Ack ++ [H], T).

%% ensure_exist/1 - create a single directory if it is not already
%% present.  Failures are logged but deliberately not propagated.
ensure_exist(Path) ->
    case file:read_file_info(Path) of
        {ok, _} ->
            ok;
        _ ->
            case file:make_dir(Path) of
                ok ->
                    ok;
                ERR ->
                    error_logger:format("Failed to mkdir ~p: ~p~n", [Path, ERR])
            end
    end.
%%
%%
%% TCP/SSL connection with a configurable IPv4/IPv6 preference on NS lookup.
%%
%%
%% tcp_connect/3,4 - plain TCP connect honouring the configured
%% IPv4/IPv6 name-lookup preference (see parse_ipaddr_and_connect/5).
tcp_connect(Host, Port, Options) ->
    tcp_connect(Host, Port, Options, infinity).

tcp_connect(Host, Port, Options, Timeout) ->
    parse_ipaddr_and_connect(tcp, Host, Port, Options, Timeout).

%% ssl_connect/3,4 - same as tcp_connect but over SSL.
ssl_connect(Host, Port, Options) ->
    ssl_connect(Host, Port, Options, infinity).

ssl_connect(Host, Port, Options, Timeout) ->
    parse_ipaddr_and_connect(ssl, Host, Port, Options, Timeout).
%% parse_ipaddr_and_connect/5 - normalize the Host argument (tuple IP,
%% "[v6]"-bracketed literal, dotted/hex literal, or DNS name) and hand
%% off to filter_tcpoptions_and_connect/6.  Only a real DNS name gets a
%% name-lookup family preference; literal addresses skip resolution.
parse_ipaddr_and_connect(Proto, IP, Port, Options, Timeout)
  when is_tuple(IP) ->
    %% The caller handled name resolution himself.
    filter_tcpoptions_and_connect(Proto, undefined,
                                  IP, Port, Options, Timeout);
parse_ipaddr_and_connect(Proto, [$[ | Rest], Port, Options, Timeout) ->
    %% yaws_api:parse_url/1 keep the "[...]" enclosing an IPv6 address.
    %% Remove them now, and parse the address.
    IP = string:strip(Rest, right, $]),
    parse_ipaddr_and_connect(Proto, IP, Port, Options, Timeout);
parse_ipaddr_and_connect(Proto, Host, Port, Options, Timeout) ->
    %% First, try to parse an IP address, because inet:getaddr/2 could
    %% return nxdomain if the family doesn't match the IP address
    %% format.
    case parse_strict_address(Host) of
        {ok, IP} ->
            filter_tcpoptions_and_connect(Proto, undefined,
                                          IP, Port, Options, Timeout);
        {error, einval} ->
            NsLookupPref = get_nslookup_pref(Options),
            filter_tcpoptions_and_connect(Proto, NsLookupPref,
                                          Host, Port, Options, Timeout)
    end.
%% parse_strict_address/1 - parse an IPv4 or IPv6 literal, rejecting
%% anything that needs DNS.  Uses inet:parse_strict_address/1 when the
%% OTP release has it, otherwise falls back to the inet_parse internals.
-ifdef(HAVE_INET_PARSE_STRICT_ADDRESS).
parse_strict_address(Host) ->
    inet:parse_strict_address(Host).
-else.
parse_strict_address(Host) when is_list(Host) ->
    %% Try IPv4 first, then IPv6; either returns {ok, IP} or an error.
    case inet_parse:ipv4strict_address(Host) of
        {ok,IP} -> {ok,IP};
        _ -> inet_parse:ipv6strict_address(Host)
    end;
parse_strict_address(_) ->
    {error, einval}.
-endif.
%% Drop any address-family option (inet/inet6) from the TCP options
%% before connecting: once we hold a concrete IP address, passing e.g.
%% {127,0,0,1} together with [inet6] to gen_tcp:connect/3 would fail
%% with {error, nxdomain}.
filter_tcpoptions_and_connect(Proto, NsLookupPref,
                              Host, Port, Options, Timeout) ->
    OptsNoFamily = [Opt || Opt <- Options, Opt =/= inet, Opt =/= inet6],
    resolve_and_connect(Proto, NsLookupPref, Host, Port, OptsNoFamily, Timeout).
%% resolve_and_connect/6 - resolve Host once per preferred address
%% family and connect to the first address that works.  A tuple Host is
%% already an IP address and is connected to directly.
resolve_and_connect(Proto, _, IP, Port, Options, Timeout)
  when is_tuple(IP) ->
    do_connect(Proto, IP, Port, Options, Timeout);
resolve_and_connect(Proto, [Family | Rest], Host, Port, Options, Timeout) ->
    Result = case inet:getaddr(Host, Family) of
                 {ok, IP} -> do_connect(Proto, IP, Port, Options, Timeout);
                 R -> R
             end,
    case Result of
        {ok, Socket} ->
            {ok, Socket};
        {error, _} when length(Rest) >= 1 ->
            %% If the connection fails here, ignore the error and
            %% continue with the next address family.
            resolve_and_connect(Proto, Rest, Host, Port, Options, Timeout);
        {error, Reason} ->
            %% This was the last IP address in the list, return the
            %% connection error.
            {error, Reason}
    end.
%% Open the actual client connection, choosing the transport module by
%% the protocol tag (tcp | ssl).
do_connect(Proto, IP, Port, Options, Timeout) ->
    Connect = case Proto of
                  tcp -> fun gen_tcp:connect/4;
                  ssl -> fun ssl:connect/4
              end,
    Connect(IP, Port, Options, Timeout).
%% If the caller specified inet or inet6 in the TCP options, prefer
%% this to the global nslookup_pref parameter.
%%
%% This can be used in processes which can't use get(gc) to get the
%% global conf: if they are given the global conf, they can get
%% nslookup_pref value and add it the TCP options.
%%
%% If neither TCP options specify the family, nor the global conf is
%% accessible, use default value declared in #gconf definition.
get_nslookup_pref(TcpOptions) ->
    get_nslookup_pref(TcpOptions, []).

%% Collect inet/inet6 atoms from the option list in order of appearance.
get_nslookup_pref([inet | Rest], Result) ->
    get_nslookup_pref(Rest, [inet | Result]);
get_nslookup_pref([inet6 | Rest], Result) ->
    get_nslookup_pref(Rest, [inet6 | Result]);
get_nslookup_pref([_ | Rest], Result) ->
    get_nslookup_pref(Rest, Result);
get_nslookup_pref([], []) ->
    %% No family in the options: fall back to the global conf (or the
    %% #gconf record defaults when no global conf is in the process dict).
    case get(gc) of
        undefined -> gconf_nslookup_pref(#gconf{});
        GC -> gconf_nslookup_pref(GC)
    end;
get_nslookup_pref([], Result) ->
    lists:reverse(Result).
%%
%%
%% http/tcp send receive functions
%%
%%
%% do_recv/3 - receive from a socket using the keepalive timeout from
%% the global conf; dispatches on ssl | nossl.
do_recv(Sock, Num, nossl) ->
    gen_tcp:recv(Sock, Num, (get(gc))#gconf.keepalive_timeout);
do_recv(Sock, Num, ssl) ->
    ssl:recv(Sock, Num, (get(gc))#gconf.keepalive_timeout).

%% do_recv/4 - same, but with an explicit timeout.
do_recv(Sock, Num, nossl, Timeout) ->
    gen_tcp:recv(Sock, Num, Timeout);
do_recv(Sock, Num, ssl, Timeout) ->
    ssl:recv(Sock, Num, Timeout).
%% cli_recv/3 - receive from the client socket and, when tracing is
%% enabled in the global conf, log what was received.
cli_recv(S, Num, SslBool) ->
    Res = do_recv(S, Num, SslBool),
    cli_recv_trace(yaws_trace:get_type(get(gc)), Res),
    Res.

%% cli_recv_trace/2 - write the receive result to the trace log.
%% Tuples (parsed http messages) and errors are always formatted;
%% raw data is only logged when the trace type is 'traffic'.
cli_recv_trace(undefined, _) ->
    ok;
cli_recv_trace(Trace, Res) ->
    case Res of
        {ok, Val} when is_tuple(Val) ->
            yaws_trace:write(from_client, ?F("~p~n", [Val]));
        {error, What} ->
            yaws_trace:write(from_client, ?F("~p~n", [What]));
        {ok, http_eoh} ->
            ok;
        {ok, Val} when Trace == traffic ->
            yaws_trace:write(from_client, Val);
        _ ->
            ok
    end.
%% gen_tcp_send/2 - send Data on socket S, over SSL when the server
%% conf says so.  On success the byte count is fed to yaws_stats.
%% Failure behaviour depends on the debug flag: without debug the
%% process quietly exits (client went away); with debug the error is
%% logged and raised.
gen_tcp_send(S, Data) ->
    Res = case (get(sc))#sconf.ssl of
              undefined -> gen_tcp:send(S, Data);
              _SSL -> ssl:send(S, Data)
          end,
    case ?gc_has_debug((get(gc))) of
        false ->
            case Res of
                ok ->
                    yaws_stats:sent(iolist_size(Data)),
                    ok;
                _Err ->
                    exit(normal)   %% keep quiet
            end;
        true ->
            case Res of
                ok ->
                    yaws_stats:sent(iolist_size(Data)),
                    ?Debug("Sent ~p~n", [yaws_debug:nobin(Data)]),
                    ok;
                Err ->
                    %% Log a truncated preview of the payload plus the error.
                    {B2, Size} = strip(Data),
                    yaws_debug:derror("Failed to send ~w bytes:~n~p "
                                      "on socket ~p: ~p~n~p~n",
                                      [Size, B2, S, Err,
                                       yaws_debug:nobin(Data)]),
                    erlang:error(Err)
            end
    end.
%% Produce a printable preview of an iolist for error logging: the
%% flattened data truncated to 50 bytes (with a "....." marker when a
%% 50-byte prefix could be taken), paired with the total byte size.
strip(Data) ->
    Bin = list_to_binary([Data]),
    Size = byte_size(Bin),
    if
        Size >= 50 ->
            <<Head:50/binary, _/binary>> = Bin,
            {binary_to_list(Head) ++ ".....", Size};
        true ->
            {binary_to_list(Bin), Size}
    end.
%% This is the api function
%% return {Req, Headers}
%% or closed
http_get_headers(CliSock, SSL) ->
    do_http_get_headers(CliSock, SSL).
%% Render a #headers record as an iolist of "Name: Value\r\n" lines.
headers_to_str(Headers) ->
    [[Line, "\r\n"] || Line <- yaws_api:reformat_header(Headers)].
%% setopts/3 - set socket options, dispatching on ssl | nossl.
%% Crashes (badmatch) if the underlying call fails.
setopts(Sock, Opts, nossl) ->
    ok = inet:setopts(Sock, Opts);
setopts(Sock, Opts, ssl) ->
    ok = ssl:setopts(Sock, Opts).
%% do_http_get_headers/2 - read the request line and all headers.
%% Returns {Req, Headers}, 'closed', or {error, Reason}; a malformed
%% request line is mapped to a synthetic 'bad_request' request record.
do_http_get_headers(CliSock, SSL) ->
    case http_recv_request(CliSock,SSL) of
        bad_request ->
            {#http_request{method=bad_request, version={0,9}}, #headers{}};
        closed ->
            closed;
        R ->
            %% Http request received. Store the current time. It will be
            %% useful to get the time taken to serve the request.
            put(request_start_time, os:timestamp()),
            case http_collect_headers(CliSock, R, #headers{}, SSL, 0) of
                {error, _}=Error ->
                    Error;
                H ->
                    {R, H}
            end
    end.
%% http_recv_request/2 - receive one parsed request (or response) line
%% in {packet, http} mode.  Stray blank lines before the request line
%% are skipped; parse errors yield 'bad_request'; closed/timeout yield
%% 'closed'; anything unexpected terminates the process.
http_recv_request(CliSock, SSL) ->
    setopts(CliSock, [{packet, http}, {packet_size, 16#4000}], SSL),
    case do_recv(CliSock, 0, SSL) of
        {ok, R} when is_record(R, http_request) ->
            R;
        {ok, R} when is_record(R, http_response) ->
            R;
        {_, {http_error, "\r\n"}} ->
            %% Be lenient: skip empty lines before the request line.
            http_recv_request(CliSock, SSL);
        {_, {http_error, "\n"}} ->
            http_recv_request(CliSock,SSL);
        {_, {http_error, _}} ->
            bad_request;
        {error, closed} ->
            closed;
        {error, timeout} ->
            closed;
        _Other ->
            error_logger:format("Unhandled reply fr. do_recv() ~p~n", [_Other]),
            exit(normal)
    end.
%% http_collect_headers/5 - read headers in {packet, httph} mode until
%% http_eoh, filling in the #headers record.  Headers with a dedicated
%% record field are stored there; anything else accumulates in 'other'.
%% At most 1000 header lines are accepted; beyond that an error tuple
%% is returned so the request can be rejected.
http_collect_headers(CliSock, Req, H, SSL, Count) when Count < 1000 ->
    setopts(CliSock, [{packet, httph}, {packet_size, 16#4000}], SSL),
    Recv = do_recv(CliSock, 0, SSL),
    case Recv of
        {ok, {http_header,  _Num, 'Host', _, Host}} ->
            http_collect_headers(CliSock, Req, H#headers{host = Host},
                                 SSL, Count+1);
        {ok, {http_header, _Num, 'Connection', _, Conn}} ->
            http_collect_headers(CliSock, Req,
                                 H#headers{connection = Conn},SSL, Count+1);
        {ok, {http_header, _Num, 'Accept', _, Accept}} ->
            http_collect_headers(CliSock, Req, H#headers{accept = Accept},
                                 SSL, Count+1);
        {ok, {http_header, _Num, 'If-Modified-Since', _, X}} ->
            http_collect_headers(CliSock, Req,
                                 H#headers{if_modified_since = X},SSL, Count+1);
        {ok, {http_header, _Num, 'If-Match', _, X}} ->
            http_collect_headers(CliSock, Req, H#headers{if_match = X},
                                 SSL, Count+1);
        {ok, {http_header, _Num, 'If-None-Match', _, X}} ->
            http_collect_headers(CliSock, Req,
                                 H#headers{if_none_match = X},SSL, Count+1);
        {ok, {http_header, _Num, 'If-Range', _, X}} ->
            http_collect_headers(CliSock, Req, H#headers{if_range = X},
                                 SSL, Count+1);
        {ok, {http_header, _Num, 'If-Unmodified-Since', _, X}} ->
            http_collect_headers(CliSock, Req,
                                 H#headers{if_unmodified_since = X},SSL,
                                 Count+1);
        {ok, {http_header, _Num, 'Range', _, X}} ->
            http_collect_headers(CliSock, Req, H#headers{range = X},
                                 SSL, Count+1);
        {ok, {http_header, _Num, 'Referer',_, X}} ->
            http_collect_headers(CliSock, Req, H#headers{referer = X},
                                 SSL, Count+1);
        {ok, {http_header, _Num, 'User-Agent', _, X}} ->
            http_collect_headers(CliSock, Req, H#headers{user_agent = X},
                                 SSL, Count+1);
        {ok, {http_header, _Num, 'Accept-Ranges', _, X}} ->
            http_collect_headers(CliSock, Req,
                                 H#headers{accept_ranges = X},SSL, Count+1);
        {ok, {http_header, _Num, 'Cookie', _, X}} ->
            %% Multiple Cookie headers accumulate into a list.
            http_collect_headers(CliSock, Req,
                                 H#headers{cookie = [X|H#headers.cookie]},
                                 SSL, Count+1);
        {ok, {http_header, _Num, 'Keep-Alive', _, X}} ->
            http_collect_headers(CliSock, Req, H#headers{keep_alive = X},
                                 SSL, Count+1);
        {ok, {http_header, _Num, 'Content-Length', _, X}} ->
            http_collect_headers(CliSock, Req,
                                 H#headers{content_length = X},SSL,
                                 Count+1);
        {ok, {http_header, _Num, 'Content-Type', _, X}} ->
            http_collect_headers(CliSock, Req,
                                 H#headers{content_type = X},SSL, Count+1);
        {ok, {http_header, _Num, 'Content-Encoding', _, X}} ->
            http_collect_headers(CliSock, Req,
                                 H#headers{content_encoding = X},SSL, Count+1);
        {ok, {http_header, _Num, 'Transfer-Encoding', _, X}} ->
            http_collect_headers(CliSock, Req,
                                 H#headers{transfer_encoding=X},SSL, Count+1);
        {ok, {http_header, _Num, 'Location', _, X}} ->
            http_collect_headers(CliSock, Req, H#headers{location=X},
                                 SSL, Count+1);
        {ok, {http_header, _Num, 'Authorization', _, X}} ->
            http_collect_headers(CliSock, Req,
                                 H#headers{authorization = parse_auth(X)},
                                 SSL, Count+1);
        {ok, {http_header, _Num, 'X-Forwarded-For', _, X}} ->
            %% Repeated X-Forwarded-For headers are joined with ", ".
            case H#headers.x_forwarded_for of
                undefined ->
                    http_collect_headers(CliSock, Req, H#headers{x_forwarded_for=X},
                                         SSL, Count+1);
                PrevXF ->
                    NewXF = join_sep([PrevXF,X], ", "),
                    http_collect_headers(CliSock, Req, H#headers{x_forwarded_for=NewXF},
                                         SSL, Count+1)
            end;
        {ok, http_eoh} ->
            H;

        %% these are here to be a little forgiving to
        %% bad (typically test script) clients
        {_, {http_error, "\r\n"}} ->
            http_collect_headers(CliSock, Req, H,SSL, Count+1);
        {_, {http_error, "\n"}} ->
            http_collect_headers(CliSock, Req, H,SSL, Count+1);

        %% auxiliary headers we don't have builtin support for
        {ok, X} ->
            ?Debug("OTHER header ~p~n", [X]),
            http_collect_headers(CliSock, Req,
                                 H#headers{other=[X|H#headers.other]},
                                 SSL, Count+1);
        _Err ->
            exit(normal)

    end;
http_collect_headers(_CliSock, Req, _H, _SSL, _Count) ->
    {error, {too_many_headers, Req}}.
%% parse_auth/1 - decode an Authorization header value.
%% Basic credentials become {User, Pass, Orig}; anything that cannot be
%% decoded (including Negotiate) becomes {undefined, undefined, Orig}
%% so the raw value is still available to auth modules.
parse_auth(Orig = "Basic " ++ Auth64) ->
    case decode_base64(Auth64) of
        {error, _Err} ->
            {undefined, undefined, Orig};
        Auth ->
            %% NOTE(review): string:tokens/2 splits on every colon, so a
            %% password containing ':' will not match [User, Pass].
            case string:tokens(Auth, ":") of
                [User, Pass] -> {User, Pass, Orig};
                _ -> {undefined, undefined, Orig}
            end
    end;
parse_auth(Orig = "Negotiate " ++ _Auth64) ->
    {undefined, undefined, Orig};
parse_auth(Orig) ->
    {undefined, undefined, Orig}.
%% decode_base64/1 - hand-rolled base64 decoder working 4 sextets at a
%% time, honouring "=" padding.  Returns the decoded string or
%% {error, bad_base64} for input whose length is not a multiple of 4.
decode_base64([]) ->
    [];
decode_base64(Auth64) ->
    decode_base64(Auth64, []).

decode_base64([], Acc) ->
    lists:reverse(Acc);
decode_base64([Sextet1,Sextet2,$=,$=|Rest], Acc) ->
    %% "xx==" - only one output octet.
    Bits2x6 =
        (d(Sextet1) bsl 18) bor
        (d(Sextet2) bsl 12),
    Octet1 = Bits2x6 bsr 16,
    decode_base64(Rest, [Octet1|Acc]);
decode_base64([Sextet1,Sextet2,Sextet3,$=|Rest], Acc) ->
    %% "xxx=" - two output octets.
    Bits3x6 =
        (d(Sextet1) bsl 18) bor
        (d(Sextet2) bsl 12) bor
        (d(Sextet3) bsl 6),
    Octet1 = Bits3x6 bsr 16,
    Octet2 = (Bits3x6 bsr 8) band 16#ff,
    decode_base64(Rest, [Octet2,Octet1|Acc]);
decode_base64([Sextet1,Sextet2,Sextet3,Sextet4|Rest], Acc) ->
    %% Full group - three output octets.
    Bits4x6 =
        (d(Sextet1) bsl 18) bor
        (d(Sextet2) bsl 12) bor
        (d(Sextet3) bsl 6) bor
        d(Sextet4),
    Octet1 = Bits4x6 bsr 16,
    Octet2 = (Bits4x6 bsr 8) band 16#ff,
    Octet3 = Bits4x6 band 16#ff,
    decode_base64(Rest, [Octet3,Octet2,Octet1|Acc]);
decode_base64(_CatchAll, _Acc) ->
    {error, bad_base64}.
%% d/1 - map one base64 character to its 6-bit value.  Invalid
%% characters are deliberately (leniently) mapped to 63 rather than
%% raising - the catch-all clause makes this explicit.
d(X) when X >= $A, X =<$Z -> X-65;
d(X) when X >= $a, X =<$z -> X-71;
d(X) when X >= $0, X =<$9 -> X+4;
d($+) -> 62;
d($/) -> 63;
d(_) -> 63.
%% Set (true) or clear (false) Bit in the flag word.
flag(Flags, Bit, true)  -> Flags bor Bit;
flag(Flags, Bit, false) -> Flags band (bnot Bit).
%% misc debug funcs .... use from cli only
%% restart/0 - stop yaws, reload all its modules, start it again.
restart() ->
    stop(),
    load(),
    start().
%% Return the list of modules declared in yaws' application resource
%% file, or [] when the application metadata cannot be read.
modules() ->
    application:load(yaws),
    case application:get_all_key(yaws) of
        {ok, Keys} ->
            case lists:keyfind(modules, 1, Keys) of
                {modules, Mods} -> Mods;
                false -> []
            end;
        _ ->
            []
    end.
%% load/0,1 - (re)load all yaws modules, or an explicit module list,
%% via the shell helper c:l/1.
load() ->
    load(modules()).
load(M) ->
    lists:foreach(fun(Mod) ->
                          ?Debug("Load ~p~n", [Mod]),
                          c:l(Mod)
                  end, M).
%% upto_char/2 - take characters from a (possibly deep) list up to the
%% first occurrence of Char.  Note the clause order: the [] clause sits
%% before the deep-list clause, so an empty sublist terminates cleanly.
upto_char(Char, [Char|_]) ->
    [];
upto_char(Char, [H|T]) when is_integer(H) ->
    [H|upto_char(Char, T)];
upto_char(_, []) ->
    [];
%% deep lists
upto_char(Char, [H|T]) when is_list(H) ->
    %% Sublists containing Char end the scan inside that sublist.
    case lists:member(Char ,H) of
        true -> upto_char(Char, H);
        false -> [H, upto_char(Char, T)]
    end.
%% Apply Fun to every non-list element of a deep list while preserving
%% the nesting structure of the input exactly.
deepmap(_Fun, []) ->
    [];
deepmap(Fun, [Head|Tail]) when is_list(Head) ->
    [deepmap(Fun, Head) | deepmap(Fun, Tail)];
deepmap(Fun, [Head|Tail]) ->
    [Fun(Head) | deepmap(Fun, Tail)].
%% sconf_to_srvstr/1 - build "scheme://host[:port]" for a server conf.
sconf_to_srvstr(SC) ->
    redirect_scheme(SC) ++ redirect_host(SC,undefined).

%% redirect_scheme/1 - scheme prefix for redirects: an explicitly
%% configured rmethod wins, otherwise http/https by SSL presence.
redirect_scheme(SC) ->
    case {SC#sconf.ssl,SC#sconf.rmethod} of
        {_, Method} when is_list(Method) -> Method++"://";
        {undefined, _} -> "http://";
        {_SSl, _} -> "https://"
    end.
%% redirect_host/2 - host part for redirects.  A configured rhost wins;
%% otherwise the client's Host header is used when available, falling
%% back to the configured servername (with any ":port" stripped and a
%% canonical port suffix re-appended).
redirect_host(SC, HostHdr) ->
    case SC#sconf.rhost of
        undefined ->
            if HostHdr == undefined ->
                    ServerName = SC#sconf.servername,
                    SnameNoPort = case string:chr(ServerName, $:) of
                                      0 -> ServerName;
                                      N -> lists:sublist(ServerName, N-1)
                                  end,
                    SnameNoPort ++ redirect_port(SC);
               true ->
                    HostHdr
            end;
        _ ->
            SC#sconf.rhost
    end.
%% redirect_port/1 - ":Port" suffix for redirect URLs, empty when the
%% port is the default for the effective scheme (80 plain, 443 SSL, or
%% when the configured rmethod matches its default port).
redirect_port(SC) ->
    case {SC#sconf.rmethod, SC#sconf.ssl, SC#sconf.port} of
        {"https", _, 443} -> "";
        {"http", _, 80} -> "";
        {_, undefined, 80} -> "";
        {_, undefined, Port} -> [$:|erlang:integer_to_list(Port)];
        {_, _SSL, 443} -> "";
        {_, _SSL, Port} -> [$:|erlang:integer_to_list(Port)]
    end.

%% redirect_scheme_port/1 - convenience pair of the two results above.
redirect_scheme_port(SC) ->
    Scheme = redirect_scheme(SC),
    PortPart = redirect_port(SC),
    {Scheme, PortPart}.
%% tmpdir/0,1 - pick a temp directory.  On win32 the TEMP/TMP
%% environment variables are consulted, then two well-known Windows
%% paths; elsewhere the supplied default (~/.yaws for tmpdir/0) is used.
tmpdir() ->
    tmpdir(filename:join([home(), ".yaws"])).
tmpdir(DefaultTmpDir) ->
    case os:type() of
        {win32,_} ->
            case os:getenv("TEMP") of
                false ->
                    case os:getenv("TMP") of
                        %%
                        %% No temporary path set?
                        %% Then try standard paths.
                        %%
                        false ->
                            case file:read_file_info("C:/WINNT/Temp") of
                                {error, _} -> "C:/WINDOWS/Temp";
                                {ok, _} -> "C:/WINNT/Temp"
                            end;
                        PathTMP ->
                            PathTMP
                    end;
                PathTEMP ->
                    PathTEMP
            end;
        _ ->
            DefaultTmpDir
    end.
%% mktemp function borrowed from Klacke's misc module
%% Modified to use tmpdir/1 so it works on Windows too.
%% Note that mktemp/2 could be exported too, but no Yaws
%% code needs it, yet anyway.
%% Ret selects the result kind: file (a fresh path), fd, or binfd
%% (an open, already-unlinked fd).
mktemp(Template) ->
    mktemp(Template, file).
mktemp(Template, Ret) ->
    Tdir = tmpdir("/tmp"),
    Max = 1000,
    mktemp(Tdir, Template, Ret, 0, Max, "").
%% mktemp/6 - try up to Max times to produce a temp file name that does
%% not yet exist under Dir, returning {ok, Path} (Ret == file) or an
%% open raw fd whose path was already deleted (Ret == fd | binfd).
%% Gives up with {error, too_many} after Max collisions.
mktemp(Dir, Template, Ret, I, Max, Suffix) when I < Max ->
    %% os:timestamp/0 replaces the deprecated erlang:now/0; the rest of
    %% this file already uses os:timestamp/0 (see do_http_get_headers).
    {X,Y,Z} = os:timestamp(),
    PostFix = erlang:integer_to_list(X) ++ "-" ++
        erlang:integer_to_list(Y) ++ "-" ++
        erlang:integer_to_list(Z),
    F = filename:join(Dir, Template ++ [$_ | PostFix] ++ Suffix),
    filelib:ensure_dir(F),
    case file:open(F, [read, raw]) of
        {error, enoent} when Ret == file ->
            %% Name is free - hand it back without creating the file.
            {ok, F};
        {error, enoent} when Ret == fd ->
            case file:open(F, [read, write, raw]) of
                {ok, Fd} ->
                    %% Unlink immediately so the file vanishes on close.
                    file:delete(F),
                    {ok, Fd};
                Err ->
                    Err
            end;
        {error, enoent} when Ret == binfd ->
            case file:open(F, [read, write, raw, binary]) of
                {ok, Fd} ->
                    file:delete(F),
                    {ok, Fd};
                Err ->
                    Err
            end;
        {ok, Fd} ->
            %% Name collision - close and retry with a fresh timestamp.
            file:close(Fd),
            mktemp(Dir, Template, Ret, I+1, Max, Suffix);
        _Err ->
            mktemp(Dir, Template, Ret, I+1, Max, Suffix)
    end;
mktemp(_Dir, _Template, _Ret, _I, _Max, _Suffix) ->
    {error, too_many}.
%% This feature is usable together with
%% privbind and authbind on linux
%% home/0 - YAWSHOME overrides HOME when set.
home() ->
    case os:getenv("YAWSHOME") of
        false -> os:getenv("HOME");
        DIR -> DIR
    end.

%% id_dir/1 - per-server-id runtime directory under the temp dir.
id_dir(Id) ->
    filename:join([tmpdir(), "yaws", to_list(Id)]).

%% ctl_file/1 - path of the control file inside the id directory.
ctl_file(Id) ->
    filename:join([id_dir(Id), "CTL"]).
%% eat_crnl/2 - consume exactly one CRLF (binary or list form) from the
%% socket; anything else terminates the process.
eat_crnl(Fd,SSL) ->
    setopts(Fd, [{packet, line}],SSL),
    case do_recv(Fd,0, SSL) of
        {ok, <<13,10>>} -> ok;
        {ok, [13,10]}   -> ok;
        _               -> exit(normal)
    end.

%% get_chunk_num/2 - read a chunk header and return just its size.
get_chunk_num(Fd, SSL) ->
    {N, _} = get_chunk_header(Fd, SSL),
    N.

%% get_chunk_header/2 - read one "size[;extensions]" chunked-encoding
%% header line; size is hex.  Returns {Size, Extensions}.
get_chunk_header(Fd, SSL) ->
    case do_recv(Fd, 0, SSL) of
        {ok, Data} ->
            Line = if is_binary(Data) -> binary_to_list(Data);
                      true            -> Data
                   end,
            ?Debug("Get chunk num from line ~p~n",[Line]),
            {N, Exts} = split_at(Line, $;),
            {erlang:list_to_integer(strip_spaces(N),16), strip_spaces(Exts)};
        {error, _Rsn} ->
            exit(normal)
    end.
%% get_chunk/4 - accumulate binaries from the socket until Asz bytes
%% have been read in total (first clause: already done when N == Asz).
get_chunk(_Fd, N, N, _) ->
    [];
get_chunk(Fd, N, Asz,SSL) ->
    case do_recv(Fd, N, SSL) of
        {ok, Bin} ->
            SZ = size(Bin),
            [Bin|get_chunk(Fd, N, SZ+Asz,SSL)];
        _ ->
            exit(normal)
    end.
%% get_chunk_trailer/2 - read any trailer headers after the last chunk.
%% Note that Hdrs is already bound to #headers{}, so the 'Hdrs ->'
%% clause matches only when NO trailer headers arrived (plain <<>>);
%% otherwise the collected headers are returned alongside <<>>.
get_chunk_trailer(Fd, SSL) ->
    Hdrs = #headers{},
    case http_collect_headers(Fd, undefined, Hdrs, SSL, 0) of
        {error,_} -> exit(normal);
        Hdrs      -> <<>>;
        NewHdrs   -> {<<>>, NewHdrs}
    end.
%% Split the input string at the first occurrence of Char, which is
%% consumed.  Returns {Before, After}; when Char never occurs, the
%% whole string is Before and After is "".
split_at(String, Char) ->
    split_at(String, Char, []).

split_at([Char | Rest], Char, Acc) ->
    {lists:reverse(Acc), Rest};
split_at([Other | Rest], Char, Acc) ->
    split_at(Rest, Char, [Other | Acc]);
split_at([], _Char, Acc) ->
    {lists:reverse(Acc), []}.
%% Insert an element at a given (1-based) position in a list.
%% Position 0 appends; a position past the end also appends.
insert_at(Elem, 0, List) ->
    List ++ [Elem];
insert_at(Elem, Pos, List) ->
    insert_at(Elem, Pos, List, []).

insert_at(Elem, _, [], Acc) ->
    lists:reverse([Elem | Acc]);
insert_at(Elem, 1, Rest, Acc) ->
    lists:reverse([Elem | Acc]) ++ Rest;
insert_at(Elem, Pos, [Head | Tail], Acc) ->
    insert_at(Elem, Pos - 1, Tail, [Head | Acc]).
%% Parse an Ip address or an Ip address range
%% Return Ip || {IpMin, IpMax} where:
%%     Ip, IpMin, IpMax ::= ip_address()
%% A "/mask" suffix turns the result into an inclusive range; for IPv4
%% the network and broadcast addresses are excluded from the range,
%% while for IPv6 the whole prefix is returned.  Parse failures are
%% thrown as {error, Reason}.
parse_ipmask(Str) when is_list(Str) ->
    case string:tokens(Str, [$/]) of
        [IpStr] ->
            case inet_parse:address(IpStr) of
                {ok, Ip}        -> Ip;
                {error, Reason} -> throw({error, Reason})
            end;
        [IpStr, NetMask] ->
            {Type, IpInt} = ip_to_integer(IpStr),
            MaskInt       = netmask_to_integer(Type, NetMask),
            case netmask_to_wildcard(Type, MaskInt) of
                0 ->
                    %% Mask covers all bits: a single host address.
                    integer_to_ip(Type, IpInt);
                Wildcard when Type =:= ipv4 ->
                    %% Exclude network (NetAddr) and broadcast addresses.
                    NetAddr   = (IpInt band MaskInt),
                    Broadcast = NetAddr + Wildcard,
                    IpMin     = NetAddr + 1,
                    IpMax     = Broadcast - 1,
                    {integer_to_ip(ipv4, IpMin), integer_to_ip(ipv4, IpMax)};
                Wildcard when Type =:= ipv6 ->
                    NetAddr = (IpInt band MaskInt),
                    IpMin   = NetAddr,
                    IpMax   = NetAddr + Wildcard,
                    {integer_to_ip(ipv6, IpMin), integer_to_ip(ipv6, IpMax)}
            end;
        _ ->
            throw({error, einval})
    end;
parse_ipmask(_) ->
    throw({error, einval}).
-define(MAXBITS_IPV4, 32).
-define(MASK_IPV4, 16#FFFFFFFF).
-define(MAXBITS_IPV6, 128).
-define(MASK_IPV6, 16#FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF).
%% ip_to_integer/1 - convert a literal string or address tuple into
%% {ipv4 | ipv6, Integer}.  Invalid input is thrown as {error, _}.
ip_to_integer(Str) when is_list(Str) ->
    case inet_parse:address(Str) of
        {ok, Ip}        -> ip_to_integer(Ip);
        {error, Reason} -> throw({error, Reason})
    end;
ip_to_integer({N1,N2,N3,N4}) ->
    <<Int:32>> = <<N1:8, N2:8, N3:8, N4:8>>,
    %% Guard against tuple elements wider than 8 bits.
    if
        (Int bsr ?MAXBITS_IPV4) == 0 -> {ipv4, Int};
        true                         -> throw({error, einval})
    end;
ip_to_integer({N1,N2,N3,N4,N5,N6,N7,N8}) ->
    <<Int:128>> = <<N1:16, N2:16, N3:16, N4:16, N5:16, N6:16, N7:16, N8:16>>,
    if
        (Int bsr ?MAXBITS_IPV6) == 0 -> {ipv6, Int};
        true                         -> throw({error, einval})
    end;
ip_to_integer(_) ->
    throw({error, einval}).
%% integer_to_ip/2 - inverse of ip_to_integer/1: build an address tuple
%% from an integer, bounds-checked against the family's mask.
integer_to_ip(ipv4, I) when is_integer(I), I =< ?MASK_IPV4 ->
    <<N1:8, N2:8, N3:8, N4:8>> = <<I:32>>,
    {N1, N2, N3, N4};
integer_to_ip(ipv6, I) when is_integer(I), I =< ?MASK_IPV6 ->
    <<N1:16, N2:16, N3:16, N4:16, N5:16, N6:16, N7:16, N8:16>> = <<I:128>>,
    {N1, N2, N3, N4, N5, N6, N7, N8};
integer_to_ip(_, _) ->
    throw({error, einval}).
%% netmask_to_integer/2 - accept a prefix length ("24") or a dotted
%% mask ("255.255.255.0") and return the mask as an integer.  The
%% catch distinguishes the two syntaxes; a dotted mask must belong to
%% the same address family as Type.
netmask_to_integer(Type, NetMask) ->
    case catch erlang:list_to_integer(NetMask) of
        I when is_integer(I) ->
            case Type of
                ipv4 -> (1 bsl ?MAXBITS_IPV4) - (1 bsl (?MAXBITS_IPV4 - I));
                ipv6 -> (1 bsl ?MAXBITS_IPV6) - (1 bsl (?MAXBITS_IPV6 - I))
            end;
        _ ->
            case ip_to_integer(NetMask) of
                {Type, MaskInt} -> MaskInt;
                _               -> throw({error, einval})
            end
    end.
%% Host (wildcard) bits of a netmask: the mask's bitwise complement
%% within the full address width of the family.
netmask_to_wildcard(ipv4, Mask) -> Mask bxor 16#FFFFFFFF;
netmask_to_wildcard(ipv6, Mask) -> Mask bxor 16#FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF.
%% Compare an ip to another ip or a range of ips
%% Returns true when Ip equals the single address or falls inside the
%% inclusive {IpMin, IpMax} range; mixed families compare as false.
match_ipmask(Ip, Ip) ->
    true;
match_ipmask(Ip, {IpMin, IpMax}) ->
    case compare_ips(Ip, IpMin) of
        error -> false;
        less  -> false;
        _ ->
            case compare_ips(Ip, IpMax) of
                error   -> false;
                greater -> false;
                _       -> true
            end
    end;
match_ipmask(_, _) ->
    false.
%% compare_ips/2 - lexicographic comparison of two same-family address
%% tuples, returning equal | less | greater | error.  The clauses work
%% from the least significant element upward: each clause's pattern
%% requires all MORE significant elements to be equal, so a difference
%% in a high element simply falls through to the clause that tests it.
%% Mismatched arities (mixed IPv4/IPv6) fall to the final error clause.
compare_ips({A,B,C,D},          {A,B,C,D})          -> equal;
compare_ips({A,B,C,D,E,F,G,H}, {A,B,C,D,E,F,G,H})  -> equal;
compare_ips({A,B,C,D1},        {A,B,C,D2}) when D1 < D2 -> less;
compare_ips({A,B,C,D1},        {A,B,C,D2}) when D1 > D2 -> greater;
compare_ips({A,B,C1,_},        {A,B,C2,_}) when C1 < C2 -> less;
compare_ips({A,B,C1,_},        {A,B,C2,_}) when C1 > C2 -> greater;
compare_ips({A,B1,_,_},        {A,B2,_,_}) when B1 < B2 -> less;
compare_ips({A,B1,_,_},        {A,B2,_,_}) when B1 > B2 -> greater;
compare_ips({A1,_,_,_},        {A2,_,_,_}) when A1 < A2 -> less;
compare_ips({A1,_,_,_},        {A2,_,_,_}) when A1 > A2 -> greater;
compare_ips({A,B,C,D,E,F,G,H1}, {A,B,C,D,E,F,G,H2}) when H1 < H2 -> less;
compare_ips({A,B,C,D,E,F,G,H1}, {A,B,C,D,E,F,G,H2}) when H1 > H2 -> greater;
compare_ips({A,B,C,D,E,F,G1,_}, {A,B,C,D,E,F,G2,_}) when G1 < G2 -> less;
compare_ips({A,B,C,D,E,F,G1,_}, {A,B,C,D,E,F,G2,_}) when G1 > G2 -> greater;
compare_ips({A,B,C,D,E,F1,_,_}, {A,B,C,D,E,F2,_,_}) when F1 < F2 -> less;
compare_ips({A,B,C,D,E,F1,_,_}, {A,B,C,D,E,F2,_,_}) when F1 > F2 -> greater;
compare_ips({A,B,C,D,E1,_,_,_}, {A,B,C,D,E2,_,_,_}) when E1 < E2 -> less;
compare_ips({A,B,C,D,E1,_,_,_}, {A,B,C,D,E2,_,_,_}) when E1 > E2 -> greater;
compare_ips({A,B,C,D1,_,_,_,_}, {A,B,C,D2,_,_,_,_}) when D1 < D2 -> less;
compare_ips({A,B,C,D1,_,_,_,_}, {A,B,C,D2,_,_,_,_}) when D1 > D2 -> greater;
compare_ips({A,B,C1,_,_,_,_,_}, {A,B,C2,_,_,_,_,_}) when C1 < C2 -> less;
compare_ips({A,B,C1,_,_,_,_,_}, {A,B,C2,_,_,_,_,_}) when C1 > C2 -> greater;
compare_ips({A,B1,_,_,_,_,_,_}, {A,B2,_,_,_,_,_,_}) when B1 < B2 -> less;
compare_ips({A,B1,_,_,_,_,_,_}, {A,B2,_,_,_,_,_,_}) when B1 > B2 -> greater;
compare_ips({A1,_,_,_,_,_,_,_}, {A2,_,_,_,_,_,_,_}) when A1 < A2 -> less;
compare_ips({A1,_,_,_,_,_,_,_}, {A2,_,_,_,_,_,_,_}) when A1 > A2 -> greater;
compare_ips(_, _) -> error.
%% ----
%% get_app_subdir/1 - path of a named subdirectory of the yaws app dir.
get_app_subdir(SubDir) when is_atom(SubDir) ->
    filename:join(get_app_dir(), atom_to_list(SubDir)).

%% get_app_dir/0 - the yaws application root.  Computed once from the
%% location of this module's beam file and cached in the application
%% environment (app_dir) for subsequent calls.
get_app_dir() ->
    case application:get_env(yaws, app_dir) of
        {ok, AppDir} ->
            AppDir;
        undefined ->
            AppDir = filename:absname(
                       filename:dirname(filename:dirname(code:which(?MODULE)))
                      ),
            application:set_env(yaws, app_dir, AppDir),
            AppDir
    end.

%% Convenience accessors for the standard application subdirectories.
get_ebin_dir() ->
    get_app_subdir(ebin).

get_priv_dir() ->
    get_app_subdir(priv).

get_inc_dir() ->
    get_app_subdir(include).
|
<?php
namespace App\Repositories;
use App\Models\FaqsModel;
/**
 * Class FaqsRepository
 * @package App\Repositories
 * @author Richard Guevara
 */
class FaqsRepository
{
    /**
     * Get all FAQs that have not been soft-deleted.
     *
     * @return \Illuminate\Database\Eloquent\Collection
     */
    public function showAllfaqs()
    {
        return FaqsModel::where('deleted', 0)->get();
    }

    /**
     * Fetch a single non-deleted FAQ by id, or null when absent.
     *
     * @param  int $id
     * @return FaqsModel|null
     */
    public function faqsEdit($id)
    {
        return FaqsModel::where('id', $id)
            ->where('deleted', 0)
            ->first();
    }

    /**
     * Create a new FAQ, storing its uploaded photo under
     * public/images/faqs.
     *
     * @param  \Illuminate\Http\Request $dataFaqs
     * @return FaqsModel
     */
    public function storeFaqs($dataFaqs)
    {
        return FaqsModel::create([
            'title' => $dataFaqs->title,
            'description' => $dataFaqs->description,
            'content' => $dataFaqs->content,
            'photo' => $this->movePhoto($dataFaqs),
            'created_by' => auth()->user()->id
        ]);
    }

    /**
     * Update an existing FAQ; the photo column is touched only when a
     * replacement photo was uploaded.
     *
     * @param  \Illuminate\Http\Request $dataFaqs
     * @param  int $id
     * @return int number of affected rows
     */
    public function updateFaqs($dataFaqs, $id)
    {
        // Shared fields for both the with-photo and without-photo cases;
        // previously this update array was duplicated in two branches.
        $fields = [
            'title' => $dataFaqs->title,
            'description' => $dataFaqs->description,
            'content' => $dataFaqs->content,
        ];
        if ($dataFaqs->photo != NULL) {
            $fields['photo'] = $this->movePhoto($dataFaqs);
        }
        return FaqsModel::where('id', $id)->update($fields);
    }

    /**
     * Soft-delete a FAQ by setting its 'deleted' flag.
     *
     * @param  object $dataFaqs object exposing the FAQ id
     * @return int number of affected rows
     */
    public function deleteFaqs($dataFaqs)
    {
        return FaqsModel::where('id', $dataFaqs->id)
            ->update([
                'deleted' => 1,
            ]);
    }

    /**
     * Move the uploaded photo into public/images/faqs under a
     * collision-resistant "CODE-timestamp.ext" name and return the
     * relative path stored in the database.
     *
     * @param  \Illuminate\Http\Request $dataFaqs request carrying the photo
     * @return string relative path below public/
     */
    private function movePhoto($dataFaqs)
    {
        $codepic = strtoupper(str_random(6));
        $faqsPic = time().'.'.$dataFaqs->photo->getClientOriginalExtension();
        $moveFaqspic = $codepic."-".$faqsPic;
        $dataFaqs->photo->move(public_path('images/faqs'), $moveFaqspic);
        return "images/faqs/".$moveFaqspic;
    }
}
|
/*! ******************************************************************************
*
* Hop : The Hop Orchestration Platform
*
* http://www.project-hop.org
*
*******************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.apache.hop.workflow.actions.ftp;
import org.apache.hop.workflow.action.loadsave.WorkflowActionLoadSaveTestSupport;
import org.apache.hop.junit.rules.RestoreHopEngineEnvironment;
import org.junit.ClassRule;
import java.util.Arrays;
import java.util.List;
/**
 * Load/save round-trip test for {@link ActionFtp}: verifies that the
 * listed attributes survive serialization and deserialization.
 */
public class WorkflowActionFtpLoadSaveTest extends WorkflowActionLoadSaveTestSupport<ActionFtp> {
  @ClassRule public static RestoreHopEngineEnvironment env = new RestoreHopEngineEnvironment();

  @Override
  protected Class<ActionFtp> getActionClass() {
    return ActionFtp.class;
  }

  /** Attributes common to load and save that must round-trip intact. */
  @Override
  protected List<String> listCommonAttributes() {
    // Arrays.asList is varargs - no need to allocate an explicit String[].
    return Arrays.asList( "port",
      "serverName",
      "userName",
      "password",
      "ftpDirectory",
      "targetDirectory",
      "wildcard",
      "binaryMode",
      "timeout",
      "remove",
      "onlyGettingNewFiles",
      "activeConnection",
      "controlEncoding",
      "moveFiles",
      "moveToDirectory",
      "dateInFilename",
      "timeInFilename",
      "specifyFormat",
      "date_time_format",
      "addDateBeforeExtension",
      "addToResult",
      "createMoveFolder",
      "proxyHost",
      "proxyPort",
      "proxyUsername",
      "proxyPassword",
      "socksProxyHost",
      "socksProxyPort",
      "socksProxyUsername",
      "socksProxyPassword",
      "SifFileExists",
      "limit",
      "success_condition" );
  }
}
|
.\" Copyright (c) 1983, 1993, 1994
.\" The Regents of the University of California. All rights reserved.
.\"
.\" Redistribution and use in source and binary forms, with or without
.\" modification, are permitted provided that the following conditions
.\" are met:
.\" 1. Redistributions of source code must retain the above copyright
.\" notice, this list of conditions and the following disclaimer.
.\" 2. Redistributions in binary form must reproduce the above copyright
.\" notice, this list of conditions and the following disclaimer in the
.\" documentation and/or other materials provided with the distribution.
.\" 3. All advertising materials mentioning features or use of this software
.\" must display the following acknowledgement:
.\" This product includes software developed by the University of
.\" California, Berkeley and its contributors.
.\" 4. Neither the name of the University nor the names of its contributors
.\" may be used to endorse or promote products derived from this software
.\" without specific prior written permission.
.\"
.\" THIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS ``AS IS'' AND
.\" ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
.\" IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
.\" ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE
.\" FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
.\" DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
.\" OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
.\" HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
.\" LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
.\" OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
.\" SUCH DAMAGE.
.\"
.\" @(#)2.0.t 8.4 (Berkeley) 5/26/94
.\"
.Sh 1 "System facilities
.LP
The system abstractions described are:
.IP "Directory contexts
.br
A directory context is a position in the filesystem name
space. Operations on files and other named objects in a filesystem are
always specified relative to such a context.
.IP "Files
.br
Files are used to store uninterpreted sequences of bytes,
which may be \fIread\fP and \fIwritten\fP randomly.
Pages from files may also be mapped into the process address space.
A directory may be read as a file if permitted by the underlying
storage facility,
though it is usually accessed using
.Fn getdirentries
(see section
.Xr 2.2.3.1 ).
(Local filesystems permit directories to be read,
although most NFS implementations do not allow reading of directories.)
.IP "Communications domains
.br
A communications domain represents
an interprocess communications environment, such as the communications
facilities of the 4.4BSD system,
communications in the INTERNET, or the resource sharing protocols
and access rights of a resource sharing system on a local network.
.IP "Sockets
.br
A socket is an endpoint of communication and the focal
point for IPC in a communications domain. Sockets may be created in pairs,
or given names and used to rendezvous with other sockets
in a communications domain, accepting connections from these
sockets or exchanging messages with them. These operations model
a labeled or unlabeled communications graph, and can be used in a
wide variety of communications domains. Sockets can have different
\fItypes\fP\| to provide different semantics of communication,
increasing the flexibility of the model.
.IP "Terminals and other devices
.br
Devices include terminals (providing input editing, interrupt
generation, output flow control, and editing), magnetic tapes,
disks, and other peripherals.
They normally support the generic
\fIread\fP and \fIwrite\fP operations as well as a number of \fIioctl\fP\|'s.
.IP "Processes
.br
Process descriptors provide facilities for control and debugging of
other processes.
|
// SPDX-FileCopyrightText: 2021 Jorrit Rouwe
// SPDX-License-Identifier: MIT
#pragma once
#include <Tests/Test.h>
#include <Physics/Constraints/SixDOFConstraint.h>
// Test scene exercising a six-degrees-of-freedom constraint (SixDOFConstraint).
// Registered with the test framework through the JPH RTTI macro below.
class SixDOFConstraintTest : public Test
{
public:
JPH_DECLARE_RTTI_VIRTUAL(SixDOFConstraintTest)
// Builds the bodies and the constraint for this scene.
virtual void Initialize() override;
virtual void GetInitialCamera(CameraState &ioState) const override;
// This test exposes a settings menu (created in CreateSettingsMenu).
virtual bool HasSettingsMenu() const override { return true; }
virtual void CreateSettingsMenu(DebugUI *inUI, UIElement *inSubMenu) override;
private:
using SettingsRef = Ref<SixDOFConstraintSettings>;
using EAxis = SixDOFConstraintSettings::EAxis;
// Per-axis limit configuration; static so values survive scene restarts.
// NOTE(review): presumably edited via the settings menu — confirm in the .cpp.
static float sLimitMin[EAxis::Num];
static float sLimitMax[EAxis::Num];
static bool sEnableLimits[EAxis::Num];
static SettingsRef sSettings;
// Target orientation in constraint space; starts at zero rotation.
Vec3 mTargetOrientationCS = Vec3::sZero();
Ref<SixDOFConstraint> mConstraint;
};
|
import invariant from "invariant";
import {NativeModules} from "react-native";

// Resolve the native GLCanvas view manager up-front and fail fast with an
// actionable message when the native side has not been linked.
const UIManager = NativeModules.UIManager;
const GLCanvas = UIManager.GLCanvas;
invariant(GLCanvas,
`gl-react-native: the native module is not available.
Make sure you have properly configured it.
See README install instructions.
NativeModules.UIManager.GLCanvas is %s`, GLCanvas);
const captureFrameCommand = GLCanvas.Commands.captureFrame;

// Dispatch the native "captureFrame" command on the GLCanvas view identified
// by `handle`, forwarding the capture `config` object.
module.exports = (handle, config) =>
  UIManager.dispatchViewManagerCommand(handle, captureFrameCommand, [config]);
|
C----------------------------------------------------------------------
subroutine ftuscm(input,np,scaled,scale,zero,output)
C unscale the array of complex numbers, prior to writing to the FITS file
C input d array of complex numbers (pairs of real/imaginary numbers)
C np i total number of values to scale (no. of pairs times 2)
C scaled l is the data scaled?
C scale d scale factor
C zero d offset
C output d output array
integer np,i,j
logical scaled
double precision input(np),output(np)
double precision scale,zero
j=1
if (scaled)then
C walk the pairs: j indexes the real part, then the imaginary part, in turn
do 10 i=1,np/2
C real part: remove the zero offset, then divide by the scale factor
output(j)=(input(j)-zero)/scale
j=j+1
C the imaginary part of the number is not offset!!
output(j)=input(j)/scale
j=j+1
10 continue
else
C data not scaled: straight element-by-element copy
do 20 i=1,np
output(i)=input(i)
20 continue
end if
end
|
import * as uuid from 'uuid';
import { AuthCredentials } from '../models/auth.model';
import { AuthenticationError, UnauthorizedError, NotFoundError } from 'tree-house-errors';
import { comparePassword, createJwt } from 'tree-house-authentication';
import { jwtConfig } from '../config/auth.config';
import { logger } from '../lib/logger';
import { errors } from '../config/errors.config';
import { getForgotPwContent } from '../templates/forgot-pw.mail.template';
import { User } from '../models/user.model';
import * as userRepository from '../repositories/user.repository';
import * as mailer from '../lib/mailer';
/**
 * Issue a fresh JWT access token plus an opaque refresh token for a user.
 * The refresh token is persisted on the user record so it can be matched
 * on subsequent refresh calls.
 */
export async function generateTokens(userId: string) {
  const refreshToken = uuid.v4();
  const accessToken = await createJwt({ userId }, jwtConfig);
  await userRepository.update(userId, { refreshToken });
  return { accessToken, refreshToken };
}
/**
 * Login user with username and password.
 * Returns accessToken and refreshToken.
 * @throws AuthenticationError when the user is unknown or the password does not match
 * @throws UnauthorizedError when the user account is inactive
 */
export async function login(payload: AuthCredentials) {
  const { username, password } = payload;
  try {
    const user = await userRepository.findByEmail(username);
    if (!user) throw new AuthenticationError();

    // Check if still has access
    if (!user.hasAccess) throw new UnauthorizedError(errors.USER_INACTIVE);

    // Match password
    const passwordMatch = await comparePassword(password, user.password);
    if (!passwordMatch) throw new AuthenticationError();

    // Generate JWT and refresh token
    return await generateTokens(user.id);
  } catch (error) {
    // Fixed: dropped the stray `%` left over from a printf-style placeholder
    // (it rendered literally in the log) and corrected the "occured" typo.
    logger.error(`An error occurred trying to login: ${error}`);
    throw error;
  }
}
/**
 * Refresh access token via a refresh token.
 * @throws NotFoundError when the userId/refreshToken pair matches no user
 */
export async function refresh(userId: string, refreshToken: string) {
  try {
    const user = await userRepository.findByRefreshToken(userId, refreshToken);
    if (!user) throw new NotFoundError();

    // Generate JWT and refresh token
    return await generateTokens(user.id);
  } catch (error) {
    // Fixed: stray `%` printf leftover and "occured" typo in the log message.
    logger.error(`An error occurred trying to refresh token: ${error}`);
    throw error;
  }
}
/**
 * Logout an existing user by removing its refresh token.
 */
export async function logout(userId: string) {
  try {
    return await userRepository.update(userId, { refreshToken: null });
  } catch (error) {
    // Fixed: stray `%` printf leftover and "occured" typo in the log message.
    logger.error(`An error occurred trying to logout: ${error}`);
    throw error;
  }
}
/**
 * Start the forgot-password flow by generating an email with a reset link.
 * Errors are logged but deliberately swallowed: the call is fire-and-forget,
 * and not rethrowing avoids leaking whether an email address exists.
 */
export async function initForgotPw(email: string) {
  try {
    const user = await userRepository.findByEmail(email);
    if (!user) throw new NotFoundError();

    // Persist a one-time reset token on the user record.
    const token = uuid.v4();
    await userRepository.update(user.id, { resetPwToken: token });

    // Send email with reset link
    const content = getForgotPwContent({ email, token, firstName: user.firstName });
    return await mailer.sendTemplate(content, mailer.getDefaultClient());
  } catch (error) {
    // Fixed: stray `%` printf leftover and "occured" typo in the log message.
    logger.error(`An error occurred trying to reset password: ${error}`);
    // Do not rethrow error, this will be an async function
  }
}
/**
 * Verify if a forgot-password reset token is still valid.
 * @throws NotFoundError when the token resolves to no user with a pending reset
 */
export async function verifyForgotPw(token: string): Promise<void> {
  try {
    const user = await userRepository.findByResetToken(token);
    if (!user || !user.resetPwToken) throw new NotFoundError();
  } catch (error) {
    // Fixed: stray `%` printf leftover and "occured" typo in the log message.
    logger.error(`An error occurred trying to verify reset password token: ${error}`);
    throw error;
  }
}
/**
 * Confirm a newly chosen password.
 * @throws NotFoundError when the token resolves to no user with a pending reset
 */
export async function confirmForgotPw(token: string, password: string): Promise<User> {
  try {
    const user = await userRepository.findByResetToken(token);
    if (!user || !user.resetPwToken) throw new NotFoundError();
    // Fixed: await the update so a rejected promise is caught (and logged)
    // by this catch block instead of escaping the try/catch.
    return await userRepository.updatePassword(user.id, password);
  } catch (error) {
    // Fixed: stray `%` printf leftover and "occured" typo in the log message.
    logger.error(`An error occurred trying to change password: ${error}`);
    throw error;
  }
}
|
import IsUID from '../../chess/IsUID.js';
import IsChess from '../../chess/IsChess';
import GetChessUID from '../../chess/GetChessUID.js';
import IsTileXY from '../../utils/IsTileXY.js';
var ChessToTileXYZ = function (chess) {
    // Normalizes `chess` (a chess game object, a UID, or a plain tile
    // position) to a {x, y, z} object, or null when it is none of those.
    // NOTE: the UID/chess check must run before the tileXY check — a game
    // object may itself expose x/y properties.
    if (IsUID(chess) || IsChess(chess)) {
        // Resolve the piece's UID, then look its position up on the board.
        return this.boardData.getXYZ(GetChessUID(chess));
    }
    if (IsTileXY(chess)) {
        // Already a {x, y} or {x, y, z} position; hand it back unchanged.
        return chess;
    }
    return null;
}
export default ChessToTileXYZ;
|
/**
* Copied (and slightly modified) from here:
* https://github.com/cypress-io/cypress/issues/915#issuecomment-344389511
*
* This works because cypress commands are not promises.
* They're executed sequentially, never in parallel.
*
* See here: https://docs.cypress.io/guides/core-concepts/introduction-to-cypress.html#Commands-Are-Not-Promises
* > You cannot race or run multiple commands at the same time (in parallel).
*/
// Run the given command factories one after another, collecting each result.
// Safe because Cypress commands queue sequentially (they are not promises),
// so every `.then` callback has pushed its value before `cy.wrap` resolves.
const all = (...commands) => {
  const collected = []
  for (const command of commands) {
    command().then((value) => collected.push(value))
  }
  return cy.wrap(collected)
}

Cypress.Commands.add('all', all)
|
# Admin controller for browsing executed API actions within an API namespace.
class Comfy::Admin::ApiActionsController < Comfy::Admin::Cms::BaseController
before_action :ensure_authority_to_manage_api
# Loads @api_namespace / @api_action for everything except :new.
# NOTE(review): on :index, params[:id] is absent so @api_action ends up nil
# (find_by returns nil); only @api_namespace is used there — confirm intended.
before_action :set_api_action, except: :new
# Builds an unsaved ApiAction for a JS-driven form partial.
def new
@index = params[:index]
@type = params[:type]
@api_action = ApiAction.new(type: params[:type].classify, position: @index)
respond_to do |format|
format.js
end
end
# Paginated, Ransack-filtered list of executed actions, newest first.
def index
params[:q] ||= {}
@api_actions_q = @api_namespace.executed_api_actions.order(created_at: :desc).ransack(params[:q])
@api_actions = @api_actions_q.result.paginate(page: params[:page], per_page: 10)
end
def show
end
def action_workflow
end
private
# Resolves the namespace and the executed action from request params.
def set_api_action
@api_namespace = ApiNamespace.find_by(id: params[:api_namespace_id])
@api_action = @api_namespace.executed_api_actions.find_by(id: params[:id])
end
end
|
# Read about factories at https://github.com/thoughtbot/factory_girl
FactoryGirl.define do
# Base list item fixture; the nested factories below inherit these
# attributes and only override what differs.
factory :list_item do
index 1
order 0
title "Great list item"
list_type "item"
# Inherits list_type "item" from :list_item.
factory :list_item2 do
index 2
order 1
title "Another great list item"
end
# Inherits list_type "item" from :list_item.
factory :list_item3 do
index 3
order 2
title "Truck wheels"
end
end
end
|
#!/bin/sh
# Development environment variables for the digital-register-api service.
# Source this file before running the app locally.
export SETTINGS='dev'
# Postgres connection details.
# NOTE(review): credentials are hard-coded; acceptable for local dev only —
# never reuse this file for shared or production environments.
export POSTGRES_USER='postgres'
export POSTGRES_PASSWORD='password'
export POSTGRES_HOST='172.16.42.43'
export POSTGRES_PORT=5432
export POSTGRES_DB='register_data'
# Logging configuration and fault-log destination.
export LOGGING_CONFIG_FILE_PATH='logging_config.json'
export FAULT_LOG_FILE_PATH='/var/log/applications/digital-register-api-fault.log'
# Elasticsearch endpoint and index used for title/address searches.
export ELASTICSEARCH_ENDPOINT_URI='http://localhost:9200'
export ELASTICSEARCH_INDEX_NAME='landregistry'
export ADDRESS_SEARCH_API='http://landregistry.local:8002/'
export MAX_NUMBER_SEARCH_RESULTS=50
export PYTHONPATH=.
export SEARCH_RESULTS_PER_PAGE=20
# Elasticsearch document types for the two search modes.
export POSTCODE_SEARCH_DOC_TYPE=property_by_postcode_3
export ADDRESS_SEARCH_DOC_TYPE=property_by_address
export NOMINAL_PRICE=300
export LOGGING_LEVEL='DEBUG'
|
package com.example.base_bottom.baidu_dingwei.network
import retrofit2.await
/**
 * Network facade for bus data; wraps the Retrofit service in suspend functions.
 */
object BusNetWork {

    private val service = RetrofitCreator.create(GetBusData::class.java)

    /** Fetches the full list of buses. */
    suspend fun getAllBus() = service.getAllBus().await()

    /** Fetches the route for the bus identified by [busId]. */
    suspend fun getBusRoute(busId: String) = service.getBusRoute(busId).await()
}
|
/*
* Copyright 2019 Louis Cognault Ayeva Derman. Use of this source code is governed by the Apache 2.0 license.
*/
package com.example.splitties.main
import android.content.Context
import android.util.AttributeSet
import androidx.appcompat.app.AppCompatActivity
import com.example.splitties.R
import splitties.dimensions.dip
import splitties.resources.dimenPxSize
import splitties.resources.styledColor
import splitties.views.coroutines.awaitOneClick
import splitties.views.coroutines.material.showAndAwaitOneClickThenHide
import splitties.views.dsl.appcompat.toolbar
import splitties.views.dsl.coordinatorlayout.anchorTo
import splitties.views.dsl.coordinatorlayout.appBarLParams
import splitties.views.dsl.coordinatorlayout.coordinatorLayout
import splitties.views.dsl.coordinatorlayout.defaultLParams
import splitties.views.dsl.core.*
import splitties.views.dsl.idepreview.UiPreView
import splitties.views.dsl.material.EXIT_UNTIL_COLLAPSED
import splitties.views.dsl.material.MaterialComponentsStyles
import splitties.views.dsl.material.PIN
import splitties.views.dsl.material.SCROLL
import splitties.views.dsl.material.actionBarLParams
import splitties.views.dsl.material.appBarLayout
import splitties.views.dsl.material.collapsingToolbarLayout
import splitties.views.dsl.material.contentScrollingWithAppBarLParams
import splitties.views.dsl.material.defaultLParams
import splitties.views.dsl.material.floatingActionButton
import splitties.views.dsl.recyclerview.wrapInRecyclerView
import splitties.views.gravityCenterHorizontal
import splitties.views.gravityEndBottom
import splitties.views.imageResource
import splitties.views.material.contentScrimColor
import splitties.views.textResource
import com.google.android.material.R as MaterialR
// Main screen UI built with the Splitties view DSL: a collapsing app bar,
// a scrollable column of demo buttons, and a FAB anchored to the app bar.
class MainUiImpl(override val ctx: Context) : MainAndroidUi {
// Each await* function suspends until the user taps the matching view,
// letting the caller consume clicks sequentially instead of via listeners.
override suspend fun awaitFabClick() = fab.showAndAwaitOneClickThenHide()
override suspend fun awaitLaunchMaterialListDemoRequest() = launchDemoBtn.awaitOneClick()
override suspend fun awaitLaunchPermissionDemoRequest() {
bePoliteWithPermissionsBtn.awaitOneClick()
}
override suspend fun awaitLaunchSayHelloDemoRequest() {
sayHelloBtn.awaitOneClick()
}
override suspend fun awaitToggleNightModeRequest() {
toggleNightModeBtn.awaitOneClick()
}
override suspend fun awaitTrySoundRequest() {
trySoundBtn.awaitOneClick()
}
// Material Components styled widget factories for this context.
private val materialStyles = MaterialComponentsStyles(ctx)
private val materialButtons = materialStyles.button
private val launchDemoBtn = materialButtons.text {
textResource = R.string.go_to_the_demo
}
private val bePoliteWithPermissionsBtn = materialButtons.filled {
textResource = R.string.be_polite_with_permissions
}
private val sayHelloBtn = materialButtons.text {
textResource = R.string.say_hello
}
private val toggleNightModeBtn = materialButtons.filledWithIcon {
setIconResource(R.drawable.ic_invert_colors_white_24dp)
textResource = R.string.toggle_night_mode
}
private val trySoundBtn = materialButtons.filled {
textResource = R.string.play_a_sound
}
private val fab = floatingActionButton {
imageResource = R.drawable.ic_favorite_white_24dp
}
// Vertical column of the buttons above plus a long text block, wrapped in a
// RecyclerView so the whole column scrolls.
private val content = verticalLayout {
add(launchDemoBtn, lParams {
gravity = gravityCenterHorizontal
topMargin = dip(8)
})
add(bePoliteWithPermissionsBtn, lParams {
gravity = gravityCenterHorizontal
topMargin = dip(8)
})
add(sayHelloBtn, lParams {
gravity = gravityCenterHorizontal
topMargin = dip(8)
})
add(toggleNightModeBtn, lParams {
gravity = gravityCenterHorizontal
bottomMargin = dip(8)
})
add(trySoundBtn, lParams {
gravity = gravityCenterHorizontal
bottomMargin = dip(8)
})
add(textView {
textResource = R.string.large_text
}, lParams {
margin = dimenPxSize(R.dimen.text_margin)
})
}.wrapInRecyclerView()
// Collapsing toolbar app bar; the toolbar is pinned while the rest collapses.
private val appBar = appBarLayout(theme = R.style.AppTheme_AppBarOverlay) {
add(collapsingToolbarLayout {
fitsSystemWindows = true
contentScrimColor = styledColor(MaterialR.attr.colorPrimary)
add(toolbar {
// Registers this toolbar as the activity's action bar when hosted
// by an AppCompatActivity (no-op otherwise, e.g. in IDE preview).
(ctx as? AppCompatActivity)?.setSupportActionBar(this)
popupTheme = R.style.AppTheme_PopupOverlay
}, actionBarLParams(collapseMode = PIN))
}, defaultLParams(height = matchParent) {
scrollFlags = SCROLL or EXIT_UNTIL_COLLAPSED
})
}
// Root coordinator: app bar on top, scrolling content below, FAB anchored
// to the app bar's end-bottom corner.
override val root = coordinatorLayout {
fitsSystemWindows = true
add(appBar, appBarLParams(dip(180)))
add(content, contentScrollingWithAppBarLParams())
add(fab, defaultLParams {
anchorTo(appBar, gravity = gravityEndBottom)
margin = dip(16)
})
}
}
//region IDE preview
// View wrapper that renders MainUiImpl in the IDE layout preview only;
// hidden from completion so production code cannot reference it.
@Deprecated("For IDE preview only", level = DeprecationLevel.HIDDEN)
private class MainUiImplPreview(
context: Context,
attrs: AttributeSet? = null,
defStyleAttr: Int = 0
) : UiPreView(
context = context.withTheme(R.style.AppTheme),
attrs = attrs,
defStyleAttr = defStyleAttr,
createUi = { MainUiImpl(it) }
)
//endregion
|
# Chef recipe: installs and configures nginx from the official nginx.org repo.
name = cookbook_name.to_s
# Shorthand for this cookbook's attribute tree.
n = node[name]
###############################################################################
# Initial setup and pre-requisites
###############################################################################
apt_repository 'nginx' do
uri "#{n["release_bases"][n["release"]]}/#{Nginx.get_os()}/"
components ['nginx']
deb_src true
key "https://nginx.org/keys/nginx_signing.key"
keyserver false
notifies :update, "apt_update[#{name}-repo-update]", :immediately
end
apt_update "#{name}-repo-update" do
action :nothing
end
# set permissions on first nginx install
directory n["dirs"]["log"] do
mode '0755'
action :nothing
end
###############################################################################
# Installation
###############################################################################
# When a specific version is pinned, remove any differently-versioned nginx
# packages first (guarded by the running `nginx -v` check).
if n.has_key?("version") && !n["version"].empty?
# http://stackoverflow.com/a/40237749/5006
["nginx-full", "nginx-common", "nginx"].each do |pkg|
package "nginx-remove-#{pkg}" do
package_name pkg
action :remove
not_if "nginx -v 2>&1 | grep \"#{n["version"]}\""
end
end
end
# http://nginx.org/en/docs/beginners_guide.html
# https://www.digitalocean.com/community/tutorials/how-to-install-nginx-on-ubuntu-14-04-lts
package "nginx" do
notifies :create, "directory[#{n["dirs"]["log"]}]", :immediately
version Nginx.get_version(n["version"])
end
###############################################################################
# configuration
###############################################################################
# NOTE(review): this execute has no guard, so it runs on every converge
# (deleting symlinked configs that the templates below recreate) — confirm
# this is intentional.
execute "remove current nginx configs" do
command "find \"#{n["dirs"]["conf.d"]}\" -type l -delete"
end
# global configuration
conf_d = n.fetch("config_global", {})
conf_path = ::File.join(n["dirs"]["conf.d"], "conf.conf")
template conf_path do
source "conf.conf.erb"
variables(conf_d)
#notifies :stop, "service[#{name}]", :delayed
#notifies :start, "service[#{name}]", :delayed
notifies :restart, "service[#{name}]", :delayed
end
# per server configuration
default_options = n.fetch("config", {})
n["servers"].each do |server_name, server_options|
variables = Nginx.get_config(server_name, server_options, default_options)
server_path = ::File.join(n["dirs"]["conf.d"], "#{server_name}.conf")
# http://serverfault.com/questions/10854/nginx-https-serving-with-same-config-as-http
template server_path do
source "server.conf.erb"
variables(variables)
#notifies :stop, "service[#{name}]", :delayed
#notifies :start, "service[#{name}]", :delayed
notifies :restart, "service[#{name}]", :delayed
end
end
# http://wiki.opscode.com/display/chef/Resources#Resources-Service
# Service resource used as a notification target; restart is a full
# stop/start rather than systemctl restart.
service name do
service_name name
action :nothing
restart_command "systemctl stop #{name}; systemctl start #{name}"
#supports :start => true, :stop => true, :status => true, :restart => true, :reload => true
end
|
<?php
namespace App\Http\Requests;
use Illuminate\Foundation\Http\FormRequest;
use Illuminate\Validation\Rule;
/**
 * Form request guarding product create/update endpoints.
 */
class ProductRequest extends FormRequest
{
    /**
     * Only authenticated users may submit this request.
     */
    public function authorize()
    {
        return \Auth::check();
    }

    /**
     * Validation rules. AJAX submissions are intentionally not validated
     * here (an empty rule set is returned for them).
     */
    public function rules()
    {
        $id = (isset($this->product->id)) ? $this->product->id : 0;

        if ($this->ajax()) {
            return [];
        }

        return [
            'name' => 'required',
            // Code must be unique among products, ignoring the product being edited.
            'code' => ['required', Rule::unique('products')->ignore($id)],
            'cost' => 'required',
            'category_id' => 'numeric',
        ];
    }

    /**
     * Spanish-language validation messages shown to the user.
     */
    public function messages()
    {
        return [
            'name.required' => 'El nombre del producto es obligatorio',
            'code.required' => 'El código del producto es obligatorio',
            'code.unique' => 'El código elegido ya le pertenece a otro producto',
            'cost.required' => 'El costo es obligatorio',
            'category_id.numeric' => 'La categoría es obligatoria',
        ];
    }
}
|
/*
* Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file
* for details. All rights reserved. Use of this source code is governed by a
* BSD-style license that can be found in the LICENSE file.
*/
/**
* @description Tests autofocus when a modal dialog is opened.
*/
import "dart:html";
import "../../../testcommon.dart";
main() {
// Build a page with an autofocus button outside the dialog and several
// autofocus candidates inside it.
document.body.setInnerHtml('''
<button id="outer-button" autofocus></button>
<dialog id="dialog">
<button></button>
<dialog>
<button autofocus></button>
</dialog>
<div>
<span>
<button id="autofocus-button" autofocus></button>
</span>
</div>
<button id="another-button" autofocus></button>
</dialog>
''', treeSanitizer: new NullTreeSanitizer());
debug('Initial active element');
// Before the dialog opens, the outer autofocus button holds focus.
shouldBe(document.activeElement, document.getElementById("outer-button"));
DialogElement dialog = document.getElementById('dialog');
dialog.showModal();
// Opening the modal moves focus to the first autofocus element inside it
// (the button in the nested, non-open dialog is skipped).
ButtonElement autofocusButton = document.getElementById('autofocus-button');
shouldBe(document.activeElement, autofocusButton);
ButtonElement anotherButton = document.getElementById('another-button');
anotherButton.focus();
shouldBe(document.activeElement, anotherButton);
debug('Test that reattaching does not give focus back to a previously autofocused element.');
// offsetHeight reads force a layout so the display change takes effect.
autofocusButton.style.display = 'none';
document.body.offsetHeight;
autofocusButton.style.display = 'block';
document.body.offsetHeight;
shouldBe(document.activeElement, anotherButton);
debug('Test that reinserting does not give focus back to a previously autofocused element.');
var parentNode = autofocusButton.parentNode;
autofocusButton.remove();
document.body.offsetHeight;
parentNode.append(autofocusButton);
document.body.offsetHeight;
shouldBe(document.activeElement, anotherButton);
dialog.close('');
debug('Test that autofocus runs again when a dialog is reopened.');
dialog.showModal();
shouldBe(document.activeElement, autofocusButton);
dialog.close('');
}
|
package com.tcwong.pattern.singleton;
/**
 * Singleton implemented as an enum: the JVM guarantees exactly one INSTANCE
 * and protects it against reflection and serialization attacks.
 * (Original comment translated from Chinese: "enum".)
 */
public enum EnumSingleton {
INSTANCE;
/** Accessor provided for API symmetry with other singleton implementations. */
public static EnumSingleton getInstance() {
return INSTANCE;
}
}
|
/*
* Copyright 2021 4Paradigm
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com._4paradigm.hybridsql.spark.utils
import org.slf4j.LoggerFactory
/**
 * Minimal command-line argument parser. Callers pass a partial function keyed
 * on option names to [[parseArgs]] and consume each option's operand via
 * [[parseValue]], [[parsePair]] or [[parseInt]] inside their handler.
 * Statement order matters: the handlers advance the shared cursor `idx`.
 */
class ArgumentParser(args: Array[String]) {
private val logger = LoggerFactory.getLogger(this.getClass)
// Cursor into `args`; advanced by the main loop and by parseValue().
private var idx = 0
// Option currently being handled; kept for error messages.
private var curKey: String = _
// Walks all arguments, dispatching each recognized key to `fn`.
// Handler exceptions are logged and parsing continues with the next argument.
def parseArgs(fn: PartialFunction[String, Unit]): Unit = {
while (idx < args.length) {
curKey = args(idx)
try {
if (fn.isDefinedAt(curKey)) {
fn.apply(curKey)
}
} catch {
case e: Exception =>
logger.error(s"Parse argument $curKey failed: ${e.getMessage}")
}
idx += 1
}
}
// Consumes the next argument as a "key=value" pair.
// Throws IllegalArgumentException when no '=' is present.
def parsePair(): (String, String) = {
val value = parseValue()
val splitPos = value.indexOf("=")
if (splitPos < 0) {
throw new IllegalArgumentException(
s"Illegal value for $curKey: $value")
}
val (k, v) = (value.substring(0, splitPos), value.substring(splitPos + 1))
k -> v
}
// Advances the cursor and returns the next raw argument.
// Throws IllegalArgumentException when the current key has no operand.
def parseValue(): String = {
idx += 1
if (idx >= args.length) {
throw new IllegalArgumentException(
s"Argument index out of bound for $curKey")
}
args(idx)
}
// Consumes the next argument as an Int (NumberFormatException on bad input).
def parseInt(): Int = {
parseValue().toInt
}
}
|
-- Seed the base item catalogue.
-- NOTE(review): presumably `limit` = -1 means "no carry limit" and
-- `can_remove` = 1 marks the item droppable — confirm against inventory code.
INSERT INTO `items` (`name`, `label`, `limit`, `rare`, `can_remove`) VALUES
('lockpick', 'Lock Pick', -1, 0, 1),
('darknet', 'Dark Net', 1, 0, 1),
('drill', 'Drill', 1, 0, 1),
('binoculars', 'Binoculars', 1, 0, 1),
('oxygen_mask', 'Oxygen Mask', -1, 0, 1),
('bulletproof', 'Bullet-Proof Vest', -1, 0, 1),
('firstaidkit', 'First Aid Kit', -1, 0, 1),
('clip', 'Weapon Clip', -1, 0, 1)
;
-- Stock the ExtraItemsShop with the items seeded above.
-- Fixed: the original had a trailing comma after the last VALUES tuple,
-- which is a SQL syntax error.
INSERT INTO `shops` (store, item, price) VALUES
('ExtraItemsShop', 'lockpick', 20),
('ExtraItemsShop', 'darknet', 25),
('ExtraItemsShop', 'drill', 180),
('ExtraItemsShop', 'binoculars', 10),
('ExtraItemsShop', 'oxygen_mask', 400),
('ExtraItemsShop', 'bulletproof', 300),
('ExtraItemsShop', 'firstaidkit', 80),
('ExtraItemsShop', 'clip', 250)
;
|
module Lunch
module Sql
# Sequel model backing the `groups` table.
# A group has many memberships and, through the `memberships` join table,
# many restaurants.
class Group < Sequel::Model
one_to_many :memberships
many_to_many :restaurants, join_table: :memberships
end
end
end
|
/*
* BSD 2-Clause License
*
* Copyright (c) 2017, Redis Labs
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.redislabs.modules.rejson;
import static junit.framework.TestCase.assertEquals;
import static junit.framework.TestCase.assertFalse;
import static junit.framework.TestCase.assertNull;
import static junit.framework.TestCase.assertSame;
import static junit.framework.TestCase.assertTrue;
import static junit.framework.TestCase.fail;
import static org.junit.Assert.assertArrayEquals;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
import org.junit.Before;
import org.junit.Test;
import com.google.gson.Gson;
import com.google.gson.JsonArray;
import com.google.gson.JsonObject;
import com.google.gson.GsonBuilder;
import redis.clients.jedis.Jedis;
import redis.clients.jedis.exceptions.JedisDataException;
public class ClientTest {
/* A simple class that represents an object in real life */
/* Minimal fixture with one string and one boolean field, used to exercise
 * basic JSON set/get round-trips. Fields are initialized to known values. */
@SuppressWarnings("unused")
private static class IRLObject {
public String str;
public boolean bool;
public IRLObject() {
this.str = "string";
this.bool = true;
}
}
/* Fixture covering the JSON scalar types (string, boolean, int, float)
 * plus an array, with fixed known values for assertions. */
@SuppressWarnings("unused")
private static class FooBarObject {
public String foo;
public boolean fooB;
public int fooI;
public float fooF;
public String[] fooArr;
public FooBarObject() {
this.foo = "bar";
this.fooB = true;
this.fooI = 6574;
this.fooF = 435.345f;
this.fooArr = new String[]{"a", "b","c"};
}
}
/* Value object used as the nested payload in multi-get tests. */
private static class Baz {
private String quuz;
private String grault;
private String waldo;
public Baz(final String quuz, final String grault, final String waldo) {
this.quuz = quuz;
this.grault = grault;
this.waldo = waldo;
}
@Override
public boolean equals(Object o) {
if (this == o)
return true;
if (o == null)
return false;
if (getClass() != o.getClass())
return false;
Baz other = (Baz) o;
return Objects.equals(quuz, other.quuz) && //
Objects.equals(grault, other.grault) && //
Objects.equals(waldo, other.waldo);
}
// Fixed: equals() was overridden without hashCode(), violating the
// Object contract (equal instances could land in different hash buckets).
@Override
public int hashCode() {
return Objects.hash(quuz, grault, waldo);
}
}
/* Aggregate value object (three strings plus a nested Baz) used in
 * multi-get tests. */
private static class Qux {
private String quux;
private String corge;
private String garply;
private Baz baz;
public Qux(final String quux, final String corge, final String garply, final Baz baz) {
this.quux = quux;
this.corge = corge;
this.garply = garply;
this.baz = baz;
}
@Override
public boolean equals(Object o) {
if (this == o)
return true;
if (o == null)
return false;
if (getClass() != o.getClass())
return false;
Qux other = (Qux) o;
return Objects.equals(quux, other.quux) && //
Objects.equals(corge, other.corge) && //
Objects.equals(garply, other.garply) && //
Objects.equals(baz, other.baz);
}
// Fixed: equals() was overridden without hashCode(), violating the
// Object contract; keep both consistent for hash-based collections.
@Override
public int hashCode() {
return Objects.hash(quux, corge, garply, baz);
}
}
// Gson instance used to build "expected" generic objects for comparisons.
private final Gson g = new Gson();
// Clients under test; both target a local Redis with the ReJSON module loaded.
private final JReJSON client = new JReJSON("localhost",6379);
private final Jedis jedis = new Jedis("localhost",6379);
@Before
public void cleanup() {
// Start every test against an empty database.
jedis.flushDB();
}
@Test
public void noArgsConstructorReturnsClientToLocalMachine() {
final JReJSON defaultClient = new JReJSON();
final JReJSON explicitLocalClient = new JReJSON("localhost", 6379);
// naive set with a path
defaultClient.set("null", null, Path.ROOT_PATH);
// A write through the default client must be readable via the explicit one,
// proving both point at the same local instance.
assertNull(explicitLocalClient.get("null", String.class, Path.ROOT_PATH));
}
@Test
public void testJedisConstructor() {
final JReJSON defaultClient = new JReJSON();
// Client built from an existing Jedis connection.
final JReJSON withJedis = new JReJSON(jedis);
// naive set with a path
defaultClient.set("null", null, Path.ROOT_PATH);
// Both clients must observe the same data.
assertNull(withJedis.get("null", String.class, Path.ROOT_PATH));
}
@Test
public void basicSetGetShouldSucceed() {
// naive set with a path
client.set("null", null, Path.ROOT_PATH);
assertNull(client.get("null", String.class, Path.ROOT_PATH));
// real scalar value and no path
client.set( "str", "strong");
assertEquals("strong", client.get( "str"));
// a slightly more complex object
IRLObject obj = new IRLObject();
client.set( "obj", obj);
// Round-trip through Gson so both sides compare as generic structures.
Object expected = g.fromJson(g.toJson(obj), Object.class);
assertTrue(expected.equals(client.get( "obj")));
// check an update
Path p = Path.of(".str");
client.set( "obj", "strung", p);
assertEquals("strung", client.get( "obj", String.class, p));
}
@Test
public void setExistingPathOnlyIfExistsShouldSucceed() {
client.set( "obj", new IRLObject());
Path p = Path.of(".str");
// MUST_EXIST on a path that does exist: the write goes through.
client.set( "obj", "strangle", JReJSON.ExistenceModifier.MUST_EXIST, p);
assertEquals("strangle", client.get( "obj", String.class, p));
}
@Test
public void setNonExistingOnlyIfNotExistsShouldSucceed() {
client.set( "obj", new IRLObject());
Path p = Path.of(".none");
// NOT_EXISTS on a missing path: the write creates it.
client.set( "obj", "strangle", JReJSON.ExistenceModifier.NOT_EXISTS, p);
assertEquals("strangle", client.get( "obj", String.class, p));
}
@Test
public void setWithoutAPathDefaultsToRootPath() {
client.set( "obj1", new IRLObject());
// No explicit path: the whole root value is replaced.
client.set( "obj1", "strangle", JReJSON.ExistenceModifier.MUST_EXIST);
assertEquals("strangle", client.get( "obj1", String.class, Path.ROOT_PATH));
}
// NOT_EXISTS on a path that already exists: the server refuses the write
// and the client surfaces it as a NullPointerException.
@Test(expected = NullPointerException.class)
public void setExistingPathOnlyIfNotExistsShouldFail() {
client.set( "obj", new IRLObject());
Path p = Path.of(".str");
client.set( "obj", "strangle", JReJSON.ExistenceModifier.NOT_EXISTS, p);
}
// MUST_EXIST on a missing path: the write is refused, surfaced as NPE.
@Test(expected = NullPointerException.class)
public void setNonExistingPathOnlyIfExistsShouldFail() {
client.set( "obj", new IRLObject());
Path p = Path.of(".none");
client.set( "obj", "strangle", JReJSON.ExistenceModifier.MUST_EXIST, p);
}
@Test(expected = JedisDataException.class)
public void setException() {
// should error on non root path for new key
client.set( "test", "bar", Path.of(".foo"));
}
@Test
public void getMultiplePathsShouldSucceed() {
// check multiple paths
IRLObject obj = new IRLObject();
client.set( "obj", obj);
// A multi-path get returns an object keyed by the requested paths.
Object expected = g.fromJson(g.toJson(obj), Object.class);
assertTrue(expected.equals(client.get( "obj", Object.class, Path.of("bool"), Path.of("str"))));
}
// JSON.TOGGLE flips a boolean in place; non-boolean targets must error
// without modifying the value.
@Test
public void toggle() {
IRLObject obj = new IRLObject();
client.set( "obj", obj);
Path pbool = Path.of(".bool");
// check initial value
assertTrue(client.get("obj", Boolean.class, pbool));
// true -> false
client.toggle("obj", pbool);
assertFalse(client.get("obj", Boolean.class, pbool));
// false -> true
client.toggle("obj", pbool);
assertTrue(client.get("obj", Boolean.class, pbool));
// ignore non-boolean field
Path pstr = Path.of(".str");
try {
client.toggle("obj", pstr);
fail("Path not a bool");
} catch (JedisDataException jde) {
assertTrue(jde.getMessage().contains("not a bool"));
}
// The failed toggle must leave the string untouched.
assertEquals("string", client.get("obj", String.class, pstr));
}
// Getting a path that does not exist on a scalar value raises a data error.
@Test(expected = JedisDataException.class)
public void getException() {
client.set( "test", "foo", Path.ROOT_PATH);
client.get( "test", String.class, Path.of(".bar"));
}
@Test
public void delValidShouldSucceed() {
// check deletion of a single path
client.set( "obj", new IRLObject(), Path.ROOT_PATH);
client.del( "obj", Path.of(".str"));
// Deleting a sub-path leaves the key itself in place.
assertTrue(jedis.exists("obj"));
// check deletion root using default root -> key is removed
client.del( "obj");
assertFalse(jedis.exists("obj"));
}
// ".foo" holds a string, so ".foo[1]" cannot exist; del must not throw.
@Test
public void delNonExistingPathsAreIgnored() {
client.set( "foobar", new FooBarObject(), Path.ROOT_PATH);
// Fixed: dropped the discarded .longValue() unboxing — the result was
// never used, so the call only added a gratuitous NPE risk.
client.del( "foobar", Path.of(".foo[1]"));
}
// JSON.TYPE maps ReJSON types onto Java class tokens per path.
@Test
public void typeChecksShouldSucceed() {
client.set( "foobar", new FooBarObject(), Path.ROOT_PATH);
assertSame(Object.class, client.type( "foobar"));
assertSame(Object.class, client.type( "foobar", Path.ROOT_PATH));
assertSame(String.class, client.type( "foobar", Path.of(".foo")));
assertSame(int.class, client.type( "foobar", Path.of(".fooI")));
assertSame(float.class, client.type( "foobar", Path.of(".fooF")));
assertSame(List.class, client.type( "foobar", Path.of(".fooArr")));
assertSame(boolean.class, client.type( "foobar", Path.of(".fooB")));
// Unknown paths must raise rather than return a type.
try {
client.type( "foobar", Path.of(".fooErr"));
fail();
}catch(Exception e) {}
}
// type() on an impossible path (indexing into a string) surfaces as NPE.
@Test(expected = NullPointerException.class)
public void typeException() {
client.set( "foobar", new FooBarObject(), Path.ROOT_PATH);
client.type( "foobar", Path.of(".foo[1]"));
}
// NOTE(review): this duplicates typeException() above verbatim — likely a
// leftover copy; consider removing one of the two.
@Test(expected = NullPointerException.class)
public void type1Exception() {
client.set( "foobar", new FooBarObject(), Path.ROOT_PATH);
client.type( "foobar", Path.of(".foo[1]"));
}
// JSON.MGET at the root path: every requested key exists and deserializes.
@Test
public void testMultipleGetAtRootPathAllKeysExist() {
Baz baz1 = new Baz("quuz1", "grault1", "waldo1");
Baz baz2 = new Baz("quuz2", "grault2", "waldo2");
Qux qux1 = new Qux("quux1", "corge1", "garply1", baz1);
Qux qux2 = new Qux("quux2", "corge2", "garply2", baz2);
client.set("qux1", qux1);
client.set("qux2", qux2);
List<Qux> oneQux = client.mget(Qux.class, "qux1");
List<Qux> allQux = client.mget(Qux.class, "qux1", "qux2");
assertEquals(1, oneQux.size());
assertEquals(2, allQux.size());
assertEquals(qux1, oneQux.get(0));
// Match results by content rather than relying on response ordering.
Qux testQux1 = allQux.stream() //
.filter(q -> q.quux.equals("quux1")) //
.findFirst() //
.orElseThrow(() -> new NullPointerException(""));
Qux testQux2 = allQux.stream() //
.filter(q -> q.quux.equals("quux2")) //
.findFirst() //
.orElseThrow(() -> new NullPointerException(""));
assertEquals(qux1, testQux1);
assertEquals(qux2, testQux2);
}
@Test
// mget with a missing key ("qux3") yields a null placeholder at that
// position rather than shrinking the result list.
public void testMultipleGetAtRootPathWithMissingKeys() {
    Baz baz1 = new Baz("quuz1", "grault1", "waldo1");
    Baz baz2 = new Baz("quuz2", "grault2", "waldo2");
    Qux qux1 = new Qux("quux1", "corge1", "garply1", baz1);
    Qux qux2 = new Qux("quux2", "corge2", "garply2", baz2);
    client.set("qux1", qux1);
    client.set("qux2", qux2);
    List<Qux> allQux = client.mget(Qux.class, "qux1", "qux2", "qux3");
    assertEquals(3, allQux.size());
    assertNull(allQux.get(2));
    allQux.removeAll(Collections.singleton(null));
    assertEquals(2, allQux.size());
}
@Test
// mget with a sub-path ("baz") extracts the nested object from every key.
public void testMultipleGetWithPathPathAllKeysExist() {
    Baz baz1 = new Baz("quuz1", "grault1", "waldo1");
    Baz baz2 = new Baz("quuz2", "grault2", "waldo2");
    Qux qux1 = new Qux("quux1", "corge1", "garply1", baz1);
    Qux qux2 = new Qux("quux2", "corge2", "garply2", baz2);
    client.set("qux1", qux1);
    client.set("qux2", qux2);
    List<Baz> allBaz = client.mget(Path.of("baz"), Baz.class, "qux1", "qux2");
    assertEquals(2, allBaz.size());
    Baz testBaz1 = allBaz.stream() //
            .filter(b -> b.quuz.equals("quuz1")) //
            .findFirst() //
            .orElseThrow(() -> new NullPointerException(""));
    Baz testBaz2 = allBaz.stream() //
            .filter(q -> q.quuz.equals("quuz2")) //
            .findFirst() //
            .orElseThrow(() -> new NullPointerException(""));
    assertEquals(baz1, testBaz1);
    assertEquals(baz2, testBaz2);
}
@Test
// arrLen reports the element count of the stored array (.fooArr has 3).
public void testArrayLength() {
    client.set( "foobar", new FooBarObject(), Path.ROOT_PATH);
    assertEquals(Long.valueOf(3L), client.arrLen( "foobar", Path.of(".fooArr")));
}
@Test
// clear() empties an array (returns 1 = one value cleared) and is a no-op
// (returns 0) on non-container values such as strings.
public void clearArray() {
    client.set("foobar", new FooBarObject(), Path.ROOT_PATH);
    Path arrPath = Path.of(".fooArr");
    assertEquals(Long.valueOf(3L), client.arrLen("foobar", arrPath));
    assertEquals(1L, client.clear("foobar", arrPath));
    assertEquals(Long.valueOf(0L), client.arrLen("foobar", arrPath));
    // ignore non-array
    Path strPath = Path.of("foo");
    assertEquals(0L, client.clear("foobar", strPath));
    assertEquals("bar", client.get("foobar", String.class, strPath));
}
@Test
// clear() on an object resets all of its members to null while keeping the
// object itself in place.
public void clearObject() {
    Baz baz = new Baz("quuz", "grault", "waldo");
    Qux qux = new Qux("quux", "corge", "garply", baz);
    client.set("qux", qux);
    Path objPath = Path.of("baz");
    assertEquals(baz, client.get("qux", Baz.class, objPath));
    assertEquals(1L, client.clear("qux", objPath));
    assertEquals(new Baz(null, null, null), client.get("qux", Baz.class, objPath));
}
@Test
// arrAppend with homogeneous elements returns the new array length.
public void testArrayAppendSameType() {
    String json = "{ a: 'hello', b: [1, 2, 3], c: { d: ['ello'] }}";
    JsonObject jsonObject = new Gson().fromJson(json, JsonObject.class);
    client.set( "test_arrappend", jsonObject, Path.ROOT_PATH);
    assertEquals(Long.valueOf(6L), client.arrAppend( "test_arrappend", Path.of(".b"), 4, 5, 6));
    Integer[] array = client.get("test_arrappend", Integer[].class, Path.of(".b"));
    assertArrayEquals(new Integer[] {1, 2, 3, 4, 5, 6}, array);
}
@Test
// arrAppend accepts mixed element types (string, boolean, null).
public void testArrayAppendMultipleTypes() {
    String json = "{ a: 'hello', b: [1, 2, 3], c: { d: ['ello'] }}";
    JsonObject jsonObject = new Gson().fromJson(json, JsonObject.class);
    client.set( "test_arrappend", jsonObject, Path.ROOT_PATH);
    assertEquals(Long.valueOf(6L), client.arrAppend( "test_arrappend", Path.of(".b"), "foo", true, null));
    Object[] array = client.get("test_arrappend", Object[].class, Path.of(".b"));
    // NOTE: GSon converts numeric types to the most accommodating type (Double)
    // when type information is not provided (as in the Object[] below)
    assertArrayEquals(new Object[] {1.0, 2.0, 3.0, "foo", true, null}, array);
}
@Test
// Same as above but appending into a nested array path (.c.d).
public void testArrayAppendMultipleTypesWithDeepPath() {
    String json = "{ a: 'hello', b: [1, 2, 3], c: { d: ['ello'] }}";
    JsonObject jsonObject = new Gson().fromJson(json, JsonObject.class);
    client.set( "test_arrappend", jsonObject, Path.ROOT_PATH);
    assertEquals(Long.valueOf(4L), client.arrAppend( "test_arrappend", Path.of(".c.d"), "foo", true, null));
    Object[] array = client.get("test_arrappend", Object[].class, Path.of(".c.d"));
    assertArrayEquals(new Object[] {"ello", "foo", true, null}, array);
}
@Test
// Appending into an initially empty array.  NOTE(review): "Againts" in the
// method name is a typo for "Against"; left as-is since test names are
// referenced by tooling/reports.
public void testArrayAppendAgaintsEmptyArray() {
    String json = "{ a: 'hello', b: [1, 2, 3], c: { d: [] }}";
    JsonObject jsonObject = new Gson().fromJson(json, JsonObject.class);
    client.set( "test_arrappend", jsonObject, Path.ROOT_PATH);
    assertEquals(Long.valueOf(3L), client.arrAppend( "test_arrappend", Path.of(".c.d"), "a", "b", "c"));
    String[] array = client.get("test_arrappend", String[].class, Path.of(".c.d"));
    assertArrayEquals(new String[] {"a", "b", "c"}, array);
}
@Test(expected = JedisDataException.class)
// Appending to a non-array path (.a is a string) must raise a data error.
public void testArrayAppendPathIsNotArray() {
    String json = "{ a: 'hello', b: [1, 2, 3], c: { d: ['ello'] }}";
    JsonObject jsonObject = new Gson().fromJson(json, JsonObject.class);
    client.set( "test_arrappend", jsonObject, Path.ROOT_PATH);
    client.arrAppend( "test_arrappend", Path.of(".a"), 1);
}
@Test
// arrIndex finds the position of an int element (7 is at index 2).
public void testArrayIndexWithInts() {
    client.set( "quxquux", new int[] {8,6,7,5,3,0,9}, Path.ROOT_PATH);
    assertEquals(Long.valueOf(2L), client.arrIndex( "quxquux", Path.ROOT_PATH, 7));
}
@Test
// arrIndex finds the position of a string element ("7" is at index 2).
public void testArrayIndexWithStrings() {
    client.set( "quxquux", new String[] {"8","6","7","5","3","0","9"}, Path.ROOT_PATH);
    assertEquals(Long.valueOf(2L), client.arrIndex( "quxquux", Path.ROOT_PATH, "7"));
}
@Test
// arrIndex works against a nested array path (.fooArr).
public void testArrayIndexWithStringsAndPath() {
    client.set( "foobar", new FooBarObject(), Path.ROOT_PATH);
    assertEquals(Long.valueOf(1L), client.arrIndex( "foobar", Path.of(".fooArr"), "b"));
}
@Test(expected = JedisDataException.class)
// arrIndex on a path that does not exist must raise; the surrounding
// assertEquals never gets to compare because arrIndex throws first.
public void testArrayIndexNonExistentPath() {
    client.set( "foobar", new FooBarObject(), Path.ROOT_PATH);
    assertEquals(Long.valueOf(1L), client.arrIndex( "foobar", Path.of(".barArr"), "x"));
}
@Test
// arrInsert places the element BEFORE the given index (index 1 here).
public void testArrayInsert() {
    String json = "['hello', 'world', true, 1, 3, null, false]";
    JsonArray jsonArray = new Gson().fromJson(json, JsonArray.class);
    client.set( "test_arrinsert", jsonArray, Path.ROOT_PATH);
    assertEquals(Long.valueOf(8L), client.arrInsert( "test_arrinsert", Path.ROOT_PATH, 1L, "foo"));
    Object[] array = client.get("test_arrinsert", Object[].class, Path.ROOT_PATH);
    // NOTE: GSon converts numeric types to the most accommodating type (Double)
    // when type information is not provided (as in the Object[] below)
    assertArrayEquals(new Object[] {"hello", "foo", "world", true, 1.0, 3.0, null, false}, array);
}
@Test
// Negative index counts from the end: -1 inserts before the last element.
public void testArrayInsertWithNegativeIndex() {
    String json = "['hello', 'world', true, 1, 3, null, false]";
    JsonArray jsonArray = new Gson().fromJson(json, JsonArray.class);
    client.set( "test_arrinsert", jsonArray, Path.ROOT_PATH);
    assertEquals(Long.valueOf(8L), client.arrInsert( "test_arrinsert", Path.ROOT_PATH, -1L, "foo"));
    Object[] array = client.get("test_arrinsert", Object[].class, Path.ROOT_PATH);
    assertArrayEquals(new Object[] {"hello", "world", true, 1.0, 3.0, null, "foo", false}, array);
}
@Test
// arrPop by explicit index, by -1 (last), and with the index-less overload;
// each call returns the removed element.
public void testArrayPop() {
    client.set( "arr", new int[] {0,1,2,3,4}, Path.ROOT_PATH);
    assertEquals(Long.valueOf(4L), client.arrPop( "arr", Long.class, Path.ROOT_PATH, 4L));
    assertEquals(Long.valueOf(3L), client.arrPop( "arr", Long.class, Path.ROOT_PATH, -1L));
    assertEquals(Long.valueOf(2L), client.arrPop( "arr", Long.class));
    assertEquals(Long.valueOf(0L), client.arrPop( "arr", Long.class, Path.ROOT_PATH, 0L));
    assertEquals(Long.valueOf(1L), client.arrPop( "arr", Long.class));
}
@Test
// arrTrim keeps the inclusive [1, 3] slice and returns the new length.
public void testArrayTrim() {
    client.set( "arr", new int[] {0,1,2,3,4}, Path.ROOT_PATH);
    assertEquals(Long.valueOf(3L), client.arrTrim( "arr", Path.ROOT_PATH, 1L, 3L));
    Integer[] array = client.get("arr", Integer[].class, Path.ROOT_PATH);
    assertArrayEquals(new Integer[] {1, 2, 3}, array);
}
@Test
// strAppend concatenates onto the stored string and returns the new length.
public void testStringAppend() {
    client.set( "str", "foo", Path.ROOT_PATH);
    assertEquals(Long.valueOf(6L), client.strAppend( "str", Path.ROOT_PATH, "bar"));
    assertEquals("foobar", client.get("str", String.class, Path.ROOT_PATH));
}
@Test
// strLen reports the length of the stored string value.
public void testStringLen() {
    client.set( "str", "foo", Path.ROOT_PATH);
    assertEquals(Long.valueOf(3L), client.strLen( "str", Path.ROOT_PATH));
}
@Test
// A client configured with a custom GsonBuilder must interoperate with data
// written by a default-configured client (here: a null round-trip).
public void testPassingCustomGsonBuilder() {
    final JReJSON defaultClient = new JReJSON();
    final JReJSON customizedClient = new JReJSON();
    final GsonBuilder builder = new GsonBuilder();
    customizedClient.setGsonBuilder(builder);
    // naive set with a path
    defaultClient.set("null", null, Path.ROOT_PATH);
    assertNull(customizedClient.get("null", String.class, Path.ROOT_PATH));
}
}
|
using System;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.SceneManagement;
using Object = UnityEngine.Object;
namespace NF.ObjectPooling.Runtime
{
/// <summary>
/// A stack-based pool for instances of a single prefab.  Instances are
/// created deactivated under <c>_poolParent</c>, handed out via
/// <see cref="Get()"/>, and recycled via <see cref="Return"/>.
/// </summary>
public sealed class Pool
{
    private const int INITIAL_INSTANCES_SIZE = 128;

    // Callback invoked once per freshly-instantiated object (e.g. so the
    // owning pool manager can register the instance with this pool).
    private readonly Action<GameObject, Pool> _onInstanceCreated;
    private readonly Stack<GameObject> _instances;
    private readonly GameObject _prefab;
    private readonly Transform _poolParent;
    // Prefab transform snapshot, used as defaults for Get() overloads and to
    // reset scale on Return().  (Removed the redundant `= default`
    // initializers — the constructor assigns all three unconditionally.)
    private readonly Vector3 _initialPosition;
    private readonly Quaternion _initialRotation;
    private readonly Vector3 _initialScale;

    public Pool(GameObject prefab, Transform poolParent, Action<GameObject, Pool> onInstanceCreated)
    {
        _instances = new Stack<GameObject>(INITIAL_INSTANCES_SIZE);
        _prefab = prefab;
        _poolParent = poolParent;
        _onInstanceCreated = onInstanceCreated;
        _initialPosition = prefab.transform.position;
        _initialRotation = prefab.transform.rotation;
        _initialScale = prefab.transform.localScale;
    }

    /// <summary>Pre-instantiates <paramref name="count"/> pooled instances.</summary>
    public void Preload(int count)
    {
        for (int i = 0; i < count; i++)
        {
            GameObject instance = PopulatePrefab();
            instance.transform.SetParent(_poolParent);
            _instances.Push(instance);
        }
    }

    // Instantiates one deactivated copy of the prefab and notifies the owner.
    private GameObject PopulatePrefab()
    {
        GameObject instance = Object.Instantiate(_prefab);
        _onInstanceCreated(instance, this);
        instance.SetActive(false);
        return instance;
    }

    public GameObject Get() => Get(null);

    public GameObject Get(Vector3 position) => Get(null, position);

    public GameObject Get(Vector3 position, Quaternion rotation) => Get(null, position, rotation);

    public GameObject Get(Transform parent) => Get(parent, _initialPosition);

    public GameObject Get(Transform parent, Vector3 position) => Get(parent, position, _initialRotation);

    /// <summary>
    /// Takes an instance from the pool (creating one if empty), places it at
    /// the given pose under <paramref name="parent"/>, and activates it.
    /// </summary>
    public GameObject Get(Transform parent, Vector3 position, Quaternion rotation)
    {
        GameObject instance = GetFromPool();
        InitializeInstance(instance, parent, position, rotation);
        return instance;
    }

    private void InitializeInstance(GameObject instance, Transform parent, Vector3 position, Quaternion rotation)
    {
        Transform transformCache = instance.transform;
        transformCache.SetPositionAndRotation(position, rotation);
        transformCache.SetParent(parent);
        instance.SetActive(true);
        if (parent == null)
            MoveParentlessInstanceToActiveScene(instance);
        if (instance.TryGetComponent(out IReusablePrefab reusable))
            reusable.OnGet();
    }

    // Parentless instances would otherwise live in the pool's scene; move them
    // to the currently active scene so they unload with it.
    private void MoveParentlessInstanceToActiveScene(GameObject instance)
    {
        Scene activeScene = SceneManager.GetActiveScene();
        SceneManager.MoveGameObjectToScene(instance, activeScene);
    }

    private GameObject GetFromPool()
    {
        // Pop until we find an instance that hasn't been destroyed externally
        // (a destroyed Unity object compares equal to null).  This replaces
        // the original's redundant outer `if` and manual `count` bookkeeping.
        while (_instances.Count > 0)
        {
            GameObject instance = _instances.Pop();
            if (instance != null)
                return instance;
        }
        // Pool exhausted: create exactly one fresh instance and hand it out.
        Preload(1);
        return _instances.Pop();
    }

    /// <summary>Deactivates an instance and pushes it back onto the pool.</summary>
    public void Return(GameObject instance)
    {
        if (instance.TryGetComponent(out IReusablePrefab reusable))
            reusable.OnRelease();
        instance.SetActive(false);
        Transform transformCache = instance.transform;
        // NOTE(review): position is reset to Vector3.zero rather than
        // _initialPosition (which Get(parent) uses) — confirm this asymmetry
        // is intentional.
        transformCache.SetPositionAndRotation(Vector3.zero, _initialRotation);
        transformCache.localScale = _initialScale;
        transformCache.SetParent(_poolParent);
        _instances.Push(instance);
    }
}
}
|
#!/bin/bash
# Provision mock organization data on the demo Hyperledger Fabric network.
# Errors are deliberately tolerated (set +e) so partial provisioning does not
# abort the surrounding setup.
set +e

# SHA fingerprint of user1's enrollment certificate (extracted from the HFC
# key-value store).  Modernized from backticks to $(...).
# NOTE(review): FINGERPRINT is computed but never used below — confirm whether
# a later step still needs it, otherwise this line can be dropped.
FINGERPRINT=$(node -e "console.log(JSON.parse(require('fs').readFileSync('$HOME/hyperledger-fabric-network/.hfc-org1/user1', 'utf8')).enrollment.identity.certificate)" | openssl x509 -fingerprint -noout | cut -d '=' -f2)

echo "Setup Organizations"

# JSON array of organization names passed to the governance chaincode.
read -r -d '' basicOrganizations << EndOfMessage
[
 "ABC_HEALTHCARE", "XYZ_PROVIDER", "InstaMed"
]
EndOfMessage

./node_modules/.bin/hurl invoke financial governance_updateOrganizationsList "$basicOrganizations"
echo "The mock data has been provisioned successfully"
# echo "Installing CouchDB views"
# npm run views:install
# echo "CouchDB views ready"
|
using System;
using System.Collections.Generic;
using System.Linq;
using MBran.Components.Attributes;
using MBran.Components.Models;
using Umbraco.Core;
using Umbraco.Core.Services;
namespace MBran.Components.Helpers
{
/// <summary>
/// Singleton helper that resolves Umbraco document-type definitions and
/// component render options, caching results in the Umbraco request/runtime
/// caches.
/// </summary>
public class DocTypesHelper
{
    private DocTypesHelper()
    {
    }

    // BUG FIX: this was an expression-bodied property (`=>`), which built a
    // brand-new Lazy<DocTypesHelper> — and therefore a brand-new helper — on
    // EVERY access, defeating the singleton entirely.  A readonly field with
    // an initializer creates the Lazy exactly once.
    private static readonly Lazy<DocTypesHelper> _instance =
        new Lazy<DocTypesHelper>(() => new DocTypesHelper());

    public static DocTypesHelper Instance => _instance.Value;

    /// <summary>
    /// Returns all content types as lightweight definitions, cached for the
    /// lifetime of the current request.
    /// </summary>
    public IEnumerable<DocTypeDefinition> GetDocTypes(IContentTypeService contentTypeService)
    {
        var cacheName = string.Join("_", GetType().FullName, nameof(GetDocTypes));
        return (IEnumerable<DocTypeDefinition>) ApplicationContext.Current
            .ApplicationCache
            .RequestCache
            .GetCacheItem(cacheName, () =>
            {
                var docTypes = contentTypeService.GetAllContentTypes();
                return docTypes.Select(docType => new DocTypeDefinition
                {
                    Id = docType.Id,
                    Name = docType.Name,
                    Value = docType.Alias
                });
            });
    }

    /// <summary>
    /// Resolves a definition per alias; a null or empty alias list yields an
    /// empty result (the `!... ?? true` handles the null-propagated bool?).
    /// </summary>
    public IEnumerable<DocTypeDefinition> GetDocTypesDefinition(IContentTypeService contentTypeService,
        IEnumerable<string> docTypeAliases)
    {
        if (!docTypeAliases?.Any() ?? true) return new List<DocTypeDefinition>();
        return docTypeAliases.Select(docType => GetDocTypeDefinition(contentTypeService, docType));
    }

    /// <summary>
    /// Looks up a single document type by alias (case-insensitive), cached
    /// per request; returns null when no content type matches.
    /// </summary>
    public DocTypeDefinition GetDocTypeDefinition(IContentTypeService contentTypeService, string docTypeAlias)
    {
        var cacheName = string.Join("_", GetType().FullName, nameof(GetDocTypeDefinition), docTypeAlias);
        return (DocTypeDefinition) ApplicationContext.Current
            .ApplicationCache
            .RequestCache
            .GetCacheItem(cacheName, () =>
            {
                var allDocTypes = contentTypeService.GetAllContentTypes();
                return allDocTypes
                    .Where(docType =>
                        string.Equals(docType.Alias, docTypeAlias, StringComparison.InvariantCultureIgnoreCase))
                    .Select(docType => new DocTypeDefinition
                    {
                        Id = docType.Id,
                        Name = docType.Name,
                        Value = docType.Alias
                    })
                    .FirstOrDefault();
            });
    }

    /// <summary>
    /// Collects [RenderOption] attributes from the component controller's
    /// methods; cached in the runtime (application-wide) cache since
    /// reflection over attributes is comparatively expensive.
    /// </summary>
    public IEnumerable<RenderOptionsDefinition> GetComponentRenderOptions(string docTypeAlias)
    {
        var cacheName = string.Join("_", GetType().FullName, nameof(GetComponentRenderOptions), docTypeAlias);
        return (IEnumerable<RenderOptionsDefinition>) ApplicationContext.Current
            .ApplicationCache
            .RuntimeCache
            .GetCacheItem(cacheName, () =>
            {
                var docType = ComponentsHelper.Instance.FindController(docTypeAlias);
                var viewOptions = docType.GetMethods()
                    .SelectMany(method =>
                        method.GetCustomAttributes(typeof(RenderOptionAttribute), false) as
                            IEnumerable<RenderOptionAttribute>)
                    .Where(attribute => attribute != null)
                    .Select(attribute => new RenderOptionsDefinition
                    {
                        Name = attribute.Name,
                        Value = attribute.Code,
                        Description = attribute.Description
                    });
                return viewOptions;
            });
    }
}
}
|
package vrouter
import (
apps "k8s.io/api/apps/v1"
core "k8s.io/api/core/v1"
meta "k8s.io/apimachinery/pkg/apis/meta/v1"
)
//GetDaemonset returns DaemonSet object for vRouter
func GetDaemonset() *apps.DaemonSet {
var labelsMountPermission int32 = 0644
var trueVal = true
var contrailStatusImageEnv = core.EnvVar{
Name: "CONTRAIL_STATUS_IMAGE",
Value: "docker.io/opencontrailnightly/contrail-status:latest",
}
var podIPEnv = core.EnvVar{
Name: "POD_IP",
ValueFrom: &core.EnvVarSource{
FieldRef: &core.ObjectFieldSelector{
FieldPath: "status.podIP",
},
},
}
var physicalInterfaceEnv = core.EnvVar{
Name: "PHYSICAL_INTERFACE",
ValueFrom: &core.EnvVarSource{
FieldRef: &core.ObjectFieldSelector{
FieldPath: "metadata.annotations['physicalInterface']",
},
},
}
var podInitContainers = []core.Container{
{
Name: "init",
Image: "busybox",
Command: []string{
"sh",
"-c",
"until grep ready /tmp/podinfo/pod_labels > /dev/null 2>&1; do sleep 1; done",
},
Env: []core.EnvVar{
podIPEnv,
},
VolumeMounts: []core.VolumeMount{
{
Name: "status",
MountPath: "/tmp/podinfo",
},
},
ImagePullPolicy: "IfNotPresent",
},
{
Name: "nodeinit",
Image: "docker.io/michaelhenkel/contrail-node-init:5.2.0-dev1",
Env: []core.EnvVar{
contrailStatusImageEnv,
podIPEnv,
},
VolumeMounts: []core.VolumeMount{
{
Name: "host-usr-local-bin",
MountPath: "/host/usr/bin",
},
},
ImagePullPolicy: "IfNotPresent",
SecurityContext: &core.SecurityContext{
Privileged: &trueVal,
},
},
{
Name: "vrouterkernelinit",
Image: "docker.io/michaelhenkel/contrail-vrouter-kernel-init:5.2.0-dev1",
Env: []core.EnvVar{
podIPEnv,
},
VolumeMounts: []core.VolumeMount{
{
Name: "host-usr-local-bin",
MountPath: "/host/usr/bin",
},
{
Name: "network-scripts",
MountPath: "/etc/sysconfig/network-scripts",
},
{
Name: "host-usr-local-bin",
MountPath: "/host/bin",
},
{
Name: "usr-src",
MountPath: "/usr/src",
},
{
Name: "lib-modules",
MountPath: "/lib/modules",
},
},
ImagePullPolicy: "IfNotPresent",
SecurityContext: &core.SecurityContext{
Privileged: &trueVal,
},
},
}
var podContainers = []core.Container{
{
Name: "vrouteragent",
Image: "docker.io/michaelhenkel/contrail-vrouter-agent:5.2.0-dev1",
Env: []core.EnvVar{
physicalInterfaceEnv,
podIPEnv,
},
VolumeMounts: []core.VolumeMount{
{
Name: "vrouter-logs",
MountPath: "/var/log/contrail",
},
{
Name: "dev",
MountPath: "/dev",
},
{
Name: "network-scripts",
MountPath: "/etc/sysconfig/network-scripts",
},
{
Name: "host-usr-local-bin",
MountPath: "/host/bin",
},
{
Name: "usr-src",
MountPath: "/usr/src",
},
{
Name: "lib-modules",
MountPath: "/lib/modules",
},
{
Name: "var-lib-contrail",
MountPath: "/var/lib/contrail",
},
{
Name: "var-crashes",
MountPath: "/var/contrail/crashes",
},
{
Name: "resolv-conf",
MountPath: "/etc/resolv.conf",
},
},
ImagePullPolicy: "IfNotPresent",
SecurityContext: &core.SecurityContext{
Privileged: &trueVal,
},
Lifecycle: &core.Lifecycle{
PreStop: &core.Handler{
Exec: &core.ExecAction{
Command: []string{"/clean-up.sh"},
},
},
},
},
{
Name: "nodemanager",
Image: "docker.io/michaelhenkel/contrail-nodemgr:5.2.0-dev1",
Env: []core.EnvVar{
podIPEnv,
{
Name: "DOCKER_HOST",
Value: "unix://mnt/docker.sock",
},
{
Name: "NODE_TYPE",
Value: "vrouter",
},
},
VolumeMounts: []core.VolumeMount{
{
Name: "vrouter-logs",
MountPath: "/var/log/contrail",
},
{
Name: "docker-unix-socket",
MountPath: "/mnt",
},
},
ImagePullPolicy: "IfNotPresent",
},
}
var podVolumes = []core.Volume{
{
Name: "vrouter-logs",
VolumeSource: core.VolumeSource{
HostPath: &core.HostPathVolumeSource{
Path: "/var/log/contrail/vrouter",
},
},
},
{
Name: "docker-unix-socket",
VolumeSource: core.VolumeSource{
HostPath: &core.HostPathVolumeSource{
Path: "/var/run",
},
},
},
{
Name: "host-usr-local-bin",
VolumeSource: core.VolumeSource{
HostPath: &core.HostPathVolumeSource{
Path: "/usr/local/bin",
},
},
},
{
Name: "var-crashes",
VolumeSource: core.VolumeSource{
HostPath: &core.HostPathVolumeSource{
Path: "/var/contrail/crashes",
},
},
},
{
Name: "var-lib-contrail",
VolumeSource: core.VolumeSource{
HostPath: &core.HostPathVolumeSource{
Path: "/var/lib/contrail",
},
},
},
{
Name: "lib-modules",
VolumeSource: core.VolumeSource{
HostPath: &core.HostPathVolumeSource{
Path: "/lib/modules",
},
},
},
{
Name: "usr-src",
VolumeSource: core.VolumeSource{
HostPath: &core.HostPathVolumeSource{
Path: "/usr/src",
},
},
},
{
Name: "network-scripts",
VolumeSource: core.VolumeSource{
HostPath: &core.HostPathVolumeSource{
Path: "/etc/sysconfig/network-scripts",
},
},
},
{
Name: "dev",
VolumeSource: core.VolumeSource{
HostPath: &core.HostPathVolumeSource{
Path: "/dev",
},
},
},
{
Name: "resolv-conf",
VolumeSource: core.VolumeSource{
HostPath: &core.HostPathVolumeSource{
Path: "/etc/resolv.conf",
},
},
},
{
Name: "status",
VolumeSource: core.VolumeSource{
DownwardAPI: &core.DownwardAPIVolumeSource{
Items: []core.DownwardAPIVolumeFile{
{
Path: "pod_labels",
FieldRef: &core.ObjectFieldSelector{
APIVersion: "v1",
FieldPath: "metadata.labels",
},
},
{
Path: "pod_labelsx",
FieldRef: &core.ObjectFieldSelector{
APIVersion: "v1",
FieldPath: "metadata.labels",
},
},
},
DefaultMode: &labelsMountPermission,
},
},
},
}
var podTolerations = []core.Toleration{
{
Operator: "Exists",
Effect: "NoSchedule",
},
{
Operator: "Exists",
Effect: "NoExecute",
},
}
var podSpec = core.PodSpec{
Volumes: podVolumes,
InitContainers: podInitContainers,
Containers: podContainers,
RestartPolicy: "Always",
DNSPolicy: "ClusterFirst",
HostNetwork: true,
Tolerations: podTolerations,
}
var daemonSetSelector = meta.LabelSelector{
MatchLabels: map[string]string{"app": "vrouter"},
}
var daemonsetTemplate = core.PodTemplateSpec{
ObjectMeta: meta.ObjectMeta{},
Spec: podSpec,
}
var daemonSet = apps.DaemonSet{
TypeMeta: meta.TypeMeta{
Kind: "DaemonSet",
APIVersion: "apps/v1",
},
ObjectMeta: meta.ObjectMeta{
Name: "vrouter",
Namespace: "default",
},
Spec: apps.DaemonSetSpec{
Selector: &daemonSetSelector,
Template: daemonsetTemplate,
},
}
return &daemonSet
}
|
package com.andreapivetta.blu.ui.timeline
import com.andreapivetta.blu.R
import com.andreapivetta.blu.data.model.Tweet
import com.andreapivetta.blu.data.twitter.TwitterAPI
import com.andreapivetta.blu.ui.base.BasePresenter
import io.reactivex.android.schedulers.AndroidSchedulers
import io.reactivex.disposables.CompositeDisposable
import io.reactivex.schedulers.Schedulers
import timber.log.Timber
import twitter4j.Paging
import twitter4j.User
/**
* Created by andrea on 17/05/16.
*/
/**
 * Presenter for the home-timeline screen.  Loads pages of tweets via
 * [TwitterAPI], pushes results to the attached [TimelineMvpView], and
 * performs favorite/retweet actions.
 *
 * Created by andrea on 17/05/16.
 */
open class TimelinePresenter : BasePresenter<TimelineMvpView>() {

    // Next timeline page to request (1-based Twitter paging).
    var page: Int = 1

    // Guards against overlapping load requests.
    protected var isLoading: Boolean = false

    protected val disposables = CompositeDisposable()

    override fun detachView() {
        super.detachView()
        // Drop in-flight requests so they cannot call back into a dead view.
        disposables.clear()
    }

    /** Loads the first/next page, driving the full loading/empty/error UI. */
    open fun getTweets() {
        checkViewAttached()
        mvpView?.showLoading()
        isLoading = true
        disposables.add(TwitterAPI.getHomeTimeline(Paging(page, 50))
                .observeOn(AndroidSchedulers.mainThread())
                .subscribeOn(Schedulers.io())
                .subscribe({
                    mvpView?.hideLoading()
                    when {
                        it == null -> mvpView?.showError()
                        it.isEmpty() -> mvpView?.showEmpty()
                        else -> {
                            mvpView?.showTweets(it.map(::Tweet).toMutableList())
                            page++
                        }
                    }
                    isLoading = false
                }, {
                    Timber.e(it?.message)
                    mvpView?.hideLoading()
                    mvpView?.showError()
                    isLoading = false
                }))
    }

    /** Appends the next page (silent variant used for endless scrolling). */
    open fun getMoreTweets() {
        if (isLoading)
            return
        checkViewAttached()
        isLoading = true
        disposables.add(TwitterAPI.getHomeTimeline(Paging(page, 50))
                .observeOn(AndroidSchedulers.mainThread())
                .subscribeOn(Schedulers.io())
                .subscribe({
                    if (it != null && it.isNotEmpty()) {
                        mvpView?.showMoreTweets(it.map(::Tweet).toMutableList())
                        // Bug fix: advance paging only when tweets actually
                        // arrived, mirroring getTweets().  The original bumped
                        // `page` even for an empty result.
                        page++
                    }
                    isLoading = false
                }, {
                    Timber.e(it?.message)
                    isLoading = false
                }))
    }

    /** Pull-to-refresh: fetches everything newer than the last shown tweet. */
    open fun onRefresh() {
        checkViewAttached()
        val sinceId = mvpView?.getLastTweetId()
        if (sinceId != null && sinceId > 0) {
            // Renamed from `page` — the original local shadowed the `page`
            // property above.
            val paging = Paging(1, 200)
            paging.sinceId = sinceId
            disposables.add(TwitterAPI.refreshTimeLine(paging)
                    .observeOn(AndroidSchedulers.mainThread())
                    .subscribeOn(Schedulers.io())
                    .subscribe({
                        mvpView?.stopRefresh()
                        if (it != null) {
                            // Reverse so the newest tweet ends up on top.
                            it.reversed().forEach { status -> mvpView?.showTweet(Tweet(status)) }
                        } else {
                            mvpView?.showSnackBar(R.string.error_refreshing_timeline)
                        }
                    }, {
                        Timber.e(it?.message)
                        mvpView?.stopRefresh()
                        mvpView?.showSnackBar(R.string.error_refreshing_timeline)
                    }))
        } else mvpView?.stopRefresh()
    }

    /** Marks [tweet] as favorited and updates the local counters on success. */
    fun favorite(tweet: Tweet) {
        checkViewAttached()
        disposables.add(TwitterAPI.favorite(tweet.id)
                .observeOn(AndroidSchedulers.mainThread())
                .subscribeOn(Schedulers.io())
                .map(::Tweet)
                .subscribe({
                    if (it != null) {
                        tweet.favorited = true
                        tweet.favoriteCount++
                        mvpView?.updateRecyclerViewView()
                    } else {
                        mvpView?.showSnackBar(R.string.error_favorite)
                    }
                }, {
                    Timber.e(it?.message)
                    mvpView?.showSnackBar(R.string.error_favorite)
                }))
    }

    /** Retweets [tweet] and updates the local counters on success. */
    fun retweet(tweet: Tweet) {
        checkViewAttached()
        disposables.add(TwitterAPI.retweet(tweet.id)
                .map(::Tweet)
                .observeOn(AndroidSchedulers.mainThread())
                .subscribeOn(Schedulers.io())
                .subscribe({
                    if (it != null) {
                        tweet.retweeted = true
                        tweet.retweetCount++
                        mvpView?.updateRecyclerViewView()
                    } else {
                        mvpView?.showSnackBar(R.string.error_retweet)
                    }
                }, {
                    Timber.e(it?.message)
                    mvpView?.showSnackBar(R.string.error_retweet)
                }))
    }

    /** Removes the favorite from [tweet], reverting the local counters. */
    fun unfavorite(tweet: Tweet) {
        checkViewAttached()
        disposables.add(TwitterAPI.unfavorite(tweet.id)
                .map(::Tweet)
                .observeOn(AndroidSchedulers.mainThread())
                .subscribeOn(Schedulers.io())
                .subscribe({
                    if (it != null) {
                        tweet.favorited = false
                        tweet.favoriteCount--
                        mvpView?.updateRecyclerViewView()
                    } else {
                        mvpView?.showSnackBar(R.string.error_unfavorite)
                    }
                }, {
                    Timber.e(it?.message)
                    mvpView?.showSnackBar(R.string.error_unfavorite)
                }))
    }

    /** Undoes the current user's retweet of [tweet]. */
    fun unretweet(tweet: Tweet) {
        checkViewAttached()
        disposables.add(TwitterAPI.unretweet(tweet.status.currentUserRetweetId)
                .map(::Tweet)
                .observeOn(AndroidSchedulers.mainThread())
                .subscribeOn(Schedulers.io())
                .subscribe({
                    if (it != null) {
                        tweet.retweeted = false
                        tweet.retweetCount--
                        mvpView?.updateRecyclerViewView()
                    } else {
                        mvpView?.showSnackBar(R.string.error_unretweet)
                    }
                }, {
                    Timber.e(it?.message)
                    mvpView?.showSnackBar(R.string.error_unretweet)
                }))
    }

    /** Opens the compose screen pre-filled as a reply to [tweet]. */
    fun reply(tweet: Tweet, user: User) {
        mvpView?.showNewTweet(tweet, user)
    }
}
|
import aiohttp
import asyncio
loop = asyncio.get_event_loop()
async def _call(http_method=None, url=None, *args, **kwargs):
    """Perform an HTTP request and return the response body text.

    Fixes versus the original:
    - the ClientSession is now context-managed (it was created and never
      closed, leaking the connection pool);
    - ``getattr`` replaces ``eval('session.' + http_method)``;
    - ``**kwargs`` are forwarded to the request (the original accepted e.g.
      ``data=...`` from post() and silently dropped it).

    :param http_method: aiohttp session method name ('get', 'post', ...)
    :param url: target URL
    :raises AssertionError: if the response status is not 200
    """
    async with aiohttp.ClientSession() as session:
        async with getattr(session, http_method)(url, **kwargs) as response:
            assert response.status == 200
            return await response.text()
def _serializer(*args, **kwargs):
if len(args) and len(kwargs):
return args, kwargs
elif len(args):
return args
else:
return kwargs
async def head(url=None, serializer=_serializer, *args, **kwargs):
    """
    head method implementation (docstring corrected; it said "get")
    :param url:
    :param serializer: callback function
    :param args: accepted but currently unused
    :param kwargs: accepted but currently unused
    :return: serialized response body
    """
    return serializer(await _call(http_method='head', url=url))
async def get(url=None, serializer=_serializer, *args, **kwargs):
    """
    get method implementation
    :param url:
    :param serializer: callback function
    :param args: accepted but currently unused
    :param kwargs: accepted but currently unused
    :return: serialized response body
    """
    return serializer(await _call(http_method='get', url=url))
async def post(url=None, serializer=_serializer, *args, **kwargs):
    """
    post method implementation (docstring corrected; it said "put")
    :param url:
    :param serializer: callback function
    :param args: first positional arg, if any, is used as the request payload
    :param kwargs: 'payload' key, if present, is used as the request payload
    :return: serialized response body
    """
    if args:
        kwargs['payload'] = args[0]
    # Bug fix: the original did kwargs['payload'] unconditionally and raised
    # KeyError when post() was called with no body at all.
    payload = kwargs.get('payload')
    return serializer(await _call(http_method='post', url=url, data=payload))
async def options(url=None, serializer=_serializer, *args, **kwargs):
    """
    options method implementation
    :param url:
    :param serializer: callback function
    :param args: accepted but currently unused
    :param kwargs: accepted but currently unused
    :return: serialized response body
    """
    return serializer(await _call(http_method='options', url=url))
async def delete(url=None, serializer=_serializer, *args, **kwargs):
    """
    delete method implementation
    :param url:
    :param serializer: callback function
    :param args: accepted but currently unused
    :param kwargs: accepted but currently unused
    :return: serialized response body
    """
    return serializer(await _call(http_method='delete', url=url))
def event_loop(callback=None):
    """Run ``callback()`` (a coroutine function) to completion on the
    module-level event loop, blocking until it finishes."""
    coro = callback()
    loop.run_until_complete(coro)
|
#!/bin/bash
# Assemble an LC-3 program ($1 = path/to/program.asm) inside the lc3compiler
# container and move the resulting .obj into programs/bin.
set -e
# Strip the literal ".asm" suffix to get the program's base name.
# (${1%.asm} is safer than the original ${1::-4}, which blindly chopped the
# last four characters and broke on short or differently-suffixed arguments.)
name=${1%.asm}
docker build -t lc3compiler compiler/
docker run --rm -it -v "$(pwd)":/data lc3compiler "/data/$name.asm"
# The assembler also emits a symbol table we do not keep.
rm -f "$name.sym"
mv -f "$name.obj" programs/bin
|
# Serves downloadable archives of a git revision from a Xitolite repository.
class DownloadGitRevisionController < ApplicationController
  include XitoliteRepositoryFinder

  before_filter :find_xitolite_repository
  before_filter :can_download_git_revision
  before_filter :set_download
  before_filter :validate_download

  helper :redmine_bootstrap_kit

  # Streams the archive built by the before_filters to the client.
  def index
    send_data @download.content, filename: @download.filename, type: @download.content_type
  rescue => e
    # NOTE(review): assumes the raised error responds to #output — confirm
    # which error class Repositories::DownloadRevision raises here.
    flash.now[:error] = l(:git_archive_timeout, timeout: e.output)
    render_404
  end

  private

  # Name of the request parameter carrying the repository id.
  def find_repository_param
    params[:id]
  end

  def can_download_git_revision
    render_403 unless User.current.allowed_to_download?(@repository)
  end

  def set_download
    @download = Repositories::DownloadRevision.new(@repository, download_revision, download_format)
  end

  # Requested revision; defaults to 'master'.
  def download_revision
    @download_revision ||= params[:rev] || 'master'
  end

  # Requested archive format; defaults to 'tar'.
  def download_format
    @download_format ||= params[:download_format] || 'tar'
  end

  def validate_download
    return if @download.valid_commit?
    flash.now[:error] = l(:error_download_revision_no_such_commit, commit: download_revision)
    render_404
  end
end
|
#!/usr/bin/env bash
set -euo pipefail
# Shared variables used in other scripts
exports () {
  # The directory of this file (resolved robustly even when sourced).
  local DIR
  DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
  # Repository root: one level above this script's directory.
  export PROJ_ROOT="${DIR}/.."
  # Changelog and release-notes files consumed by the release tooling.
  export CHANGES="${PROJ_ROOT}/CHANGES.md"
  export RELEASE_NOTES="${PROJ_ROOT}/RELEASE.md"
}
exports
|
//SAMPLE FUNCTION - DON'T CHANGE
int multiply_numbers(int num1, int num2);
//write factorial function prototype here
// Prototype only — the definition lives elsewhere; presumably returns the
// factorial of `value` (confirm against the implementation).
int factorialize(int value);
|
/*
This file is part of GNUnet.
Copyright (C) 2009-2013, 2016 GNUnet e.V.
GNUnet is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published
by the Free Software Foundation; either version 3, or (at your
option) any later version.
GNUnet is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public License
along with GNUnet; see the file COPYING. If not, write to the
Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
Boston, MA 02110-1301, USA.
*/
/**
* @author Christian Grothoff
*
* @file
* Library for tokenizing a message stream
* @defgroup server Server library
* Library for tokenizing a message stream
*
* @see [Documentation](https://gnunet.org/mst)
*
* @{
*/
#ifndef GNUNET_MST_LIB_H
#define GNUNET_MST_LIB_H
#ifdef __cplusplus
extern "C"
{
#if 0 /* keep Emacsens' auto-indent happy */
}
#endif
#endif
#include "gnunet_common.h"
/**
* Handle to a message stream tokenizer.
*/
struct GNUNET_MessageStreamTokenizer;
/**
* Functions with this signature are called whenever a
* complete message is received by the tokenizer.
*
* Do not call #GNUNET_mst_destroy from within
* the scope of this callback.
*
* @param cls closure
* @param message the actual message
* @return #GNUNET_OK on success, #GNUNET_SYSERR to stop further processing
*/
typedef int
(*GNUNET_MessageTokenizerCallback) (void *cls,
const struct GNUNET_MessageHeader *message);
/**
* Create a message stream tokenizer.
*
* @param cb function to call on completed messages
* @param cb_cls closure for @a cb
* @return handle to tokenizer
*/
struct GNUNET_MessageStreamTokenizer *
GNUNET_MST_create (GNUNET_MessageTokenizerCallback cb,
void *cb_cls);
/**
* Add incoming data to the receive buffer and call the
* callback for all complete messages.
*
* @param mst tokenizer to use
* @param buf input data to add
* @param size number of bytes in @a buf
* @param purge should any excess bytes in the buffer be discarded
* (i.e. for packet-based services like UDP)
* @param one_shot only call callback once, keep rest of message in buffer
* @return #GNUNET_OK if we are done processing (need more data)
* #GNUNET_NO if one_shot was set and we have another message ready
* #GNUNET_SYSERR if the data stream is corrupt
*/
int
GNUNET_MST_from_buffer (struct GNUNET_MessageStreamTokenizer *mst,
const char *buf,
size_t size,
int purge,
int one_shot);
/**
 * Read incoming data from the given socket into the receive buffer
 * and call the callback for all complete messages.
 * (Doc fix: the original comment was copy-pasted from
 * #GNUNET_MST_from_buffer and documented nonexistent buf/size params.)
 *
 * @param mst tokenizer to use
 * @param sock socket to read fresh data from
 * @param purge should any excess bytes in the buffer be discarded
 *       (i.e. for packet-based services like UDP)
 * @param one_shot only call callback once, keep rest of message in buffer
 * @return #GNUNET_OK if we are done processing (need more data)
 *         #GNUNET_NO if one_shot was set and we have another message ready
 *         #GNUNET_SYSERR if the data stream is corrupt
 */
int
GNUNET_MST_read (struct GNUNET_MessageStreamTokenizer *mst,
                 struct GNUNET_NETWORK_Handle *sock,
                 int purge,
                 int one_shot);
/**
* Obtain the next message from the @a mst, assuming that
* there are more unprocessed messages in the internal buffer
* of the @a mst.
*
* @param mst tokenizer to use
* @param one_shot only call callback once, keep rest of message in buffer
* @return #GNUNET_OK if we are done processing (need more data)
* #GNUNET_NO if one_shot was set and we have another message ready
* #GNUNET_SYSERR if the data stream is corrupt
*/
int
GNUNET_MST_next (struct GNUNET_MessageStreamTokenizer *mst,
int one_shot);
/**
* Destroys a tokenizer.
*
* @param mst tokenizer to destroy
*/
void
GNUNET_MST_destroy (struct GNUNET_MessageStreamTokenizer *mst);
#if 0 /* keep Emacsens' auto-indent happy */
{
#endif
#ifdef __cplusplus
}
#endif
#endif
/** @} */ /* end of group server */
/* end of gnunet_mst_lib.h */
|
// Navbar: mark the link matching the current path as active on load, and
// keep the "active" class in sync when a nav item is clicked.
$(document).ready(function() {
    var pathname = window.location.pathname;
    $('.navbar-nav > li > a[href="' + pathname + '"]').addClass('active');

    // .bind() is deprecated since jQuery 3 — use .on() instead.
    $('.mr-auto .nav-item').on('click', function(event) {
        var clickedItem = $(this);
        // The click target may be the <li> itself rather than the <a>;
        // fall back to the anchor inside the item so we never navigate
        // to ".../undefined".
        var href = $(event.target).attr('href') || clickedItem.find('a').attr('href');

        // Move the "active" marker to the clicked item.
        $('.navbar-nav .nav-item').removeClass('active');
        clickedItem.addClass('active');

        if (href) {
            window.location.href = href;
        }
    });
});
// Article modal: populate #artModal from the article trigger that was clicked.
$(document).ready(function() {
    // Remember which trigger was clicked so the show handler can locate
    // the article's related elements by id.
    $("[name='pop']").on("click", function () {
        $(this).addClass('article-trigger-clicked');
        // 'dynamic' is not a valid Bootstrap backdrop value (only
        // true / false / 'static'); use true for a dismissible backdrop.
        $('#artModal').modal({ backdrop: true });
    });

    // On modal show: copy title/body/links from the clicked article.
    $('#artModal').on('show.bs.modal', function() {
        var id = $(".article-trigger-clicked").attr('id');
        var title = $('#title' + id).text();
        var article = $('#article' + id).attr('value');
        var edit = $('#edit' + id).attr('href');
        var artDelete = $('#delete' + id).attr('action');

        if (typeof edit == 'undefined') {
            // No edit link for this article: hide the action buttons.
            $('#artEdit').hide();
            $('#artDelete').hide();
        } else {
            // Re-show the buttons in case a previous article hid them —
            // the original never restored them, so they stayed hidden.
            $('#artEdit').show();
            $('#artDelete').show();
        }

        $('#artTitle').text(title);
        $('#artArticle').html(article);
        $('#artEdit').attr('href', edit);
        $('#artDelete').attr('action', artDelete);
    });

    // On modal hide: clear the trigger marker for the next open.
    $('#artModal').on('hide.bs.modal', function() {
        $('.article-trigger-clicked').removeClass('article-trigger-clicked');
    });
});
|
package br.com.itau.casadocodigo.casadocodigoAPI.config.validacao;
import java.util.List;
import javax.persistence.EntityManager;
import javax.persistence.PersistenceContext;
import javax.persistence.Query;
import javax.validation.ConstraintValidator;
import javax.validation.ConstraintValidatorContext;
import org.springframework.util.Assert;
import br.com.itau.casadocodigo.casadocodigoAPI.config.validacao.anotacoes.VerificaExistenciaTuplaRelacao;
/**
 * Constraint validator that checks whether the annotated value exists as a
 * row of the configured entity ({@code domainClass}) in the configured
 * column ({@code fieldName}).
 */
public class VerificaExistenciaTuplaRelacaoValidator
        implements ConstraintValidator<VerificaExistenciaTuplaRelacao, Object> {

    /** Entity field (column) to match the value against. */
    private String nomeDominio;
    /** Entity class whose table is queried. */
    private Class<?> klass;

    @PersistenceContext
    private EntityManager manager;

    @Override
    public void initialize(VerificaExistenciaTuplaRelacao params) {
        nomeDominio = params.fieldName();
        klass = params.domainClass();
    }

    /**
     * @return true if a row with the given value exists; also true for null
     *         values (Bean Validation convention: let @NotNull reject nulls,
     *         so constraints compose — the original always failed on null).
     */
    @Override
    public boolean isValid(Object value, ConstraintValidatorContext context) {
        if (value == null) {
            return true;
        }
        // Use an identification variable ("e"): JPQL requires an alias on the
        // range declaration; its absence is only tolerated by some providers.
        Query query = manager.createQuery(
                "select 1 from " + klass.getName() + " e where e." + nomeDominio + " = :value");
        query.setParameter("value", value);
        List<?> list = query.getResultList();
        return !list.isEmpty();
    }
}
|
<?php
namespace App\Http\Controllers;
use Illuminate\Http\Request;
use App\Http\Controllers\Controller;
use App\User;
use DB;
use Hash;
use Validator;
use Session;
use Route;
use Spatie\Permission\Models\Role;
use Spatie\Permission\Models\Permission;
class UserController extends Controller {
// Run the global permission check (a helper defined elsewhere in the app)
// before every action on this controller.
public function __construct(Request $request) {
$this->middleware(function ($request, $next) {
checkPermission();
return $next($request);
});
}
// Demo/scratch action: exercises the Spatie permission API for the
// logged-in user and dumps the results.
// NOTE(review): this prints debug output and die()s — it reads as
// throwaway exploration code, not a production endpoint; confirm.
public function index(Request $request) {
// NOTE(review): $role is fetched but never used below — the example
// snippets that consumed it are commented out.
$role = Role::where(['name' => 'Admin'])->first();
/*
// create a role
$role = Role::create(['name' => 'Admin']);
// create a permission
$permission = Permission::create(['name' => 'publish-post']);
// assign role to permission
$role->givePermissionTo($permission);
// assign permission to role
$permission->assignRole($role);
*/
/*
// get all permissions and assign to role -
$getPermissions = Permission::all()->toArray();
foreach ($getPermissions as $permission) {
// assign role to permission
$role->givePermissionTo(['name' => $permission['name']]);
}
// get role assigned permission
$role->getAllPermissions();
*/
// assign permission to user
auth()->user()->givePermissionTo(['name' => 'publish-post']);
// get all permissions of a user
$userAllPermission = auth()->user()->getAllPermissions()->toArray();
// assign role or permission to a user
auth()->user()->assignRole('Publisher');
// get permission via role
$userPermissionViaRoles = auth()->user()->getPermissionsViaRoles()->toArray();
// get direct permission
$userDirectPermission = auth()->user()->getDirectPermissions()->toArray();
// Dump all three permission views and halt.
echo '<pre>';
echo 'User All Permissions'.'<br><br>';
print_r($userAllPermission);
echo '<hr>';
echo 'User Permissions Via Roles'.'<br><br>';
print_r($userPermissionViaRoles);
echo '<hr>';
echo 'User Direct Permissions'.'<br><br>';
print_r($userDirectPermission);
echo '</pre>';
die();
/*
return "Hello";
$data['title'] = '';
return view('', $data);
*/
}
// Throwaway smoke-test endpoint: prints "1" and halts.
public function testing() {
echo "1";
die();
}
// NOTE(review): the following resource methods are unimplemented
// placeholders — view('') will fail if these routes are ever hit.
public function create(Request $request) {
$data['title'] = '';
return view('', $data);
}
public function store(Request $request) {
$data['title'] = '';
return view('', $data);
}
public function show(Request $request) {
$data['title'] = '';
return view('', $data);
}
public function edit(Request $request) {
$data['title'] = '';
return view('', $data);
}
public function update(Request $request) {
$data['title'] = '';
return view('', $data);
}
public function destroy(Request $request) {
$data['title'] = '';
return view('', $data);
}
/**
 * List all users for the user-management screen.
 */
public function manageUser(Request $request) {
    $data = [
        'users' => User::all(),
        'title' => 'Manage User',
    ];
    return view('user.manage-user', $data);
}
/**
 * Show the role-management screen with every defined permission.
 */
public function roleManagement(Request $request) {
    $data = [
        'permissions' => Permission::all(),
        'title' => 'Role Management',
    ];
    return view('user.role-management', $data);
}
/**
 * Validate the submitted permission ids and assign the matching
 * permissions to the currently authenticated user.
 */
public function validateRolePermission(Request $request) {
    $validator = Validator::make($request->all(), [
        'permission' => 'required',
    ]);

    if ($validator->fails()) {
        return redirect()->back()->withErrors($validator);
    }

    $user = auth()->user();
    $username = $user->name;

    // Look the permissions up by the submitted ids only. The original
    // passed the whole validated-data array (['permission' => ...]) to
    // find(), feeding an associative array into whereIn('id', ...).
    $permissions = Permission::find($validator->getData()['permission'], ['name'])->toArray();
    $permission_assigned = $user->givePermissionTo($permissions);

    if ($permission_assigned) {
        $return['success'] = 'Permission successfully assigned to '.$username;
    } else {
        $return['error'] = 'Something went wrong while assigning permission to '.$username;
    }
    return redirect()->back()->with($return);
}
/**
 * Show the "create permission" form (update_id 0 means "create").
 */
public function createPermission(Request $request) {
    $data = [
        'update_id' => 0,
        'form_location' => '/permission/create',
        'title' => 'Role Management',
    ];
    return view('user.create-permission', $data);
}
/**
 * Show the form for assigning permissions (and roles) to a user.
 *
 * NOTE(review): the displayed username is the logged-in user's, not the
 * target user's — confirm that is intended.
 */
public function assignPermissionToUser(Request $request) {
    $user_id = $request->route('user_id');

    // The original also ran User::find($user_id)->roles->toArray() into an
    // unused variable ($d) — a wasted query that fatals for unknown ids;
    // removed.
    $data['username'] = auth()->user()->name;
    $data['permissions'] = Permission::all();
    $data['roles'] = Role::all();
    $data['update_id'] = 0;
    $data['form_location'] = '/user/'.$user_id.'/permission';
    $data['title'] = 'Role Management';
    return view('user.assign-permission', $data);
}
/**
 * Show the form for assigning roles to a user, with the user's currently
 * assigned role ids preselected.
 */
public function assignRoleToUser(Request $request) {
    $user_id = $request->route('user_id');
    $user = User::find($user_id);

    // Extract the assigned role ids from the pivot rows (empty-safe).
    $assignedRoles = $user->roles->toArray();
    $assignedRoles = !empty($assignedRoles) ? array_column($assignedRoles, 'pivot') : [];
    $assignedRoles = !empty($assignedRoles) ? array_column($assignedRoles, 'role_id') : [];

    $data = [
        'username' => $user->name,
        'roles' => Role::all(),
        'assigned_roles' => $assignedRoles,
        'update_id' => 0,
        'form_location' => '/user/'.$user_id.'/role',
        'title' => 'Role Management',
    ];
    return view('user.assign-role', $data);
}
/**
 * Validate and apply the "Assign Permissions" / "Assign Roles" form for a
 * specific user; the branch taken is selected by the submit button value.
 */
public function validateAssignPermissionToUser(Request $request) {
    $submit = $request->input('submit');
    $assign_role = $assign_permission = FALSE;
    $validation_rules = null;

    if ($submit == 'Assign Permissions') {
        $validation_rules = [
            'rules' => ['permission' => 'required'],
            'rules_label' => ['permission.required' => 'Permission is required'],
        ];
        $assign_permission = TRUE;
    }
    if ($submit == 'Assign Roles') {
        $validation_rules = [
            'rules' => ['role' => 'required'],
            'rules_label' => ['role.required' => 'Role is required'],
        ];
        $assign_role = TRUE;
    }

    // The original left $validation_rules (and $return) undefined for any
    // other submit value, producing an "undefined variable" error below.
    if ($validation_rules === null) {
        return redirect()->back();
    }

    $validator = Validator::make($request->all(), $validation_rules['rules'], $validation_rules['rules_label']);
    if ($validator->fails()) {
        return redirect()->back()->withErrors($validator);
    }

    $user_id = $request->route('user_id');
    $user = User::find($user_id);
    $username = $user->name;
    $return = [];

    if ($assign_permission == TRUE) {
        // Look permissions up by the submitted ids, not the whole data array.
        $permissions = Permission::find($validator->getData()['permission'], ['name'])->toArray();
        $permission_assigned = $user->givePermissionTo($permissions);
        if ($permission_assigned) {
            $return['success_permission'] = 'Permission successfully assigned to '.$username;
        } else {
            $return['error_permission'] = 'Something went wrong while assigning permission to '.$username;
        }
    }

    if ($assign_role == TRUE) {
        $role = Role::find($validator->getData()['role'], ['name'])->toArray();
        // syncRoles replaces all existing roles with the submitted set.
        $role_assigned = $user->syncRoles($role);
        // only use when u assign sigle role not from multiple role
        // $role_assigned = $user->assignRole($role);
        if ($role_assigned) {
            $return['success_role'] = 'Role successfully assigned to '.$username;
        } else {
            $return['error_role'] = 'Something went wrong while assigning role to '.$username;
        }
    }

    return redirect()->back()->with($return);
}
/*
Roles - Functioning Parts Create , Update, Delete, Assign to Permission
*/
/**
 * Display the list of all roles.
 */
public function roleShow(Request $request) {
    $data = [
        'title' => 'Role | List',
        'roles' => Role::all(),
    ];
    return view('roles.show', $data);
}
/**
 * Show the "create role" form with every permission available to attach.
 */
public function roleCreate(Request $request) {
    $data = [
        'permissions' => Permission::all(),
        'update_id' => 0,
        'form_location' => '/roles/create',
        'title' => 'Role | Create',
    ];
    return view('roles.create', $data);
}
// Show the edit form for an existing role with its current permissions
// preselected.
public function roleEdit(Request $request) {
$update_id = $request->route('role_id');
// NOTE(review): Role::find() returns null for an unknown id, which would
// fatal on ->toArray(); confirm routes guarantee a valid id.
$role = Role::find($update_id);
$data['role'] = $role->toArray();
$data['role_permissions'] = $role->getAllPermissions()->pluck('id')->toArray();
$data['permissions'] = Permission::all();
$data['update_id'] = $update_id;
$data['form_location'] = '/roles/'. $update_id .'/edit';
// NOTE(review): title reads 'Role | Create' on the edit screen — likely a
// copy-paste leftover; confirm before changing user-facing text.
$data['title'] = 'Role | Create';
return view('roles.create',$data);
}
// Placeholder destroy screen; no deletion is performed here.
public function roleDestroy(Request $request) {
$data['title'] = 'Role | Destroy';
return view('roles.destroy',$data);
}
// Validate the "create role" form, create the role, and sync the selected
// permissions onto it.
public function validateCreateRole(Request $request) {
$validator = Validator::make($request->all(),
['name' => 'required' , 'permission' => 'required'],
['name.required' => 'Role name is required.' , 'permission.required' => 'Permission is required.']
);
if (!$validator->fails()) {
$data = $validator->getData();
$role = Role::create(['name' => $data['name']]);
// Resolve the submitted permission ids to names for syncPermissions().
$permissions = Permission::find($data['permission'], ['name'])->toArray();
$assign_permission_to_role = $role->syncPermissions($permissions);
if ($assign_permission_to_role) {
$return['success'] = 'Role has been created successfully.';
} else {
$return['error'] = 'Something went wrong while creating role.';
}
return redirect()->back()->with($return);
} else {
return redirect()->back()->withErrors($validator)->withInput();
}
}
// Validate the "edit role" form, rename the role, and replace its
// permission set with the submitted one.
public function roleUpdate(Request $request) {
$validator = Validator::make($request->all(),
['name' => 'required' , 'permission' => 'required'],
['name.required' => 'Role name is required.' , 'permission.required' => 'Permission is required.']
);
if (!$validator->fails()) {
$data = $validator->getData();
$role_id = $request->route('role_id');
// NOTE(review): Role::find() returns null for an unknown id, which would
// fatal on ->update(); confirm routes guarantee a valid id.
$role = Role::find($role_id);
$role->update(['name' => $data['name']]);
// syncPermissions() detaches any permissions not in the submitted set.
$permissions = Permission::find($data['permission'], ['name'])->toArray();
$assign_permission_to_role = $role->syncPermissions($permissions);
if ($assign_permission_to_role) {
$return['success'] = 'Role updated successfully.';
} else {
$return['error'] = 'Something went wrong while updating role.';
}
return redirect()->back()->with($return);
} else {
return redirect()->back()->withErrors($validator)->withInput();
}
}
/*
Permission Create, Read, Update, Delete
*/
/**
 * Display the list of all permissions.
 */
public function permissionShow(Request $request) {
    $data = [
        'title' => 'Permission | List',
        'permissions' => Permission::all(),
    ];
    return view('permission.show', $data);
}
/**
 * Show the "create permission" form (update_id 0 means "create").
 */
public function permissionCreate(Request $request) {
    $data = [
        'update_id' => 0,
        'form_location' => '/permissions/create',
        'title' => 'Permission | Create',
    ];
    return view('permission.create', $data);
}
/**
 * Show the edit form for an existing permission, reusing the create view.
 */
public function permissionEdit(Request $request) {
    $update_id = $request->route('permission_id');
    $data = [
        'update_id' => $update_id,
        'permission' => Permission::find($update_id)->toArray(),
        'form_location' => '/permissions/'.$update_id.'/edit',
        'title' => 'Permission | Edit',
    ];
    return view('permission.create', $data);
}
/**
 * Delete the permission identified by the route's permission_id.
 */
public function permissionDestroy(Request $request) {
    $permission_id = $request->route('permission_id');
    $deleted = Permission::where('id', $permission_id)->delete();
    $flash = $deleted
        ? ['success' => 'Permission removed successfully']
        : ['error' => 'Something went wrong'];
    return redirect()->back()->with($flash);
}
// Create a new permission, or rename an existing one when the route
// carries a permission_id (> 0).
public function validatePermission(Request $request) {
$update_id = $request->route('permission_id') ? $request->route('permission_id') : 0;
$validator = Validator::make($request->all(),
['name' => 'required'],
['name.required' => 'Permission is required.']
);
if (!$validator->fails()) {
if ($update_id > 0) {
// NOTE(review): Permission::find() returns null for an unknown id,
// which would fatal on ->update(); confirm the id is always valid.
$permission = Permission::find($update_id)->update(['name' => $validator->getData()['name']]);
if ($permission) {
$return['success'] = 'Permission updated successfully.';
} else {
$return['error'] = 'Something went wrong while updating permission.';
}
} else {
$permission = Permission::create(['name' => $validator->getData()['name']]);
if ($permission) {
$return['success'] = 'New Permission has been created successfully.';
} else {
$return['error'] = 'Something went wrong while creating permission.';
}
}
return redirect()->back()->with($return);
} else {
return redirect()->back()->withErrors($validator)->withInput();
}
}
}
?>
|
// Copyright (C) 2011-2012 the original author or authors.
// See the LICENCE.txt file distributed with this work for additional
// information regarding copyright ownership.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package org.scalastyle.file
import org.junit.Test
import org.scalatestplus.junit.AssertionsForJUnit
// scalastyle:off magic.number multiple.string.literals
// Tests for the "header.matches" rule: files must start with the configured
// license header, given either literally or as a regex, and matching must be
// line-ending agnostic (LF vs CRLF on both the source and the header side).
class HeaderMatchesCheckerTest extends AssertionsForJUnit with CheckerTest {
val key = "header.matches"
val classUnderTest = classOf[HeaderMatchesChecker]
// The expected license header, one string per line.
val licenseLines = List(
"/**",
" * Copyright (C) 2009-2010 the original author or authors.",
" * See the notice.md file distributed with this work for additional",
" * information regarding copyright ownership.",
" *",
" * Licensed under the Apache License, Version 2.0 (the \"License\");",
" * you may not use this file except in compliance with the License.",
" * You may obtain a copy of the License at",
" *",
" * http://www.apache.org/licenses/LICENSE-2.0",
" *",
" * Unless required by applicable law or agreed to in writing, software",
" * distributed under the License is distributed on an \"AS IS\" BASIS,",
" * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.",
" * See the License for the specific language governing permissions and",
" * limitations under the License.",
" */"
)
// Same header joined with Unix (LF) and Windows (CRLF) line endings.
val licenseUnix = licenseLines.mkString("\n")
val licenseWin = licenseLines.mkString("\r\n")
// Minimal source body appended after the header in the test fixtures.
val baseSourceLines = List(
"",
"package foobar",
"",
" object Foobar {",
"}",
""
)
// Matching header: no errors for any EOL combination.
@Test def testOK(): Unit = {
val sourceLines = licenseLines ::: baseSourceLines
val sourceUnix = sourceLines.mkString("\n")
val sourceWin = sourceLines.mkString("\r\n")
assertErrors(List(), sourceUnix, Map("header" -> licenseUnix))
assertErrors(List(), sourceWin, Map("header" -> licenseWin))
assertErrors(List(), sourceUnix, Map("header" -> licenseWin))
assertErrors(List(), sourceWin, Map("header" -> licenseUnix))
}
// A header altered on line 13 is reported at that line.
@Test def testKO(): Unit = {
val sourceLines = licenseLines ::: baseSourceLines
val sourceUnix = sourceLines.mkString("\n").replaceAll("BASIS,", "XXX")
val sourceWin = sourceLines.mkString("\r\n").replaceAll("BASIS,", "XXX")
assertErrors(List(lineError(13)), sourceUnix, Map("header" -> licenseUnix))
assertErrors(List(lineError(13)), sourceWin, Map("header" -> licenseWin))
assertErrors(List(lineError(13)), sourceUnix, Map("header" -> licenseWin))
assertErrors(List(lineError(13)), sourceWin, Map("header" -> licenseUnix))
}
// A file shorter than the header is reported at its last line.
@Test def testTooShort(): Unit = {
val shortSourceLines = licenseLines.take(4)
val shortSourceUnix = shortSourceLines.mkString("\n")
val shortSourceWin = shortSourceLines.mkString("\r\n")
assertErrors(List(lineError(4)), shortSourceUnix, Map("header" -> licenseUnix))
assertErrors(List(lineError(4)), shortSourceWin, Map("header" -> licenseWin))
assertErrors(List(lineError(4)), shortSourceUnix, Map("header" -> licenseWin))
assertErrors(List(lineError(4)), shortSourceWin, Map("header" -> licenseUnix))
}
// True for characters that can appear unescaped in the generated regex.
def literalOK(c: Char): Boolean = c match {
case ' ' | '-' | ':' | '/' | '\n' => true
case ld: Any if ld.isLetterOrDigit => true
case _ => false
}
// Regex form of the license: every non-literal character is escaped, and
// the copyright years are generalized to "(dddd-)?dddd".
val licenceRegexUnix = {
(licenseUnix flatMap { c =>
if (literalOK(c)) c.toString else "\\" + c
}).replace("2009-2010", "(?:\\d{4}-)?\\d{4}")
}
val licenceRegexWin = {
(licenseWin flatMap { c =>
if (literalOK(c)) c.toString else "\\" + c
}).replace("2009-2010", "(?:\\d{4}-)?\\d{4}")
}
// Regex mode: exact header still matches.
@Test def testRegexOK(): Unit = {
val sourceLines = licenseLines ::: baseSourceLines
val sourceUnix = sourceLines.mkString("\n")
val sourceWin = sourceLines.mkString("\r\n")
assertErrors(List(), sourceUnix, Map("header" -> licenceRegexUnix, "regex" -> "true"))
assertErrors(List(), sourceWin, Map("header" -> licenceRegexWin, "regex" -> "true"))
assertErrors(List(), sourceUnix, Map("header" -> licenceRegexWin, "regex" -> "true"))
assertErrors(List(), sourceWin, Map("header" -> licenceRegexUnix, "regex" -> "true"))
}
// Regex mode: different (but well-formed) years also match.
@Test def testRegexFlexible(): Unit = {
val sourceLines = licenseLines ::: baseSourceLines
val sourceUnix = sourceLines.mkString("\n").replace("2009-2010", "2009-2014")
val sourceWin = sourceLines.mkString("\r\n").replace("2009-2010", "2009-2014")
assertErrors(List(), sourceUnix, Map("header" -> licenceRegexUnix, "regex" -> "true"))
assertErrors(List(), sourceWin, Map("header" -> licenceRegexWin, "regex" -> "true"))
assertErrors(List(), sourceUnix, Map("header" -> licenceRegexWin, "regex" -> "true"))
assertErrors(List(), sourceWin, Map("header" -> licenceRegexUnix, "regex" -> "true"))
}
// Regex mode: non-numeric years fail with a file-level error.
@Test def testRegexKO(): Unit = {
val sourceLines = licenseLines ::: baseSourceLines
val sourceUnix = sourceLines.mkString("\n").replace("2009-2010", "xxxx-xxxx")
val sourceWin = sourceLines.mkString("\r\n").replace("2009-2010", "xxxx-xxxx")
assertErrors(List(fileError()), sourceUnix, Map("header" -> licenceRegexUnix, "regex" -> "true"))
assertErrors(List(fileError()), sourceWin, Map("header" -> licenceRegexWin, "regex" -> "true"))
assertErrors(List(fileError()), sourceUnix, Map("header" -> licenceRegexWin, "regex" -> "true"))
assertErrors(List(fileError()), sourceWin, Map("header" -> licenceRegexUnix, "regex" -> "true"))
}
}
|
// Dropdown menu show: cap the menu's max-width so it never overflows the
// viewport on the side it opens towards.
$(document).on('show.bs.dropdown', '.dropdown', function() {
    var $dropdownMenu = $('.dropdown-menu', $(this));
    var $dropdownToggle = $('[class*="dropdown-toggle"]', $(this));

    if (!$dropdownMenu.length || !$dropdownToggle.length) {
        return;
    }

    // .css() returns undefined on an empty set — the original crashed on
    // .replace() when the menu contained no <a>. parseFloat converts
    // "12px" to a number instead of relying on implicit string coercion.
    var dropdownPadding = parseFloat($('a', $dropdownMenu).css('padding-left')) || 0;
    var dropdownWidth;

    if ($dropdownMenu.hasClass('dropdown-menu-right') || $dropdownMenu.parents('.nav.pull-right').length) {
        // Right-aligned menu: limit to the space left of the toggle's right edge.
        dropdownWidth = $dropdownToggle.offset().left + $dropdownToggle.outerWidth() - dropdownPadding;
    } else {
        // Left-aligned menu: limit to the space right of the toggle's left edge.
        dropdownWidth = window.innerWidth - $dropdownToggle.offset().left - dropdownPadding;
    }

    $dropdownMenu.css('max-width', dropdownWidth);
});
|
require 'observer'
# GTK view that renders the leaderboard table (Username / Wins / Losses /
# Ties) inside a scrollable window, plus a button to return to the main
# menu. Observers are notified with 'main_menu_clicked' when the button
# is pressed.
class LeaderboardView
include(Observable)
def initialize(window)
@window = window # Reference to the application window
end
# Build and show the leaderboard UI.
# leaderboard: rows of pairs; the first pair of each row is skipped
# (presumably a record id — TODO confirm against the caller) and the
# second element (entry[1]) of each remaining pair is displayed.
def draw(leaderboard)
scroll_window = Gtk::ScrolledWindow.new
content_box = Gtk::Box.new(:vertical, 10)
grid = Gtk::Grid.new
grid.set_column_homogeneous(true)
# Header row: one bold label per column.
username_header = Gtk::Label.new
username_header.set_markup('<b>Username</b>')
grid.attach(username_header, 0, 0, 1, 1)
wins_header = Gtk::Label.new
wins_header.set_markup('<b>Wins</b>')
grid.attach(wins_header, 1, 0, 1, 1)
losses_header = Gtk::Label.new
losses_header.set_markup('<b>Losses</b>')
grid.attach(losses_header, 2, 0, 1, 1)
ties_header = Gtk::Label.new
ties_header.set_markup('<b>Ties</b>')
grid.attach(ties_header, 3, 0, 1, 1)
# One grid row per leaderboard entry, below the header (hence i + 1).
leaderboard.each_with_index do |row, i|
row.each_with_index do |entry, j|
next if j.zero?
label = Gtk::Label.new("#{entry[1]}")
grid.attach(label, j - 1, i + 1, 1, 1)
end
end
main_menu_button = Gtk::Button.new(label: 'Back to Main Menu')
main_menu_button.signal_connect('clicked') do |_, _|
# Observable protocol: flag a change, then broadcast the event.
changed
notify_observers('main_menu_clicked')
end
content_box.add(grid)
content_box.set_child_packing(grid, :expand => true)
content_box.add(main_menu_button)
scroll_window.add(content_box)
@window.add(scroll_window)
@window.show_all
end
end
|
# This library was merged into Rails 4.1.
#
# Prefer Active Support's implementation when it is available. This allows
# libraries that support multiple Rails versions to depend on `concerning`
# without an implementation collision: each feature below is loaded from our
# own backport only if Active Support does not already provide it.
require 'active_support/concern'
# Concern#class_methods: backport only when Active Support lacks it.
if !ActiveSupport::Concern.method_defined?(:class_methods)
require 'concerning/concern_class_methods'
end
# Module#concerning: prefer the Active Support core extension; fall back to
# our implementation on older Rails versions.
begin
require 'active_support/core_ext/module/concerning'
rescue LoadError
require 'concerning/module_concerning'
end
# Kernel#concern: same strategy as above.
begin
require 'active_support/core_ext/kernel/concern'
rescue LoadError
require 'concerning/kernel_concern'
end
|
namespace SciColorMaps.WinForms
{
/// <summary>
/// Designer half of the ColorSetupForm partial class: holds the
/// auto-generated control declarations and layout code. Do not edit the
/// generated region by hand — use the Windows Forms designer instead.
/// </summary>
partial class ColorSetupForm
{
/// <summary>
/// Required designer variable.
/// </summary>
private System.ComponentModel.IContainer components = null;
/// <summary>
/// Clean up any resources being used.
/// </summary>
/// <param name="disposing">true if managed resources should be disposed; otherwise, false.</param>
protected override void Dispose(bool disposing)
{
if (disposing && (components != null))
{
components.Dispose();
}
base.Dispose(disposing);
}
#region Windows Form Designer generated code
/// <summary>
/// Required method for Designer support - do not modify
/// the contents of this method with the code editor.
/// </summary>
private void InitializeComponent()
{
this._colorPanel = new System.Windows.Forms.Panel();
this._color1 = new System.Windows.Forms.Panel();
this._color2 = new System.Windows.Forms.Panel();
this._okButton = new System.Windows.Forms.Button();
this.SuspendLayout();
//
// _colorPanel
//
this._colorPanel.BackColor = System.Drawing.SystemColors.ControlLightLight;
this._colorPanel.BorderStyle = System.Windows.Forms.BorderStyle.FixedSingle;
this._colorPanel.Location = new System.Drawing.Point(37, 22);
this._colorPanel.Name = "_colorPanel";
this._colorPanel.Size = new System.Drawing.Size(699, 99);
this._colorPanel.TabIndex = 0;
this._colorPanel.MouseClick += new System.Windows.Forms.MouseEventHandler(this._colorPanel_MouseClick);
//
// _color1
//
this._color1.BackColor = System.Drawing.SystemColors.ActiveCaptionText;
this._color1.BorderStyle = System.Windows.Forms.BorderStyle.FixedSingle;
this._color1.Location = new System.Drawing.Point(37, 127);
this._color1.Name = "_color1";
this._color1.Size = new System.Drawing.Size(37, 40);
this._color1.TabIndex = 1;
this._color1.Tag = 0;
this._color1.MouseClick += new System.Windows.Forms.MouseEventHandler(this._color_MouseClick);
//
// _color2
//
this._color2.BackColor = System.Drawing.Color.White;
this._color2.BorderStyle = System.Windows.Forms.BorderStyle.FixedSingle;
this._color2.Location = new System.Drawing.Point(698, 127);
this._color2.Name = "_color2";
this._color2.Size = new System.Drawing.Size(38, 40);
this._color2.TabIndex = 2;
this._color2.Tag = 1;
this._color2.MouseClick += new System.Windows.Forms.MouseEventHandler(this._color_MouseClick);
//
// _okButton
//
this._okButton.Location = new System.Drawing.Point(324, 184);
this._okButton.Name = "_okButton";
this._okButton.Size = new System.Drawing.Size(126, 53);
this._okButton.TabIndex = 3;
this._okButton.Text = "OK";
this._okButton.UseVisualStyleBackColor = true;
this._okButton.Click += new System.EventHandler(this._okButton_Click);
//
// ColorSetupForm
//
this.AutoScaleDimensions = new System.Drawing.SizeF(8F, 16F);
this.AutoScaleMode = System.Windows.Forms.AutoScaleMode.Font;
this.ClientSize = new System.Drawing.Size(773, 249);
this.Controls.Add(this._okButton);
this.Controls.Add(this._color2);
this.Controls.Add(this._color1);
this.Controls.Add(this._colorPanel);
this.Name = "ColorSetupForm";
this.Text = "ColorSetupForm";
this.Load += new System.EventHandler(this.ColorSetupForm_Load);
this.Paint += new System.Windows.Forms.PaintEventHandler(this.ColorSetupForm_Paint);
this.ResumeLayout(false);
}
#endregion
// Gradient preview panel and the two endpoint color swatches (Tag holds
// the endpoint index used by the shared _color_MouseClick handler).
private System.Windows.Forms.Panel _colorPanel;
private System.Windows.Forms.Panel _color1;
private System.Windows.Forms.Panel _color2;
private System.Windows.Forms.Button _okButton;
}
}
|
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
namespace ModulesParser
{
    /// <summary>
    /// Parses an Enum element.
    /// </summary>
    public class EnumParser : Parser, IDeclaration
    {
        // Whether the enum is a [Flags]-style bit set ("flags" attribute).
        public bool Flags;
        // Child values, keyed by name, filled in by Parse().
        public Dictionary<string, EnumValueParser> EnumValues = new Dictionary<string, EnumValueParser>();

        public EnumParser(string name, System.Xml.XmlNode node) : base(name, node)
        {
        }

        public override void Parse(ModuleParser moduleParser, DataParser data)
        {
            // The "flags" attribute is optional; absent means false.
            var flagsAttribute = Node.Attributes["flags"];
            Flags = flagsAttribute != null && flagsAttribute.InnerText == "true";
            Parse(moduleParser, data, EnumValues, "./en:Value");
        }

        /// <summary>The declaration of an enum is simply its name.</summary>
        public string GetDeclaration()
        {
            return Name;
        }

        /// <summary>Enums never implement events.</summary>
        public bool ImplementsEvent(string eventName)
        {
            return false;
        }

        public string GetTypeName()
        {
            return Name;
        }

        public string GetName()
        {
            return Name;
        }
    }
}
|
# redis-demonstration
redis-demonstration is a Python application that demonstrates the features of Redis.
## Basic setup
Install the requirements:
```
$ pip install -r requirements.txt
```
Run the application:
```
$ python -m redis-demonstration --help
```
To run the tests:
```
$ pytest
```
# redis_demonstration
|
import { Coins } from "@arkecosystem/platform-sdk";
import { Input, InputAddon, InputAddonStart, InputGroup } from "app/components/Input";
import { NetworkIcon } from "domains/network/components/NetworkIcon";
import React from "react";
type Props = {
network?: Coins.Network;
suggestion?: string;
} & React.InputHTMLAttributes<any>;
// Ghost-text overlay that shows the current autocomplete suggestion. It sits
// behind the input, is semi-transparent, and ignores pointer events.
const TypeAhead = ({ value }: { value?: string }) => {
	const typeaheadClasses =
		"py-3 font-normal border border-transparent opacity-50 pointer-events-none pl-15";

	return (
		<InputAddon as="span" data-testid="SelectNetworkInput__typeahead" className={typeaheadClasses}>
			{value}
		</InputAddon>
	);
};
// Network-aware text input: renders the selected network's icon on the left
// and an inline type-ahead suggestion behind the typed text.
export const SelectNetworkInput = React.forwardRef<HTMLInputElement, Props>(
	({ network, suggestion, ...props }: Props, ref) => {
		return (
			<InputGroup data-testid="SelectNetworkInput">
				<InputAddonStart className="px-4">
					<NetworkIcon
						data-testid="SelectNetworkInput__network"
						coin={network?.coin()}
						network={network?.id()}
						size="sm"
						showTooltip={false}
						noShadow
					/>
				</InputAddonStart>
				{suggestion && <TypeAhead value={suggestion} />}
				<Input data-testid="SelectNetworkInput__input" ref={ref} className="pl-15" {...props} />
			</InputGroup>
		);
	},
);
SelectNetworkInput.displayName = "SelectNetworkInput";
|
// Theme definition consumed by the app (shape matches React Navigation's
// theme object: dark flag + colors, extended with app-specific colors and
// the Sora font family variants).
const styles = {
primary_theme: {
dark: false,
colors: {
// React Navigation base colors.
primary: "rgb(255, 45, 85)",
background: "rgb(242, 242, 242)",
card: "rgb(255, 255, 255)",
text: "rgb(28, 28, 30)",
border: "rgb(199, 199, 204)",
notification: "rgb(255, 69, 58)",
// App-specific palette.
primaryGreen: "#14793F",
primaryWhite: "#FFFFFF",
backgroundWhite: "#FDFDFF",
primaryBlack: "#000000",
backgroundBlack: "#121212",
primaryBlue: "#4F99EE",
backgroundLightBlue: "#8CBBF1",
backgroundLightPink: "#F9CFE0",
backgroundLightYellow: "#FCEECB",
backgroundLightGrey: "#D7DDE9",
backgroundPurple: "#232232",
primaryGrey: "#9C9CA8",
},
// Sora font family, one entry per weight.
fonts: {
regular: "Sora-Regular",
bold: "Sora-Bold",
medium: "Sora-Medium",
semiBold: "Sora-SemiBold",
extraBold: "Sora-ExtraBold",
thin: "Sora-Thin",
},
},
};
export default styles;
|
---
layout: "post"
title: "웹 기반 WYSIWYG 에디터 Summernote 사용하기"
date: 2016-12-19 19:34:31
category: frontend-jQuery
tags: [javascript,jQuery,Summernote]
draft: false
---
WYSIWYG 에디터 Summernote 사용방법을 정리합니다.
<!--more-->
###### 1. CDN 추가
```html
<link href="http://cdnjs.cloudflare.com/ajax/libs/summernote/0.8.2/summernote.css" rel="stylesheet">
<script src="http://cdnjs.cloudflare.com/ajax/libs/summernote/0.8.2/summernote.js"></script>
```
###### 2. 에디터로 사용할 div 영역 생성
```html
<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<html>
<head>
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
<%@ include file="/WEB-INF/include/app-header.jspf" %>
<title>Insert title here</title>
</head>
<body>
<div id="summernote"></div>
</body>
</html>
```
###### 3. 에디터 생성
```js
$(document).ready(function(){
var toolbar = [
['style', ['bold', 'italic', 'underline', 'clear']],
['font', ['strikethrough', 'superscript', 'subscript']],
['fontsize', ['fontsize']],
['color', ['color']],
['para', ['ul', 'ol', 'paragraph']],
['height', ['height']],
['table', ['table']],
['insert', ['link', 'picture', 'hr']],
['view', ['fullscreen', 'codeview']],
['help', ['help']]
];
var setting = {
height : 300,
minHeight: null,
maxHeight: null,
focus : true,
lang : 'ko-KR',
toolbar : toolbar
};
$('#summernote').summernote(setting);
});
```
간단하다.
toolbar 변수는 에디터 상단에 어떤 버튼들을 보여줄지 설정하는 것이다.

###### 4. 에디터에 작성한 내용 HTML코드로 가져오기
```js
var htmlStr = $('#summernote').summernote('code');
```
기타 더 다양한 설정 및 활용방법은 [Summernote 공식 홈페이지](https://summernote.org/)에서 확인가능하다.
|
"""
doc(fname, title="", width=800, height=600)
Display the contents of file name `fname` in a browser (webview) window.
Alternatively, `fname` can be a URL address.
Example, display the contents of the README.md file located in current dir.
doc("README.md")
to display the contents of the `examples.md` file located in the docs/src dir of the module GMT, do
doc(GMT, "examples.md")
To display the doc strings of any documented function, just do (i.e to show the *sin* docs)
doc(sin)
"""
function doc(fname::AbstractString; title::AbstractString="", width::Integer=0, height::Integer=0)
if (width == 0) width = 800 end
if (height == 0) height = 600 end
if (title == "") title, = splitext(basename(fname)) end
if (startswith(fname, "http") || startswith(fname, "ftp") || startswith(fname, "file"))
webview(title, fname, width, height)
else
MD = Markdown.parse_file(fname);
webview(title, "data:text/html,<html>" * html(MD) * "</html>", width, height)
end
end
# Module variant: resolve `fname` relative to the module's installation.
# "README"/"README.md" maps to the package root README; anything else is
# looked up under docs/src.
function doc(mod::Module, fname::AbstractString; title::AbstractString="", width::Integer=0, height::Integer=0)
	pkg_root = joinpath(dirname(pathof(mod)), "..")
	fn = (fname == "README" || fname == "README.md") ?
		joinpath(pkg_root, "README.md") :
		joinpath(pkg_root, joinpath("docs/src", fname))
	doc(fn, title=title, width=width, height=height)
end
# Method to render the @doc string of a function in a webview window.
function doc(fun; width::Integer=0, height::Integer=0)
	w = (width == 0) ? 800 : width
	h = (height == 0) ? 600 : height
	webview(string(fun), "data:text/html,<html>" * html(Base.Docs.doc(fun)) * "</html>", w, h)
end
|
#ifndef APPLOG_PRI_
#define APPLOG_PRI_

#include <stdint.h>
#include <stdio.h>

#ifdef __cplusplus
extern "C" {
#endif

/**
 Example:
     void* dataIn = xxx;
     size_t data_size = xxx;
     size_t bufferSize = applog_decorated_buffer_min_size(data_size);
     uint8_t *buffer = malloc(sizeof(uint8_t) * bufferSize);
     applog_decorated(dataIn, data_size, buffer, &bufferSize)
     if (bufferSize > 0)
     {
         // encryption succeeded; the encrypted data is buffer[0 -> bufferSize]
     }
     else
     {
         // encryption failed
     }

 NOTE(review): the example calls applog_decorated() but only
 applog_decorated_private() is declared below — confirm which name is public.
 */

/* Minimum output-buffer size for an input of x bytes: the payload plus
 * 6 + 32 + 64 bytes of fixed overhead, padded up to the next 16-byte
 * boundary.  The argument is parenthesized so expression arguments
 * (e.g. a + b, n * m) expand correctly. */
#define applog_decorated_buffer_min_size(x) ((x) + 6 + 32 + 64 + 16 - (x) % 16)

/// buffer_size = data_size + 6 + 32 + 64 + 16
void applog_decorated_private(const void* dataIn,
                              const size_t data_size,
                              uint8_t *buffer_out,
                              size_t *buffer_size);

#ifdef __cplusplus
}
#endif

#endif
|
package data
/**
 * Immutable state of the app drawer.
 *
 * @property inboxProject the dedicated inbox project, held separately from [projects]
 * @property projects the projects listed in the drawer
 * @property labels the labels listed in the drawer
 * @property isFavoritesOpened whether the favorites section is opened (defaults to true)
 * @property isProjectsOpened whether the projects section is opened (defaults to true)
 * @property isLabelsOpened whether the labels section is opened (defaults to false)
 */
data class AppDrawerState(
    val inboxProject: AppProject,
    val projects: List<AppProject>,
    val labels: List<AppLabel>,
    val isFavoritesOpened: Boolean = true,
    val isProjectsOpened: Boolean = true,
    val isLabelsOpened: Boolean = false,
)
|
import { HttpClient } from '@angular/common/http';
import { Injectable } from '@angular/core';
import { Store } from '@ngrx/store';
import { BehaviorSubject } from 'rxjs';
import { Observable } from 'rxjs/internal/Observable';
import { catchError, tap, map } from 'rxjs/operators';
import { IRootState } from '../+store';
import { IUser } from '../shared/interfaces';
import { login, register, authenticate, logout } from '../+store/actions';
/**
 * Authentication service: talks to the /users endpoints and mirrors the
 * resulting user into the NgRx store via the auth actions.
 */
@Injectable()
export class AuthService {
  // tslint:disable-next-line:variable-name
  /** Current user slice of the store: `undefined` while unresolved, `null` when logged out. */
  currentUser$ = this.store.select((state) => state.auth.currentUser);
  /** Emits true once a user is present (i.e. not `null`). */
  isLogged$ = this.currentUser$.pipe(map(currentUser => currentUser !== null));
  /** Emits true once the auth state has been resolved (i.e. not `undefined`). */
  isReady$ = this.currentUser$.pipe(map(currentUser => currentUser !== undefined));

  constructor(private http: HttpClient, private store: Store<IRootState>) { }

  /** POST credentials and dispatch the `login` action with the returned user. */
  login(data: any): Observable<any> {
    return this.http.post(`/users/login`, data).pipe(
      tap((user: IUser) => this.store.dispatch(login({ user })))
    );
  }

  /** POST registration data and dispatch the `register` action with the returned user. */
  register(data: any): Observable<any> {
    return this.http.post(`/users/register`, data).pipe(
      tap((user: IUser) => this.store.dispatch(register({ user })))
    );
  }

  /** POST a logout request and clear the stored user. */
  logout(): Observable<any> {
    // The response body is not used here — the previous `user: IUser`
    // parameter was unused and misleading, so the callback takes no args.
    return this.http.post(`/users/logout`, {}).pipe(
      tap(() => this.store.dispatch(logout()))
    );
  }

  /**
   * Fetch the profile to restore a session; dispatches `authenticate` with
   * the user on success, or with `null` (logged out) when the request fails.
   */
  authenticate(): Observable<any> {
    return this.http.get(`/users/profile`).pipe(
      tap((user: IUser) => this.store.dispatch(authenticate({ user }))),
      catchError(() => {
        this.store.dispatch(authenticate({ user: null }));
        return [null];
      })
    );
  }
}
|
import React, {FunctionComponent} from 'react';
import {
GappedStack,
Title, useConcessionHoursImage,
useConcessionSignupLink,
useConcessionsLocation,
useDirectorOfConcessionsMailLink
} from '../common';
import {TreatTickets} from './TreatTickets';
import {ConcessionStandDutyAndSignup} from './ConcessionStandDutyAndSignup';
/**
 * Concessions info page: treat tickets plus stand-duty/signup details.
 */
export const ConcessionsPage: FunctionComponent = () => {
    // Hook call order is kept identical to keep React's hook ordering stable.
    const location = useConcessionsLocation();
    const signup = useConcessionSignupLink();
    const contact = useDirectorOfConcessionsMailLink();
    const hours = useConcessionHoursImage();

    return (
        <GappedStack padded flex>
            <Title>Concessions Info</Title>
            <TreatTickets location={location}/>
            <ConcessionStandDutyAndSignup signupLink={signup} contactLink={contact} hoursImage={hours}/>
        </GappedStack>
    );
};
|
package pl.elpassion.eltc.recap
import android.annotation.TargetApi
import android.app.*
import android.content.Context
import android.content.Intent
import android.support.v4.app.NotificationCompat
import android.support.v4.content.ContextCompat
import pl.elpassion.eltc.Build
import pl.elpassion.eltc.R
import pl.elpassion.eltc.builds.BuildsActivity
import pl.elpassion.eltc.util.notificationManager
/**
 * Posts failure notifications for recapped builds on the recap channel,
 * grouping them under a summary when more than one build failed.
 */
class RecapNotifierImpl(private val application: Application) : RecapNotifier {

    private val notificationManager get() = application.notificationManager

    @TargetApi(26)
    override fun createRecapChannel() {
        // High-importance channel so recap failures are surfaced prominently.
        val channel = NotificationChannel(
            RECAP_CHANNEL_ID,
            application.getString(R.string.recap_channel),
            NotificationManager.IMPORTANCE_HIGH
        )
        channel.enableLights(true)
        channel.enableVibration(true)
        notificationManager.createNotificationChannel(channel)
    }

    override fun showFailureNotifications(failedBuilds: List<Build>) {
        for (build in failedBuilds) notify(build)
        // With several failures, also post a group summary so the individual
        // notifications collapse into one stack.
        if (failedBuilds.size > 1) {
            notificationManager.notify(0, application.createGroupSummary())
        }
    }

    private fun notify(build: Build) {
        val notification = application.createNotification(
            title = "Build #${build.number} in ${build.buildType.projectName} failed",
            text = build.statusText,
            intent = getResultIntent(build)
        )
        notificationManager.notify(build.id, notification)
    }

    // Intent that reopens BuildsActivity focused on the failed build.
    private fun getResultIntent(build: Build): PendingIntent? {
        val intent = Intent(application, BuildsActivity::class.java)
        intent.putExtra(BuildsActivity.BUILD_KEY, build)
        intent.flags = Intent.FLAG_ACTIVITY_SINGLE_TOP
        return PendingIntent.getActivity(application, 0, intent, PendingIntent.FLAG_UPDATE_CURRENT)
    }

    private fun Context.createNotification(title: String, text: String?, intent: PendingIntent?): Notification =
        NotificationCompat.Builder(this, RECAP_CHANNEL_ID)
            .setContentTitle(title)
            .setContentText(text)
            .setContentIntent(intent)
            .setSmallIcon(R.drawable.ic_failure_recap)
            .setColor(ContextCompat.getColor(this, R.color.failure))
            .setAutoCancel(true)
            .setCategory(Notification.CATEGORY_STATUS)
            .setGroup(FAILURES_GROUP_KEY)
            .build()

    private fun Application.createGroupSummary(): Notification =
        NotificationCompat.Builder(this, RECAP_CHANNEL_ID)
            .setSmallIcon(R.drawable.ic_failure_recap)
            .setColor(ContextCompat.getColor(this, R.color.failure))
            .setCategory(Notification.CATEGORY_STATUS)
            .setGroup(FAILURES_GROUP_KEY)
            .setGroupSummary(true)
            .build()

    companion object {
        const val RECAP_CHANNEL_ID = "recap_channel_id"
        const val FAILURES_GROUP_KEY = "failures_group_key"
    }
}
|
use strict;
use warnings;
use Test::More;

# These tests exercise Dancer 1 behavior only; skip the whole file when
# Dancer 2 or newer is installed.
BEGIN {
    require Dancer;
    plan skip_all => 'Dancer 1 tests'
        if Dancer->VERSION >= 2;
}

plan tests => 1;

use lib 't/apps/Foo/lib';
use Foo;
use Dancer::Test appdir => 't/apps/Foo/yadah'; # NOTE(review): appdir points at a dir that looks wrong ("yadah") — confirm intent

# The Foo app's config enables escaping; the rendered root page should
# therefore contain the literal <escape> marker.
response_content_like [GET => '/'], qr/<escape>/,
    "escape config was passed";
|
import puppeteer from 'puppeteer'
import lighthouse from 'lighthouse'
import devices from 'puppeteer/DeviceDescriptors'
import auditSite from './helpers'
import PuppeteerHar from 'puppeteer-har'
import fs from 'fs'
// Shared browser/page handles for the "Some tests" suite (set in beforeAll).
let browser
let page

// Site under audit and the mobile device profile used for emulation.
const homepage = 'https://www.google.com.mx/'
const iphone = devices['iPhone 6']
// Lighthouse / coverage audits against the homepage.
describe('Lighhoutse suite', () => {
  it(
    'audits homepage',
    async () => {
      const browser = await puppeteer.launch()
      const results = await auditSite(browser, homepage)
      console.table(results)
      // Await the close so the test does not end while the browser is
      // still shutting down (the promise was previously dropped).
      await browser.close()
    },
    25000
  )

  it(
    'other lighthouse',
    async () => {
      const browser = await puppeteer.launch()
      const page = await browser.newPage()
      await page.emulate(iphone)
      // Point lighthouse at the debugging port of the running browser.
      const flags = {
        port: new URL(browser.wsEndpoint()).port,
        output: 'html',
      }
      const result = await lighthouse(homepage, flags)
      const html = result.report
      fs.writeFileSync('reports/html/reportfull.html', html)
      await browser.close()
    },
    25000
  )

  it('check code coverage', async () => {
    const browser = await puppeteer.launch()
    const page = await browser.newPage()
    await page.emulate(iphone)
    // Enable both JavaScript and CSS coverage
    await Promise.all([
      page.coverage.startJSCoverage(),
      page.coverage.startCSSCoverage(),
    ])
    await page.goto(homepage)
    // wait for page to render completely
    await page.waitFor('input')
    // Disable both JavaScript and CSS coverage
    const [jsCoverage, cssCoverage] = await Promise.all([
      page.coverage.stopJSCoverage(),
      page.coverage.stopCSSCoverage(),
    ])
    // Total bytes = length of every script/stylesheet text delivered.
    const jsTotalBytes = jsCoverage.reduce(
      (acc, val) => acc + val.text.length,
      0
    )
    const cssTotalBytes = cssCoverage.reduce(
      (acc, val) => acc + val.text.length,
      0
    )
    // Used bytes = sum of the executed/applied ranges.
    // NOTE(review): the `- 1` per range follows the upstream Puppeteer
    // example; ranges are [start, end), so plain end - start may be the
    // intended formula — confirm before changing.
    const jsUsedBytes = jsCoverage.reduce(
      (acc, val) =>
        acc +
        val.ranges.reduce((racc, rval) => racc + rval.end - rval.start - 1, 0),
      0
    )
    const cssUsedBytes = cssCoverage.reduce(
      (acc, val) =>
        acc +
        val.ranges.reduce((racc, rval) => racc + rval.end - rval.start - 1, 0),
      0
    )
    console.log(
      `Used js bytes: ${Math.floor((jsUsedBytes / jsTotalBytes) * 100)}%`
    )
    console.log(
      `Used css bytes: ${Math.floor((cssUsedBytes / cssTotalBytes) * 100)}%`
    )
    await browser.close()
  })
})
// Basic sanity checks plus a HAR capture of a homepage navigation.
describe('Some tests', () => {
  beforeAll(async () => {
    browser = await puppeteer.launch()
    page = await browser.newPage()
    await page.emulate(iphone)
  })

  afterAll(async () => {
    // Await the close (previously the promise was dropped, so the suite
    // could finish before the browser shut down).
    await browser.close()
  })

  it('one test', () => {
    expect(1).toBe(1)
  })

  it('should create a browser', async () => {
    expect(browser).not.toBeNull()
  })

  it('should create a page', async () => {
    expect(page).not.toBeNull()
  })

  it(
    'navigate to betreut page',
    async () => {
      // Record the navigation into a HAR file for later inspection.
      const har = new PuppeteerHar(page)
      await har.start({ path: 'reports/har/results.har' })
      await page.goto(homepage)
      expect(page.url()).toBe(homepage)
      await har.stop()
    },
    10000
  )
})
|
# Runs a synchronization for the signed-in user and responds (JS only)
# with the freshly reloaded user record.
class SyncsController < ApplicationController
  respond_to :js

  def create
    sync = Sync.new(current_user)
    sync.perform
    @user = current_user.reload
    respond_with(@user)
  end
end
|
import { PageBaseModel } from './page-base-model';
/**
 * Page model for an article.
 *
 * Adds no members of its own — presumably kept as a distinct type so that
 * article-specific fields can be added later without touching PageBaseModel.
 */
export class ArticleModel extends PageBaseModel {
}
|
# Mapo Tofu

|
using System.Collections.Generic;
using System.ComponentModel;
namespace Gimela.Presentation.Controls.Timeline
{
  /// <summary>
  /// Compares timeline children by their end time.  Children without an end
  /// time compare lower than any child that has one, regardless of the
  /// configured direction.
  /// </summary>
  internal class TimelinePanelChildEndTimeComparer : IComparer<TimelinePanelChild>
  {
    /// <summary>
    /// Creates the comparer with the default (descending) direction.
    /// </summary>
    public TimelinePanelChildEndTimeComparer()
    {
      SortDirection = ListSortDirection.Descending;
    }

    /// <summary>
    /// Sort direction; defaults to descending.
    /// </summary>
    public ListSortDirection SortDirection { get; set; }

    #region IComparer<TimelinePanelChild> Members

    public int Compare(TimelinePanelChild x, TimelinePanelChild y)
    {
      bool xHasEnd = x.End.HasValue;
      bool yHasEnd = y.End.HasValue;

      // Guard clauses: a missing end time compares equal to another missing
      // one, and below any present value.
      if (!xHasEnd)
        return yHasEnd ? -1 : 0;
      if (!yHasEnd)
        return 1;

      // Both ends present — honor the configured direction.
      if (SortDirection == ListSortDirection.Ascending)
        return x.End.Value.CompareTo(y.End.Value);
      return y.End.Value.CompareTo(x.End.Value);
    }

    #endregion
  }
}
|
import { STRING, Optional, Model } from 'sequelize'
import sequelize from './index'
// Shape of a persisted room row.
interface RoomAttributes {
  roomId: string
  roomName: string
  createdByUserId: string
}

// At creation time roomId may be omitted.
interface RoomCreationAttributes extends Optional<RoomAttributes, 'roomId'> {}

// Typed model instance combining the attributes with Sequelize's Model API.
interface RoomInstance
  extends Model<RoomAttributes, RoomCreationAttributes>,
    RoomAttributes {}

const Rooms = sequelize.define<RoomInstance>(
  'Rooms',
  {
    roomId: {
      type: STRING,
      primaryKey: true,
    },
    roomName: {
      type: STRING,
    },
    createdByUserId: {
      type: STRING,
      allowNull: false,
    },
  },
  {
    tableName: 'Rooms',
  }
)

// NOTE(review): fire-and-forget sync at module load — a failed table sync is
// never surfaced (unhandled promise). Confirm this is intended.
Rooms.sync()

export default Rooms
|
#!/bin/bash -eu

# Load MYDNS_USER / MYDNS_PASS.
source /home/u6k/.env

# Authenticate against the MyDNS login page.  Credentials are passed with
# curl's -u option instead of being embedded in the URL, so they do not
# appear in process listings or in proxy/access logs; the variables are
# quoted to survive special characters.
curl -v -f -u "${MYDNS_USER}:${MYDNS_PASS}" http://www.mydns.jp/login.html
|
#
# Landing page
#
# Renders the roster tool's front page: a table of counts linking to the
# committer, member, PMC, podling and group listings, plus extra links for
# ASF members and officers.
#

PAGETITLE = "ASF Roster Tool" # Wvisible:projects

_html do
_link rel: 'stylesheet', href: "stylesheets/app.css?#{cssmtime}"
_body? do
_whimsy_body(
title: PAGETITLE,
breadcrumbs: {
roster: '.'
}
) do
# Logged-in user; used below to gate the members/officers-only sections.
person = ASF::Person.find(env.user)
_table.counts do
### personal page
_tr do
_td do
_a '1', href: 'committer/__self__'
end
_td do
_a env.user, href: 'committer/__self__'
end
_td 'Your personal page'
end
### committers
_tr do
_td do
_a @committers.length, href: 'committer/'
end
_td do
_a 'Committers', href: 'committer/'
end
_td do
_ 'Search for committers by name, user id, or email address'
_ ' (includes '
_ @committers.select{|c| c.inactive?}.length
_ ' inactive accounts)'
end
end
# Extended committer search (with pending ICLAs) — members/officers only.
if person.asf_member? or ASF.pmc_chairs.include? person
_tr do
_td do
_a @committers.length, href: 'committer2/'
end
_td do
_a 'Committers', href: 'committer2/'
end
_td do
_ 'Search for committers by name, user id, or email address.'
_ ' Also includes pending ICLAs'
end
end
end
### members
_tr do
_td do
_a @members.length, href: 'members'
end
_td do
_a 'Members', href: 'members'
end
_td 'Active ASF members'
end
### PMCs
_tr do
_td do
_a @committees.length, href: 'committee/'
end
_td do
_a 'PMCs', href: 'committee/'
end
_td 'Active projects at the ASF'
end
### non-PMC committees
_tr do
_td do
_a @nonpmcs.length, href: 'nonpmc/'
end
_td do
_a 'nonPMCs', href: 'nonpmc/'
end
_td 'ASF Committees (non-PMC)'
end
### Podlings
_tr do
_td do
# Count only podlings currently in incubation; link shows the full list too.
_a @podlings.select {|podling| podling.status == 'current'}.length,
href: 'ppmc/'
end
_td do
_a 'Podlings', href: 'ppmc/'
end
_td! do
_span 'Active podlings at the ASF ('
_a @podlings.length, href: 'podlings'
_span ' total)'
end
end
### Groups
_tr do
_td do
_a @groups.length, href: 'group/'
end
_td do
_a 'Groups', href: 'group/'
end
_td 'Assorted other groups from various sources'
end
end
# Restricted links (ICLA search, org chart) — members/officers only.
if person.asf_member? or ASF.pmc_chairs.include? person
_hr
_p do
_a 'Search pending ICLAs', href: 'icla/'
_span.glyphicon.glyphicon_lock :aria_hidden, class: "text-primary", aria_label: "ASF Members and Officers"
end
_p do
_a 'Organization Chart ', href: 'orgchart/'
_span.glyphicon.glyphicon_lock :aria_hidden, class: "text-primary", aria_label: "ASF Members and Officers"
end
end
end
end
end
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.