text
stringlengths 3
1.05M
|
|---|
/*
* Copyright 2006 The Android Open Source Project
*
* Use of this source code is governed by a BSD-style license that can be
* found in the LICENSE file.
*/
#ifndef SkAnalyticEdge_DEFINED
#define SkAnalyticEdge_DEFINED
#include "SkEdge.h"
struct SkAnalyticEdge {
    // Similar to SkEdge, the conic edges will be converted to quadratic edges
    enum Type {
        kLine_Type,
        kQuad_Type,
        kCubic_Type
    };

    SkAnalyticEdge* fNext;  // intrusive doubly-linked list of active edges
    SkAnalyticEdge* fPrev;

    // During aaa_walk_edges, if this edge is a left edge,
    // then fRiteE is its corresponding right edge. Otherwise it's nullptr.
    SkAnalyticEdge* fRiteE;

    SkFixed fX;             // current x (updated by goY)
    SkFixed fDX;            // dx/dy slope in fixed point
    SkFixed fUpperX;        // The x value when y = fUpperY
    SkFixed fY;             // The current y
    SkFixed fUpperY;        // The upper bound of y (our edge is from y = fUpperY to y = fLowerY)
    SkFixed fLowerY;        // The lower bound of y (our edge is from y = fUpperY to y = fLowerY)
    SkFixed fDY;            // abs(1/fDX); may be SK_MaxS32 when fDX is close to 0.
                            // fDY is only used for blitting trapezoids.

    SkFixed fSavedX;        // For deferred blitting
    SkFixed fSavedY;        // For deferred blitting
    SkFixed fSavedDY;       // For deferred blitting

    int8_t  fCurveCount;    // only used by kQuad(+) and kCubic(-)
    uint8_t fCurveShift;    // applied to all Dx/DDx/DDDx except for fCubicDShift exception
    uint8_t fCubicDShift;   // applied to fCDx and fCDy only in cubic
    int8_t  fWinding;       // 1 or -1

    static const int kDefaultAccuracy = 2; // default accuracy for snapping

    // Round y to the nearest multiple of (SK_Fixed1 >> accuracy), i.e. a
    // quarter-pixel grid for the default accuracy of 2.
    static inline SkFixed SnapY(SkFixed y) {
        const int accuracy = kDefaultAccuracy;
        // This approach is safer than left shift, round, then right shift
        return ((unsigned)y + (SK_Fixed1 >> (accuracy + 1))) >> (16 - accuracy) << (16 - accuracy);
    }

    // Update fX, fY of this edge so fY = y
    inline void goY(SkFixed y) {
        if (y == fY + SK_Fixed1) {
            // Fast path: advancing by exactly one pixel row.
            fX = fX + fDX;
            fY = y;
        } else if (y != fY) {
            // Drop lower digits as our alpha only has 8 bits
            // (fDX and y - fUpperY may be greater than SK_Fixed1)
            fX = fUpperX + SkFixedMul(fDX, y - fUpperY);
            fY = y;
        }
    }

    // Advance by one snapped sub-row; y must be exactly (SK_Fixed1 >> yShift)
    // above the current fY (asserted below).
    inline void goY(SkFixed y, int yShift) {
        SkASSERT(yShift >= 0 && yShift <= kDefaultAccuracy);
        SkASSERT(fDX == 0 || y - fY == SK_Fixed1 >> yShift);
        fY = y;
        fX += fDX >> yShift;
    }

    // Record x/y/dY for deferred blitting.
    inline void saveXY(SkFixed x, SkFixed y, SkFixed dY) {
        fSavedX = x;
        fSavedY = y;
        fSavedDY = dY;
    }

    inline bool setLine(const SkPoint& p0, const SkPoint& p1);
    inline bool updateLine(SkFixed ax, SkFixed ay, SkFixed bx, SkFixed by, SkFixed slope);

#ifdef SK_DEBUG
    void dump() const {
        SkDebugf("edge: upperY:%d lowerY:%d y:%g x:%g dx:%g w:%d\n",
                 fUpperY, fLowerY, SkFixedToFloat(fY), SkFixedToFloat(fX),
                 SkFixedToFloat(fDX), fWinding);
    }

    // Sanity-check list linkage and basic edge invariants.
    void validate() const {
        SkASSERT(fPrev && fNext);
        SkASSERT(fPrev->fNext == this);
        SkASSERT(fNext->fPrev == this);
        SkASSERT(fUpperY < fLowerY);
        SkASSERT(SkAbs32(fWinding) == 1);
    }
#endif
};
struct SkAnalyticQuadraticEdge : public SkAnalyticEdge {
    SkQuadraticEdge fQEdge;

    // snap y to integer points in the middle of the curve to accelerate AAA path filling
    SkFixed fSnappedX, fSnappedY;

    bool setQuadratic(const SkPoint pts[3]);
    bool updateQuadratic();

    inline void keepContinuous() {
        // We use fX as the starting x to ensure the continuity.
        // Without it, we may break the sorted edge list.
        SkASSERT(SkAbs32(fX - SkFixedMul(fY - fSnappedY, fDX) - fSnappedX) < SK_Fixed1);
        SkASSERT(SkAbs32(fY - fSnappedY) < SK_Fixed1); // This may differ due to smooth jump
        fSnappedX = fX;
        fSnappedY = fY;
    }
};
struct SkAnalyticCubicEdge : public SkAnalyticEdge {
    SkCubicEdge fCEdge;

    SkFixed fSnappedY; // to make sure that y is increasing with smooth jump and snapping

    bool setCubic(const SkPoint pts[4]);
    bool updateCubic();

    inline void keepContinuous() {
        // Carry the current x back into the cubic state so the next update
        // starts exactly where this segment ended (keeps edges continuous).
        SkASSERT(SkAbs32(fX - SkFixedMul(fDX, fY - SnapY(fCEdge.fCy)) - fCEdge.fCx) < SK_Fixed1);
        fCEdge.fCx = fX;
        fSnappedY = fY;
    }
};
// Initialize this edge as the line segment p0 -> p1.
// Returns false for zero-height (horizontal) segments, which generate no coverage.
bool SkAnalyticEdge::setLine(const SkPoint& p0, const SkPoint& p1) {
    fRiteE = nullptr;

    // We must set X/Y using the same way (e.g., times 4, to FDot6, then to Fixed) as Quads/Cubics.
    // Otherwise the order of the edge might be wrong due to precision limit.
    const int accuracy = kDefaultAccuracy;
    const int multiplier = (1 << kDefaultAccuracy);
    SkFixed x0 = SkFDot6ToFixed(SkScalarToFDot6(p0.fX * multiplier)) >> accuracy;
    SkFixed y0 = SnapY(SkFDot6ToFixed(SkScalarToFDot6(p0.fY * multiplier)) >> accuracy);
    SkFixed x1 = SkFDot6ToFixed(SkScalarToFDot6(p1.fX * multiplier)) >> accuracy;
    SkFixed y1 = SnapY(SkFDot6ToFixed(SkScalarToFDot6(p1.fY * multiplier)) >> accuracy);

    // Edges always run top-to-bottom; the original direction is kept in fWinding.
    int winding = 1;
    if (y0 > y1) {
        SkTSwap(x0, x1);
        SkTSwap(y0, y1);
        winding = -1;
    }

    // are we a zero-height line?
    SkFDot6 dy = SkFixedToFDot6(y1 - y0);
    if (dy == 0) {
        return false;
    }

    SkFDot6 dx = SkFixedToFDot6(x1 - x0);
    SkFixed slope = QuickSkFDot6Div(dx, dy);
    SkFixed absSlope = SkAbs32(slope);

    fX = x0;
    fDX = slope;
    fUpperX = x0;
    fY = y0;
    fUpperY = y0;
    fLowerY = y1;
    // fDY caches abs(dy/dx); SK_MaxS32 is the sentinel when fDX is (close to) 0
    // (dx == 0 or slope == 0). Small slopes use the reciprocal lookup table.
    fDY = dx == 0 || slope == 0 ? SK_MaxS32 : absSlope < kInverseTableSize
                                  ? QuickFDot6Inverse::Lookup(absSlope)
                                  : SkAbs32(QuickSkFDot6Div(dy, dx));
    fCurveCount = 0;
    fWinding = SkToS8(winding);
    fCurveShift = 0;
    return true;
}
#endif
|
const { Component } = Shopware;
const { Criteria } = Shopware.Data;

/**
 * Override of the user-create page: restricts the language selection to
 * languages that are usable in the Administration — languages with no
 * Language Pack record, Language Pack languages flagged
 * `administrationActive`, or the system default language.
 */
Component.override('sw-users-permissions-user-create', {
    computed: {
        languageCriteria() {
            // Extend the inherited criteria with an OR filter instead of
            // replacing it.
            return this.$super('languageCriteria')
                .addFilter(Criteria.multi('OR', [
                    Criteria.equals('extensions.swagLanguagePackLanguage.id', null),
                    Criteria.equals('extensions.swagLanguagePackLanguage.administrationActive', true),
                    Criteria.equals('id', Shopware.Defaults.systemLanguageId),
                ]));
        },
    },
    methods: {
        onSave() {
            // This override is needed to fix the broken inheritance
            this.$super('onSave');
        },
    },
});
|
const router = require('express').Router();
const { Comment } = require('../../models');
const withAuth = require('../../utils/auth');
// GET /api/comments — return every comment as JSON.
router.get('/', async (req, res) => {
  try {
    const dbCommentData = await Comment.findAll();
    res.json(dbCommentData);
  } catch (err) {
    console.log(err);
    res.status(500).json(err);
  }
});
// POST /api/comments — create a comment for the logged-in user (auth required).
router.post('/', withAuth, (req, res) => {
  // check the session
  if (!req.session) {
    // Fix: previously the handler simply returned with no response when the
    // session was missing, leaving the request hanging until timeout.
    res.status(401).json({ message: 'No active session found' });
    return;
  }
  Comment.create({
    comment_text: req.body.comment_text,
    post_id: req.body.post_id,
    // use the id from the session
    user_id: req.session.user_id,
  })
    .then((dbCommentData) => res.json(dbCommentData))
    .catch((err) => {
      console.log(err);
      res.status(400).json(err);
    });
});
// DELETE /api/comments/:id — remove a single comment by id (auth required).
router.delete('/:id', withAuth, async (req, res) => {
  try {
    const dbCommentData = await Comment.destroy({
      where: { id: req.params.id },
    });
    // destroy() resolves to the number of rows removed; 0 means no match.
    if (!dbCommentData) {
      res.status(404).json({ message: 'No comment found with this id' });
      return;
    }
    res.json(dbCommentData);
  } catch (err) {
    console.log(err);
    res.status(500).json(err);
  }
});
module.exports = router;
|
import numpy as np
# sigmoid function
def nonlin(x, deriv=False):
    """Sigmoid activation.

    With deriv=True, x is assumed to already be a sigmoid OUTPUT and the
    derivative sigma'(z) = x * (1 - x) is returned instead.
    """
    if not deriv:
        return 1.0 / (1.0 + np.exp(-x))
    return x * (1.0 - x)
# input data
# Each row is one training example: two input bits plus a constant bias input.
x = np.array([
    [0,0,1],
    [0,1,1],
    [1,0,1],
    [1,1,1]
])
# Targets: column 0 is XOR of the two input bits, column 1 equals the first bit.
y = np.array([
    [0,0],
    [1,0],
    [1,1],
    [0,1]
])
print x
print y
# seed
# Fixed seed so runs are reproducible.
np.random.seed(1)
# synapses
# Weights initialised uniformly in [-1, 1): 3 inputs -> 4 hidden -> 2 outputs.
syn0 = 2 * np.random.random((3,4)) - 1
syn1 = 2 * np.random.random((4,2)) - 1
print syn0
print syn1
# training
for j in xrange(60000):
    # layers — forward pass through the two-layer network
    l0 = x
    l1 = nonlin(np.dot(l0, syn0))
    l2 = nonlin(np.dot(l1, syn1))
    # backpropagation
    l2_error = y - l2
    if (j % 10000) == 0:
        print 'Error: ' + str(np.mean(np.abs(l2_error)))
    # calculate deltas (error scaled by the sigmoid gradient)
    l2_delta = l2_error * nonlin(l2, deriv=True)
    l1_error = l2_delta.dot(syn1.T)
    l1_delta = l1_error * nonlin(l1, deriv=True)
    # update synapses (plain gradient step, implicit learning rate 1)
    syn1 += l1.T.dot(l2_delta)
    syn0 += l0.T.dot(l1_delta)
    if j == 0:
        # Shape sanity check, printed on the first iteration only.
        print syn1.shape
        print l1.T.shape
        print l2_delta.shape
        print syn0.shape
        print l0.T.shape
        print l1_delta.shape
print 'output after training'
print l2
print syn0
print syn1
print 'output after changed input'
# Re-run the forward pass on a single input using the trained weights.
x = np.array([
    [0,1,1]
])
# layers
l0 = x
l1 = nonlin(np.dot(l0, syn0))
l2 = nonlin(np.dot(l1, syn1))
print l2
|
# Copyright 2017-2019 EPAM Systems, Inc. (https://www.epam.com/)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from pipeline import Logger, S3Bucket, SampleSheetParser, EnvironmentParametersParser
from batch import AbstractFolderScanner, AbstractPipelineLauncher
import click
import os
import time
bucket = S3Bucket()
SAMPLE_ID = "Sample_ID"
SAMPLE_NAME = "Sample_Name"
SAMPLE_PROJECT = "Sample_Project"
class FolderScanner(AbstractFolderScanner):
    """Collects per-sample input files from an S3 folder by matching each
    listed file against sample-specific include/exclude glob patterns."""

    def __init__(self, folder, patterns, exclude_patterns, samples):
        AbstractFolderScanner.__init__(self, folder, patterns, exclude_patterns)
        self.samples = samples

    def format_sample_patterns(self, sample, patterns):
        # Substitute the sample's ID/name/project into every glob template.
        result = {}
        for pattern, pattern_list in patterns.iteritems():
            formatted = []
            for value in pattern_list:
                formatted.append(value.format(sample_id=sample[SAMPLE_ID],
                                              sample_name=sample[SAMPLE_NAME],
                                              sample_project=sample[SAMPLE_PROJECT]))
            result[pattern] = formatted
        return result

    def find_files(self, recursive=True):
        """Scan the folder and return {sample_name: {pattern_name: [paths]}}.

        Fails the task when at least one sample matched no files at all."""
        Logger.info("Starting parsing input directory: {}.".format(self.folder), task_name=self.TASK_NAME)
        all_files = bucket.ls_s3(self.folder, self.MAX_ATTEMPTS, recursive=recursive)
        patterns_files = {}
        if recursive:
            # Folder prefixes themselves may match patterns too.
            all_folders = self.get_folders(all_files)
            for folder in all_folders:
                self.check_file_match(self.samples, folder, patterns_files)
        for file in all_files:
            # recursive version of s3 ls returns path from bucket root
            # non-recursive ls returns path relative to the requested folder
            if recursive:
                file_name = file[len(self.get_path_without_bucket()) - 1:]
            else:
                file_name = file
            self.check_file_match(self.samples, file_name, patterns_files)
        Logger.info('Collected batch files: {}.'.format(str(patterns_files)), task_name=self.TASK_NAME)
        if len(patterns_files) != len(self.samples):
            self.fail_task("Failed to find all parameters for all samples.")
        Logger.success('Successfully collected batch files: {}.'.format(str(patterns_files)), task_name=self.TASK_NAME)
        return patterns_files

    def check_file_match(self, samples, file_name, patterns_files):
        """Record file_name under every (sample, pattern) pair it matches,
        unless it also matches that pattern's exclude globs."""
        for sample in samples:
            sample_name = sample[SAMPLE_NAME]
            patterns = self.format_sample_patterns(sample, self.patterns)
            exclude = self.format_sample_patterns(sample, self.exclude_patterns)
            for pattern_name, glob in patterns.iteritems():
                if self.match_patterns(file_name, glob):
                    if pattern_name in exclude:
                        # Bug fix: this used to reassign `exclude` itself to the
                        # glob list, corrupting the exclude lookup for every
                        # later pattern of the same sample.
                        exclude_globs = exclude[pattern_name]
                        if self.match_patterns(file_name, exclude_globs):
                            Logger.info("Skipping filename '{}' since it matches exclude patterns '{}'."
                                        .format(file_name, str(exclude_globs)))
                            continue
                    if sample_name not in patterns_files:
                        patterns_files[sample_name] = {}
                    if pattern_name not in patterns_files[sample_name]:
                        patterns_files[sample_name][pattern_name] = []
                    patterns_files[sample_name][pattern_name].append(os.path.join(self.folder, file_name))

    def get_folders(self, all_files):
        """Return the set of all folder prefixes (with trailing '/') that
        contain any of the listed files, including intermediate parents."""
        folders = set()
        for file in all_files:
            folders.add(os.path.dirname(file[len(self.get_path_without_bucket()) - 1:]))
        result = set()
        for folder in folders:
            result.add(folder + '/')
            current = folder
            while current:
                result.add(current + '/')
                current = os.path.dirname(current)
        return result
class PipelineLauncher(AbstractPipelineLauncher):
    """Schedules one child pipeline run per sample, keeping at most
    `nodes` runs in flight at any time."""
    def __init__(self, run_files, param_names, run_id, pipe_id, version, pipe_params, samples, param_types):
        AbstractPipelineLauncher.__init__(self, run_files, param_names, run_id, pipe_id, version, pipe_params,
                                          param_types)
        self.samples = samples
    def launch(self, nodes, instance_size, instance_disk, docker_image, cmd, wait_finish=False):
        # running: runs currently in flight; scheduled: total launched so far.
        running = 0
        scheduled = 0
        Logger.info('Starting {} sample(s) scheduling.'.format(len(self.samples)), task_name=self.TASK_NAME)
        while scheduled != len(self.samples):
            if running < nodes:
                # Capacity available: launch the next sample immediately.
                sample = self.samples[scheduled]
                self.launch_pipeline(self.run_files[sample[SAMPLE_NAME]], self.param_names,
                                     instance_size, instance_disk, docker_image, cmd, sample=sample)
                scheduled = scheduled + 1
                running = running + 1
            else:
                # At capacity: poll until some runs finish before scheduling more.
                Logger.info('Processing {} sample(s).'.format(running), task_name=self.TASK_NAME)
                Logger.info('Total scheduled {} sample(s).'.format(scheduled), task_name=self.TASK_NAME)
                time.sleep(self.POLL_TIMEOUT)
                running = self.get_running_samples()
        # Drain any remaining child run before (optionally) waiting for all
        # sample runs to complete.
        while self.child_run_active():
            Logger.info('Waiting a child run {} to finish.'.format(self.child_id), task_name=self.TASK_NAME)
            time.sleep(self.POLL_TIMEOUT)
        if wait_finish:
            Logger.info('Waiting for all runs to finish.', task_name=self.TASK_NAME)
            self.wait_all_samples_finish()
        Logger.success('Successfully scheduled {} sample(s).'.format(scheduled), task_name=self.TASK_NAME)
@click.command(name='batch', context_settings=dict(ignore_unknown_options=True))
@click.option('-c', '--cmd', help='Command to run sample processing', default=None)
@click.option('-s', '--sample_sheet', help='SampleSheet to run sample processing', default=None)
def batch(cmd, sample_sheet):
    """CLI entry point: read run configuration from environment variables,
    collect per-sample input files from S3 and launch one pipeline run per
    sample, at most `nodes` concurrently."""
    # 'nodes' is excluded from generic pipeline params and handled separately.
    pipeline_parameters, file_patterns, exclude_patterns, param_types = EnvironmentParametersParser({'nodes'})\
        .collect_params_from_env()
    nodes = int(EnvironmentParametersParser.get_env_value('nodes', 1))
    if nodes < 1:
        raise RuntimeError('Number of nodes should be greater than zero.')
    pipeline = EnvironmentParametersParser.get_env_value('PIPELINE_ID')
    version = EnvironmentParametersParser.get_env_value('PIPELINE_VERSION')
    run_id = EnvironmentParametersParser.get_env_value('RUN_ID')
    fastq_dir = bucket.normalize_path(EnvironmentParametersParser.get_env_value('fastq_dir'))
    instance_size = EnvironmentParametersParser.get_env_value('instance_size')
    instance_disk = EnvironmentParametersParser.get_env_value('instance_disk')
    docker_image = EnvironmentParametersParser.get_env_value('docker_image')
    # Samples come from the sample sheet; their files are discovered in S3.
    samples = SampleSheetParser(sample_sheet, [SAMPLE_ID, SAMPLE_NAME, SAMPLE_PROJECT]).parse_sample_sheet()
    run_files = FolderScanner(fastq_dir, file_patterns, exclude_patterns, samples).find_files()
    wait_finish = EnvironmentParametersParser.has_flag('wait_finish')
    PipelineLauncher(run_files, file_patterns.keys(), run_id, pipeline, version, pipeline_parameters,
                     samples, param_types)\
        .launch(nodes, instance_size, instance_disk, docker_image, cmd, wait_finish=wait_finish)
if __name__ == '__main__':
    batch()
|
import React from 'react'
// eslint-disable-next-line
import { Link } from 'gatsby'
import Layout from '../components/layout'
// Static portfolio page for the "Second Chance Animal Services" project:
// hero header, project screenshot, project facts and a client testimonial.
// NOTE(review): the anchor URL below ends with a double slash
// ("…secondchanceanimals.org//") — presumably a typo; confirm before changing
// since it is user-visible markup.
const Secondchance = () => (
  <Layout>
    <section className="pageheader-default text-center">
      <div className="semitransparentbg">
        <h1 className="animated fadeInLeftBig notransition">Second Chance Animal Services </h1>
        <p className="animated fadeInRightBig notransition container page-description">
          A large vet network <br></br>
          Second Chance Animal Services MA.
        </p>
      </div>
    </section>
    <div className="wrapsemibox">
      <div className="container">
        <div className="row">
          <div className="span8">
            <div className="secondchance-image" id="secondchance-image">
            </div>
          </div>
        </div>
      </div>
      <section className="container">
        <div className="row">
          <div className="col-md-8 animated fadeInLeft notransition">
            <h1 className="smalltitle">
              <span><a href="https://www.secondchanceanimals.org//">Second Chance Animal Services </a></span>
            </h1>
            <p>
              <span className="drop-cap round">S</span>econd Chance Animal Services a no-kill, non-profit organization,
              helping over 37,400 pets, through adoption, low cost spay/neuter, high-quality veterinary care for all community outreach, education, training, a pet food pantry, and other services.
            </p>
            <br />
            <p>
              <b>Year Creation</b> : 2018 <br />
              <b>Client's Name</b> : Second Chance Animal Services<br />
              <b>Web Category</b> : Non-profit web site <br />
              <b>Contribution</b> : WP Dev, Design and Consultation<br />
            </p>
          </div>
          <div className="col-md-4 animated fadeInRight notransition">
            <h1 className="smalltitle">
              <span>Testimonial</span>
            </h1>
            <blockquote>
              <p>
                The client said
              </p>
              <p>
                "With Luke expert consulting we are able to service more animals and owner."<br/><b></b>
              </p>
              <p>
                <small><b> R.R.T. -SecondChance </b></small>
              </p>
            </blockquote>
          </div>
        </div>
      </section>
    </div>
  </Layout>
)
export default Secondchance
|
import React from 'react';
import {Button,Checkbox,FormControl,FormGroup,Form} from 'react-bootstrap';
/**
 * Trigger a client-side download of `strData` under `strFileName`.
 * Tries msSaveBlob (IE10), then an HTML5 anchor[download], and finally a
 * data-URI iframe for legacy Chrome/Firefox.
 *
 * @param {string} strData     payload to save
 * @param {string} strFileName suggested file name
 * @param {string} [strMimeType] MIME type (defaults to octet-stream)
 */
function download(strData, strFileName, strMimeType) {
    var doc = document;
    var mime = strMimeType || "application/octet-stream";
    // IE10: proprietary blob-save API.
    if (navigator.msSaveBlob) {
        return navigator.msSaveBlob(new Blob([strData], {type: mime}), strFileName);
    }
    var anchor = doc.createElement("a");
    // HTML5 path: anchor with a `download` attribute.
    if ('download' in anchor) {
        anchor.href = "data:" + mime + "," + encodeURIComponent(strData);
        anchor.setAttribute("download", strFileName);
        anchor.innerHTML = "downloading...";
        doc.body.appendChild(anchor);
        // Click on the next tick, then clean the anchor up.
        setTimeout(function() {
            anchor.click();
            doc.body.removeChild(anchor);
        }, 66);
        return true;
    }
    // Legacy fallback: navigate a temporary hidden iframe to the data URI.
    var frame = doc.createElement("iframe");
    doc.body.appendChild(frame);
    frame.src = "data:" + mime + "," + encodeURIComponent(strData);
    setTimeout(function() {
        doc.body.removeChild(frame);
    }, 333);
    return true;
}
export class StrategyControl extends React.Component {
constructor(props) {
super(props);
this.start = this.start.bind(this);
this.stop = this.stop.bind(this);
this.removeRow = this.removeRow.bind(this);
this.exportParams = this.exportParams.bind(this);
}
post(url, data) {
$.post(url, data);
}
start() {
var data = this.props.data;
this.post("/strategy/start", data);
}
stop() {
var data = this.props.data;
this.post("/strategy/stop", data);
}
removeRow() {
this.props.colDef.parent.removeRow(this.props);
}
exportParams() {
download(JSON.stringify(this.props.data),
this.props.data['id'] + '.json');
}
render() {
var status = this.props.data.status;
var start_disabled = true;
var pause_disabled = true;
var stop_disabled = true;
var remove_row_disabled = true;
if (status == undefined || status == "stopped"
|| status == "error" || status == "done") {
start_disabled = false;
remove_row_disabled = false;
} else if (status == "paused") {
start_disabled = false;
} else if (status == "running") {
pause_disabled = false;
stop_disabled = false;
}
return (
<div>
<Button onClick={this.start}
disabled={start_disabled}>Start</Button>
<Button onClick={this.stop}
disabled={stop_disabled}>Stop</Button>
<Button onClick={this.removeRow}
disabled={remove_row_disabled}>Remove row</Button>
</div>
);
}
}
/**
 * Row-level backtest controls. The backtest report is rendered into a newly
 * opened browser window that shows a loading indicator while the server
 * generates the HTML.
 */
export class BacktestControl extends React.Component {
    constructor(props) {
        super(props);
        this.backtest = this.backtest.bind(this);
        this.removeRow = this.removeRow.bind(this);
        this.exportParams = this.exportParams.bind(this);
    }
    // POST `data` to `url` and stream the HTML response into a fresh window.
    // Relies on the page-global jQuery `$`.
    post(url, data) {
        var myWindow = window.open("", "Backtest " + Date.now().toString());
        myWindow.document.open();
        myWindow.document.write("<img src='/static/loading_anim.gif' width='20' height='20' />Generating backtest");
        myWindow.document.close();
        $.post(url, data, function(data) {
            // Replace the loading placeholder with the generated report.
            myWindow.document.open();
            myWindow.document.write(data);
            myWindow.document.close();
        });
    }
    backtest() {
        var data = this.props.data;
        this.post("/backtest", data);
    }
    // Ask the owning grid (via colDef) to delete this row.
    removeRow() {
        this.props.colDef.parent.removeRow(this.props);
    }
    // Save the row's parameters as <id>.json.
    exportParams() {
        download(JSON.stringify(this.props.data),
            this.props.data['id'] + '.json');
    }
    render() {
        return (
            <div>
            <Button onClick={this.backtest}>Backtest</Button>
            <Button onClick={this.removeRow}>Remove row</Button>
            <Button onClick={this.exportParams}>Export parameters</Button>
            </div>
        );
    }
}
/** True when `obj` parses to a number via parseFloat (e.g. "3.5", 10). */
export function isNumber(obj) {
    // parseFloat yields NaN for non-numeric input; Number.isNaN avoids the
    // coercing global isNaN.
    return !Number.isNaN(Number.parseFloat(obj));
}
/**
 * Format `n` with `d` decimal places and comma thousands separators.
 * Fix: the precision argument `d` was previously ignored (hard-coded 2);
 * it now defaults to 2 when omitted, preserving existing call sites.
 */
export function formatNumber(n, d) {
    const places = d == null ? 2 : d;
    // Insert a comma after any digit followed by groups of three digits
    // before the decimal point.
    return Number(n).toFixed(places).replace(/(\d)(?=(\d{3})+\.)/g, '$1,');
}
/** Grid cell renderer: numeric values get 2-dp comma formatting, others pass through. */
export function renderNumber(params) {
    const value = params.value;
    return isNumber(value) ? formatNumber(value, 2) : value;
}
/** Grid cell renderer: map side codes 66 ('B') / 83 ('S') to Buy / Sell. */
export function renderBuySell(params) {
    const code = params.value;
    // Loose equality is intentional: matches both numeric and string codes.
    if (code == 66) {
        return "Buy";
    }
    if (code == 83) {
        return "Sell";
    }
    return code;
}
/** Grid cell renderer: convert a numeric char code to its character. */
export function renderChar(params) {
    return String.fromCharCode(params.value);
}
/**
 * Left-pad `num` with zeros to `places` characters.
 * Uses String.prototype.padStart instead of the old Array-join trick;
 * values already at/over `places` characters are returned unchanged.
 */
function zeroPad(num, places) {
    return num.toString().padStart(places, "0");
}
/** Render a unix timestamp (seconds) as e.g. "Mon 05 Aug 2019 13:45:09" in local time. */
export function renderDateTime(params) {
    const d = new Date(parseInt(params.value) * 1000);
    const MONTHS = ['Jan','Feb','Mar','Apr','May','Jun','Jul','Aug','Sep','Oct','Nov','Dec'];
    const WDAYS = ['Sun', 'Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat'];
    const datePart = [WDAYS[d.getDay()], zeroPad(d.getDate(), 2), MONTHS[d.getMonth()], d.getFullYear()].join(' ');
    const timePart = [zeroPad(d.getHours(), 2), zeroPad(d.getMinutes(), 2), zeroPad(d.getSeconds(), 2)].join(':');
    return datePart + ' ' + timePart;
}
/** Render a unix timestamp (seconds) as e.g. "Mon 05 Aug 2019" in local time. */
export function renderDate(params) {
    const d = new Date(parseInt(params.value) * 1000);
    const MONTHS = ['Jan','Feb','Mar','Apr','May','Jun','Jul','Aug','Sep','Oct','Nov','Dec'];
    const WDAYS = ['Sun', 'Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat'];
    return [WDAYS[d.getDay()], zeroPad(d.getDate(), 2), MONTHS[d.getMonth()], d.getFullYear()].join(' ');
}
/** Render a unix timestamp (seconds) as "HH:MM:SS" in local time. */
export function renderTime(params) {
    const d = new Date(parseInt(params.value) * 1000);
    return [zeroPad(d.getHours(), 2), zeroPad(d.getMinutes(), 2), zeroPad(d.getSeconds(), 2)].join(':');
}
/** Grid cell renderer: raw-HTML link to the strategy's log page. */
export function renderLog(params) {
    return `<a href='/strategy/log/${params.data.id}' target='_blank'>Log</a>`;
}
/**
 * Left-pad `num` with zeros to `size` characters.
 * NOTE: duplicates zeroPad(); kept because it is part of the module's
 * exported interface. Uses padStart instead of the manual while-loop.
 */
export function pad(num, size) {
    return String(num).padStart(size, "0");
}
// Build and install the grid's column definitions.
//   l               - grid component; receives setState({columnDefs, defaultData})
//   start / finish  - fixed leading / trailing column definitions
//   d               - dynamic middle columns (from the server)
//   default_columns - seed object for per-field default values
// NOTE(review): mutates `d` (adds editable/volatile/cellEditor keys) and
// aliases `default_columns` as the defaults map — callers may observe both;
// confirm before changing to copies.
export function process_headers(l, start, finish, d, default_columns) {
    for (var i=0; i < d.length; i++) {
        d[i]['editable'] = true;
        d[i]['volatile'] = true;
        // Columns that declare a `select` list get a dropdown editor.
        if ("select" in d[i]) {
            d[i]['cellEditor'] = 'select';
            d[i]['cellEditorParams'] = {
                values: d[i]['select']
            }
        }
    }
    var items = start.concat(d).concat(finish);
    var defaultData = default_columns;
    // Collect per-column default values declared on any column definition.
    for (var i=0; i < items.length; i++) {
        if (items[i].defaultData != undefined) {
            defaultData[items[i].field] =
                items[i].defaultData;
        }
    }
    l.setState({columnDefs: items,
                defaultData: defaultData});
}
export var shortnumberwidth = 100;
export var actionBoxWidth = 300;
|
/*
Copyright (c) 2010, Yahoo! Inc. All rights reserved.
Code licensed under the BSD License:
http://developer.yahoo.com/yui/license.html
version: 3.1.1
build: 47
*/
// Generated YUI 3.1.1 Japanese locale bundle for the datatype-date-format
// module (minified build artifact — do not hand-edit).
YUI.add("lang/datatype-date-format_ja",function(A){A.Intl.add("datatype-date-format","ja",{"a":["日","月","火","水","木","金","土"],"A":["日曜日","月曜日","火曜日","水曜日","木曜日","金曜日","土曜日"],"b":["1月","2月","3月","4月","5月","6月","7月","8月","9月","10月","11月","12月"],"B":["1月","2月","3月","4月","5月","6月","7月","8月","9月","10月","11月","12月"],"c":"%Y年%m月%d日(%a)%k時%M分%S秒 %Z","p":["午前","午後"],"P":["午前","午後"],"x":"%y/%m/%d","X":"%k時%M分%S秒"});},"3.1.1");
|
#!/usr/bin/env python3
"""Home Assistant setup script."""
from datetime import datetime as dt
from setuptools import find_packages, setup
import homeassistant.const as hass_const
# --- Project metadata ---------------------------------------------------
PROJECT_NAME = "Home Assistant"
PROJECT_PACKAGE_NAME = "homeassistant"
PROJECT_LICENSE = "Apache License 2.0"
PROJECT_AUTHOR = "The Home Assistant Authors"
# Copyright year tracks the build date.
PROJECT_COPYRIGHT = f" 2013-{dt.now().year}, {PROJECT_AUTHOR}"
PROJECT_URL = "https://www.home-assistant.io/"
PROJECT_EMAIL = "hello@home-assistant.io"
PROJECT_GITHUB_USERNAME = "home-assistant"
PROJECT_GITHUB_REPOSITORY = "core"
PYPI_URL = f"https://pypi.python.org/pypi/{PROJECT_PACKAGE_NAME}"
GITHUB_PATH = f"{PROJECT_GITHUB_USERNAME}/{PROJECT_GITHUB_REPOSITORY}"
GITHUB_URL = f"https://github.com/{GITHUB_PATH}"
# Source archive for the exact version being built (version comes from
# homeassistant.const).
DOWNLOAD_URL = f"{GITHUB_URL}/archive/{hass_const.__version__}.zip"
PROJECT_URLS = {
    "Bug Reports": f"{GITHUB_URL}/issues",
    "Dev Docs": "https://developers.home-assistant.io/",
    "Discord": "https://discordapp.com/invite/c5DvZ4e",
    "Forum": "https://community.home-assistant.io/",
}
PACKAGES = find_packages(exclude=["tests", "tests.*"])
# --- Pinned runtime requirements -----------------------------------------
REQUIRES = [
    "aiohttp==3.7.1",
    "astral==1.10.1",
    "async_timeout==3.0.1",
    "attrs==19.3.0",
    "bcrypt==3.1.7",
    "certifi>=2020.6.20",
    "ciso8601==2.1.3",
    "httpx==0.16.1",
    "importlib-metadata==1.6.0;python_version<'3.8'",
    "jinja2>=2.11.2",
    "PyJWT==1.7.1",
    # PyJWT has loose dependency. We want the latest one.
    "cryptography==3.2",
    "pip>=8.0.3",
    "python-slugify==4.0.1",
    "pytz>=2020.1",
    "pyyaml==5.3.1",
    "requests==2.24.0",
    "ruamel.yaml==0.15.100",
    "voluptuous==0.12.0",
    "voluptuous-serialize==2.4.0",
    "yarl==1.4.2",
]
# Minimum Python version is declared in homeassistant.const.
MIN_PY_VERSION = ".".join(map(str, hass_const.REQUIRED_PYTHON_VER))
setup(
    name=PROJECT_PACKAGE_NAME,
    version=hass_const.__version__,
    url=PROJECT_URL,
    download_url=DOWNLOAD_URL,
    project_urls=PROJECT_URLS,
    author=PROJECT_AUTHOR,
    author_email=PROJECT_EMAIL,
    packages=PACKAGES,
    include_package_data=True,
    zip_safe=False,
    install_requires=REQUIRES,
    python_requires=f">={MIN_PY_VERSION}",
    test_suite="tests",
    entry_points={"console_scripts": ["hass = homeassistant.__main__:main"]},
)
|
import json
from pathlib import Path
import cv2
import birdvision.character as character
from birdvision.node import Node
from birdvision.testing import TestResult
def run():
    """Yield a TestResult for every character-recognition test case.

    Reads data/tests/character.json — a mapping of image filename to
    {finder name -> expected string} — runs each named finder on the
    corresponding image and compares the recognized text with the
    expectation.
    """
    test_cases = json.loads(Path('data/tests/character.json').read_text())
    char_model = character.CharacterModel()
    char_finders = character.finders_from_model(char_model)
    by_name = {finder.name: finder for finder in char_finders}
    for fp, case in test_cases.items():
        # NOTE(review): cv2.imread returns None for unreadable paths — the
        # test images are assumed to always exist; confirm upstream.
        img = cv2.imread('data/tests/character/' + fp)
        for key, expected in case.items():
            frame = Node(img)
            finder = by_name[key]
            string = finder(frame)
            actual = string.to_str()
            yield TestResult(fp, name=finder.name, frame=frame, data=string, ok=actual == expected, actual=actual,
                             expected=expected, relevant_nodes=string.nodes)
|
def hangman(secretWord):
    '''
    secretWord: string, the secret word to guess.
    Starts up an interactive game of Hangman.
    * At the start of the game, let the user know how many
      letters the secretWord contains.
    * Ask the user to supply one guess (i.e. letter) per round.
    * The user should receive feedback immediately after each guess
      about whether their guess appears in the computers word.
    * After each round, you should also display to the user the
      partially guessed word so far, as well as letters that the
      user has not yet guessed.
    Follows the other limitations detailed in the problem write-up.

    Relies on helpers defined elsewhere in this module:
    getAvailableLetters, getGuessedWord, isWordGuessed.
    '''
    print ("Welcome to the game, Hangman!")
    print ("I'm thinking of a word that is " + str(len(secretWord)) + " letters long.")
    lettersGuessed = ''  # every letter the player has tried so far
    guessesLeft = 8      # wrong guesses allowed before losing
    print ("------------")
    while True:
        print ("You have " + str(guessesLeft) + " guesses left.")
        print ("Available letters: " + getAvailableLetters(lettersGuessed))
        # NOTE(review): input is not lowercased or length-checked — an
        # uppercase or multi-character entry is treated as a wrong guess;
        # confirm this matches the assignment spec.
        guess = input("Please guess a letter: ")
        if guess in secretWord and guess not in lettersGuessed:
            lettersGuessed += guess
            print ("Good guess: " + getGuessedWord(secretWord, lettersGuessed))
        elif guess in lettersGuessed:
            # Repeated guess: no penalty, just remind the player.
            print ("Oops! You've already guessed that letter: " + getGuessedWord(secretWord, lettersGuessed))
        else:
            # Wrong letter: record it and spend one guess.
            lettersGuessed += guess
            print ("Oops! That letter is not in my word: " + getGuessedWord(secretWord, lettersGuessed))
            guessesLeft -= 1
        print ("------------")
        if guessesLeft <= 0:
            print ("Sorry, You've ran out of guesses. The word was " + secretWord + ".")
            break
        if isWordGuessed(secretWord, lettersGuessed):
            print ("Congratulations! You've won!")
            break
|
//
// Generated by class-dump 3.5 (64 bit) (Debug version compiled Sep 17 2017 16:24:48).
//
// class-dump is Copyright (C) 1997-1998, 2000-2001, 2004-2015 by Steve Nygard.
//
#import <objc/NSObject.h>
@class NSMutableArray, NSMutableDictionary;
@protocol OS_dispatch_queue;
// Cache of reusable GPU framebuffers (interface recovered by class-dump —
// see the generated-file header above; member roles below are inferred from
// names and should be confirmed against the implementation).
@interface GPUImageFramebufferCache : NSObject
{
    NSMutableDictionary *framebufferCache;       // presumably hash -> cached framebuffer(s); confirm
    NSMutableDictionary *framebufferTypeCounts;  // presumably hash -> count of cached entries; confirm
    NSMutableArray *activeImageCaptureList;      // framebuffers pinned while an image capture is in flight
    id memoryWarningObserver;                    // notification observer; presumably purges on memory warning — confirm
    NSObject<OS_dispatch_queue> *framebufferCacheQueue; // dispatch queue guarding cache access
}

- (void).cxx_destruct;
- (void)removeFramebufferFromActiveImageCaptureList:(id)arg1;
- (void)addFramebufferToActiveImageCaptureList:(id)arg1;
- (void)purgeAllUnassignedFramebuffers;
- (void)returnFramebufferToCache:(id)arg1;
- (id)fetchFramebufferForSize:(struct CGSize)arg1 onlyTexture:(_Bool)arg2;
- (id)fetchFramebufferForSize:(struct CGSize)arg1 textureOptions:(struct GPUTextureOptions)arg2 onlyTexture:(_Bool)arg3;
- (id)hashForSize:(struct CGSize)arg1 textureOptions:(struct GPUTextureOptions)arg2 onlyTexture:(_Bool)arg3;
- (void)dealloc;
- (id)init;
@end
|
'use strict';
// Minimal Express API server: connects Mongoose to the configured database,
// wires up passport and body parsing, and mounts the user routes.
var express = require('express');
var mongoose = require('mongoose');
var bodyParser = require('body-parser');
var passport = require('passport');
var config = require('./db');
var users = require('./routes/user');

// Connect up front; note the server still starts even if the connection
// fails — the error is only logged.
mongoose.connect(config.DB, { useNewUrlParser: true }).then(function () {
    console.log('Database is connected');
}, function (err) {
    console.log('Can not connect to the database' + err);
});

var app = express();
app.use(passport.initialize());
// Passport strategies are configured in ./passport.
require('./passport')(passport);
app.use(bodyParser.urlencoded({ extended: false }));
app.use(bodyParser.json());
app.use('/api/users', users);

// Simple landing / health-check route.
app.get('/', function (req, res) {
    res.send('hello');
});

var PORT = process.env.PORT || 5000;
app.listen(PORT, function () {
    console.log('Server is running on PORT ' + PORT);
});
|
from hamcrest.core.base_matcher import BaseMatcher
from hamcrest.core.helpers.wrap_matcher import is_matchable_type
__author__ = "Jon Reid"
__copyright__ = "Copyright 2011 hamcrest.org"
__license__ = "BSD, see License.txt"
import types
class IsInstanceOf(BaseMatcher):
    """Matcher that checks ``isinstance(item, expected_type)``."""
    def __init__(self, expected_type):
        # Reject arguments isinstance() itself could not accept (must be a
        # type or a tuple of classes and types).
        if not is_matchable_type(expected_type):
            raise TypeError('IsInstanceOf requires type or a tuple of classes and types')
        self.expected_type = expected_type
    def _matches(self, item):
        return isinstance(item, self.expected_type)
    def describe_to(self, description):
        # A tuple of types has no __name__, so list its members instead.
        try:
            type_description = self.expected_type.__name__
        except AttributeError:
            type_description = "one of %s" % ",".join(str(e) for e in self.expected_type)
        description.append_text('an instance of ') \
                   .append_text(type_description)
def instance_of(atype):
    """Matches if object is an instance of, or inherits from, a given type.

    :param atype: The type to compare against as the expected type or a tuple
        of types.
    :returns: an :class:`IsInstanceOf` matcher.

    This matcher checks whether the evaluated object is an instance of
    ``atype`` or an instance of any class that inherits from ``atype``.
    Example::
        instance_of(str)
    """
    return IsInstanceOf(atype)
|
user_pref("browser.startup.homepage", "https://www.startpage.com");
user_pref("browser.search.defaultenginename", "DuckDuckGo");
|
#from log import log
import json
import csv
import os
import multiprocessing
from multiprocessing import Pool, cpu_count
# Default location of the ConceptNet assertions dump (Windows path).
csv_path = 'E:\\conceptnet\\assertions.csv'
def job_split(**kw):
    '''
    Split the CSV at ``path`` into one contiguous byte-range job per CPU.

    Keyword args:
        path: file to split (default: ``csv_path``).
        cpu_count: number of jobs (default: ``multiprocessing.cpu_count()``).

    Returns a list of dicts, each carrying ``job_index``, ``byte_start`` and
    ``byte_end`` (both inclusive) plus a copy of the supplied kwargs. The
    docstring previously advertised ``start``/``end`` keys, which never
    matched the code. E.g. for a 100-byte file and 2 CPUs::

        >>> job_split(path=csv_path, cpu_count=2)
        [{'job_index': 0, 'byte_start': 0, 'byte_end': 49, ...},
         {'job_index': 1, 'byte_start': 50, 'byte_end': 99, ...}]
    '''
    total_bytes = os.stat(kw.get('path', csv_path)).st_size
    count = kw.get('cpu_count', cpu_count())
    # Floor division keeps offsets integral on both Python 2 and 3.
    chunk = total_bytes // count
    jobs = []
    for i in range(count):
        # Fix: the final job previously ended at chunk*count - 1, silently
        # dropping the remainder bytes when the size is not divisible.
        end = total_bytes - 1 if i == count - 1 else chunk * (i + 1) - 1
        job = dict(
            job_index=i,
            byte_start=chunk * i,
            byte_end=end,
        )
        job.update(kw)
        jobs.append(job)
    return jobs
def spread(func, **kw):
    """Fan ``func`` out across one Pool worker per byte-range job.

    ``kw`` is forwarded to ``job_split``; ``func`` must be picklable (a
    module-level function). Returns the per-job results in job order.
    """
    jobs = job_split(**kw)
    pool = Pool(len(jobs))
    print('Spreading {} jobs to pool {}'.format(len(jobs), pool))
    result = pool.map(func, jobs)
    return result
# Module-level kill switch: the per-line callback can flip this (via the
# ``kill`` callable injected into its kwargs) to stop the read loop.
STOP = False
def parse_csv(path=csv_path, max_count=20000, iter_line=None, as_set=False,
        keep_sample=False, **iter_line_kwargs):
    """Stream a tab-separated assertions file and apply a per-line function.

    :param path: CSV/TSV file to read.
    :param max_count: stop after this many processed lines (None = no cap).
    :param iter_line: callable applied to each parsed row; defaults to the
        module-level ``clean_line``.
    :param as_set: accumulate results in a set instead of a tuple.
    :param keep_sample: with ``as_set``, also keep raw sample rows.
    :param iter_line_kwargs: extra kwargs for the callback; ``byte_start``/
        ``byte_end`` bound the region read, ``start_index`` skips rows.
    :return: ``(results, samples)`` tuple.

    NOTE(review): opens the module-global ``skip_stream`` file as a side
    effect, and appends to a tuple (``res += (dval,)``) which is O(n^2)
    for large runs -- preserved as-is.
    """
    global skip_stream
    start = iter_line_kwargs.get('byte_start', None)
    end = iter_line_kwargs.get('byte_end', None)
    if os.path.isfile(path) is False:
        raise Exception('Parse CSV is not a file: "{}"'.format(path))
    stream = open(path, 'rt', encoding='utf-8')
    if start is not None:
        # Seek may land mid-line; discard the partial line (printed for
        # visibility) so iteration starts on a clean row boundary.
        stream.seek(start)
        print( next(stream))
    if end is not None:
        print('Seek from {} to {}'.format(start, end))
    count = 0
    pc = 0  # lines since the last progress print
    res = () if as_set is not True else set()
    samples = ()
    # Global so clean_line() can record unparseable rows.
    skip_stream = open('./skips.csv', 'w')
    func = iter_line or clean_line
    print('Reading {} maximum lines'.format(max_count))
    iter_line_kwargs['row_index'] = iter_line_kwargs.get('row_index', 0)
    def kill_function():
        # Handed to the callback so it can abort the whole loop.
        print('KILL')
        global STOP
        STOP = True
    iter_line_kwargs['kill'] = kill_function
    start_index = iter_line_kwargs.get('start_index', -1)
    if start_index > -1:
        print('Starting at {}'.format(start_index))
    has_shown_start = False
    for raw_line in stream:
        if STOP is True:
            print('Stop by STOP flag.')
            break
        iter_line_kwargs['current_lineno'] = iter_line_kwargs['row_index'] + count
        # Parse exactly one tab-separated record from the raw line.
        line = next(csv.reader((raw_line,), delimiter='\t'))
        iter_line_kwargs['row_index'] += 1
        if max_count is not None and count > max_count:
            break
        if iter_line_kwargs['row_index'] < start_index:
            # Miss this
            continue
        if has_shown_start is False:
            print('Continuing functionality',)
            has_shown_start = True
        dval = func(line, parser_index=pc, **iter_line_kwargs)
        # print('?? ', stop,)
        if dval is None:
            if line[1] == '/r/ExternalURL':
                continue
            s = '/c/en/'
            # NOTE(review): ``line[0][2]``/``line[0][3]`` index single
            # characters of the URI string, so startswith('/c/en/') can
            # never be True -- likely meant line[2]/line[3]; the branch
            # is effectively dead.  FIXME: debugger breakpoint left in.
            if line[0][2].startswith(s) is True and line[0][3].startswith(s) is True:
                import pdb; pdb.set_trace() # breakpoint 1073775b //
        # NOTE(review): when dval is None and the row is not ExternalURL,
        # the None is still accumulated below -- confirm intended.
        if as_set is True:
            res.add(dval)
            if keep_sample is True and len(res) > len(samples):
                s = '/c/en/'
                if line[2].startswith(s) is True and line[3].startswith(s) is True:
                    samples += (line,)
        else:
            res += (dval,)
        count += 1
        pc += 1
        if end is not None and stream.tell() > end:
            print('Hit limit')
            break
        if pc > 100000:
            pc = 0
            print( 'count', count)
    stream.close()
    skip_stream.close()
    return res, samples
# WordNet part-of-speech postfix letters (the "n" in "/c/en/dog/n")
# mapped to readable names.
postfix_map = {
    "n": "noun",  # /n
    "v": "verb",  # /v
    "a": "adjective",  # /a
    "s": "adjective satellite",  # /s
    "r": "adverb",  # /r
}
POSTMAP = postfix_map.keys()
def clean_line(line, **kw):
    '''Return a cleaned CSV line as a tuple, or None if the row is skipped.

    :param line: sequence of 5 fields: uri, relation, start concept,
        end concept, JSON metadata string.
    :param kw: ignored (accepted so parse_csv can pass bookkeeping kwargs).
    :return: ``(sub_rel, start_word, end_word, weight, metadata_dict)``,
        or None for malformed rows, external URLs, or non-English concepts.
    '''
    try:
        uri, rel, start, end, js = line
    except ValueError:
        # Malformed row: record it and skip.  ``skip_stream`` is a module
        # global opened by parse_csv() -- assumes parse_csv ran first.
        skip_stream.write('{}\n'.format(line))
        return None
    if rel == '/r/ExternalURL':
        return None
    startl = start[3:5]   # language code, e.g. "en" in "/c/en/dog"
    endl = end[3:5]
    startw = start[6:]    # concept text after "/c/xx/"
    endw = end[6:]
    sub_rel = rel[3:]     # relation name after "/r/"
    langs = ['en']
    # NOTE(review): [:-1] drops the LAST character ("dog/n" -> "dog/"),
    # so the POSTMAP checks below rarely match; looks like it was meant
    # to extract the sense letter.  Preserved as-is -- confirm intent.
    sl = startw[:-1]
    el = endw[:-1]
    if (startl in langs) is False or (endl in langs) is False:
        return None
    if js is None:
        return None
    _json = json.loads(js)
    weight = _json['weight']
    _json['start'] = dict(lang=startl, word=startw)
    _json['end'] = dict(lang=endl, word=endw)
    if sl in POSTMAP:
        _json['start']['synset'] = postfix_map[sl]
    if el in POSTMAP:
        _json['end']['synset'] = postfix_map[el]
    # Drop bulky provenance fields.  Each delete is guarded: the original
    # unconditionally deleted 'sources' and raised KeyError when absent,
    # inconsistent with the guarded dataset/license deletes.
    _json.pop('sources', None)
    _json.pop('dataset', None)
    _json.pop('license', None)
    _json['id'] = uri
    return (sub_rel, startw, endw, weight, _json)
|
window.__NUXT__=(function(a,b,c,d){return {staticAssetsBase:"\u002Fstatic\u002F1597376630",layout:"default",error:b,state:{notification:{show:a,title:c,message:c},isShowSidebar:a,isSupportWebShare:a,headerTitle:"e-AlQur'an",page:"home",lastReadVerse:b,settingActiveTheme:{name:"dark",bgColor:"#071e3d",fgColor:"#fff"},settingShowTranslation:a,settingShowTafsir:a,settingShowMuqaddimah:d,surahFavorite:[]},serverRendered:d,routePath:"\u002Famp\u002F3\u002F137",config:{}}}(false,null,"",true));
|
"use strict";
// Babel runtime helper (auto-generated): spec-compliant `typeof` that
// reports "symbol" correctly in environments with polyfilled Symbol.
function _typeof(obj) { "@babel/helpers - typeof"; if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") { _typeof = function _typeof(obj) { return typeof obj; }; } else { _typeof = function _typeof(obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }; } return _typeof(obj); }
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.default = void 0;
var _react = _interopRequireDefault(require("react"));
var _propTypes = _interopRequireDefault(require("prop-types"));
var _htmlReactParser = _interopRequireDefault(require("html-react-parser"));
var _Skeleton = _interopRequireDefault(require("@material-ui/lab/Skeleton"));
var _remoteAssetsMap = _interopRequireDefault(require("../../remoteAssetsMap/remoteAssetsMap"));
var _drupalFooter = _interopRequireDefault(require("../../remoteAssets/drupal-footer.html"));
var _NeonContext = _interopRequireWildcard(require("../NeonContext/NeonContext"));
// ---- Babel module-interop helpers (auto-generated; do not edit by hand) ----
// Per-interop-mode WeakMap caches for wildcard-imported modules.
function _getRequireWildcardCache(nodeInterop) { if (typeof WeakMap !== "function") return null; var cacheBabelInterop = new WeakMap(); var cacheNodeInterop = new WeakMap(); return (_getRequireWildcardCache = function _getRequireWildcardCache(nodeInterop) { return nodeInterop ? cacheNodeInterop : cacheBabelInterop; })(nodeInterop); }
// Implements `import * as ns` semantics over a CommonJS module.
function _interopRequireWildcard(obj, nodeInterop) { if (!nodeInterop && obj && obj.__esModule) { return obj; } if (obj === null || _typeof(obj) !== "object" && typeof obj !== "function") { return { default: obj }; } var cache = _getRequireWildcardCache(nodeInterop); if (cache && cache.has(obj)) { return cache.get(obj); } var newObj = {}; var hasPropertyDescriptor = Object.defineProperty && Object.getOwnPropertyDescriptor; for (var key in obj) { if (key !== "default" && Object.prototype.hasOwnProperty.call(obj, key)) { var desc = hasPropertyDescriptor ? Object.getOwnPropertyDescriptor(obj, key) : null; if (desc && (desc.get || desc.set)) { Object.defineProperty(newObj, key, desc); } else { newObj[key] = obj[key]; } } } newObj.default = obj; if (cache) { cache.set(obj, newObj); } return newObj; }
// Implements `import x from` semantics over a CommonJS module.
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
// ---- Destructuring helpers backing `var [a] = expr` transpilation ----
function _slicedToArray(arr, i) { return _arrayWithHoles(arr) || _iterableToArrayLimit(arr, i) || _unsupportedIterableToArray(arr, i) || _nonIterableRest(); }
function _nonIterableRest() { throw new TypeError("Invalid attempt to destructure non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method."); }
function _unsupportedIterableToArray(o, minLen) { if (!o) return; if (typeof o === "string") return _arrayLikeToArray(o, minLen); var n = Object.prototype.toString.call(o).slice(8, -1); if (n === "Object" && o.constructor) n = o.constructor.name; if (n === "Map" || n === "Set") return Array.from(o); if (n === "Arguments" || /^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n)) return _arrayLikeToArray(o, minLen); }
function _arrayLikeToArray(arr, len) { if (len == null || len > arr.length) len = arr.length; for (var i = 0, arr2 = new Array(len); i < len; i++) { arr2[i] = arr[i]; } return arr2; }
function _iterableToArrayLimit(arr, i) { var _i = arr && (typeof Symbol !== "undefined" && arr[Symbol.iterator] || arr["@@iterator"]); if (_i == null) return; var _arr = []; var _n = true; var _d = false; var _s, _e; try { for (_i = _i.call(arr); !(_n = (_s = _i.next()).done); _n = true) { _arr.push(_s.value); if (i && _arr.length === i) break; } } catch (err) { _d = true; _e = err; } finally { try { if (!_n && _i["return"] != null) _i["return"](); } finally { if (_d) throw _e; } } return _arr; }
function _arrayWithHoles(arr) { if (Array.isArray(arr)) return arr; }
// Key under which the Drupal footer HTML asset lives in the shared
// remote-assets map (used to index NeonContext fetches/html below).
var DRUPAL_FOOTER_HTML = _remoteAssetsMap.default.DRUPAL_FOOTER_HTML.KEY;

// Footer component (transpiled output): renders the live Drupal footer
// once fetched and its CSS has loaded, a bundled fallback copy on fetch
// error, and a skeleton placeholder while loading.
var NeonFooter = function NeonFooter(props) {
  var drupalCssLoaded = props.drupalCssLoaded;

  // Destructure the first element (state) from the NeonContext hook.
  var _NeonContext$useNeonC = _NeonContext.default.useNeonContextState(),
      _NeonContext$useNeonC2 = _slicedToArray(_NeonContext$useNeonC, 1),
      _NeonContext$useNeonC3 = _NeonContext$useNeonC2[0],
      neonContextIsActive = _NeonContext$useNeonC3.isActive,
      footerFetch = _NeonContext$useNeonC3.fetches[DRUPAL_FOOTER_HTML],
      footerHTML = _NeonContext$useNeonC3.html[DRUPAL_FOOTER_HTML];

  // 'legacy' (context inactive) falls through to the drupal-fallback
  // branch of the render switch below.
  var renderMode = 'legacy';

  if (neonContextIsActive) {
    switch (footerFetch.status) {
      case _NeonContext.FETCH_STATUS.SUCCESS:
        renderMode = footerHTML && drupalCssLoaded ? 'drupal' : 'loading';
        break;

      case _NeonContext.FETCH_STATUS.ERROR:
        renderMode = drupalCssLoaded ? 'drupal-fallback' : 'loading';
        break;

      default:
        renderMode = 'loading';
        break;
    }
  }

  switch (renderMode) {
    case 'loading':
      // Placeholder skeleton sized roughly like the real footer.
      return /*#__PURE__*/_react.default.createElement("footer", {
        id: "footer"
      }, /*#__PURE__*/_react.default.createElement(_Skeleton.default, {
        variant: "rect",
        height: "300px",
        width: "100%"
      }));

    case 'drupal':
      // Freshly fetched footer HTML parsed into React elements.
      return /*#__PURE__*/_react.default.createElement("footer", {
        id: "footer"
      }, (0, _htmlReactParser.default)(footerHTML));

    case 'drupal-fallback':
    default:
      // Bundled static copy of the Drupal footer.
      return /*#__PURE__*/_react.default.createElement("footer", {
        id: "footer"
      }, (0, _htmlReactParser.default)(_drupalFooter.default));
  }
};

NeonFooter.propTypes = {
  drupalCssLoaded: _propTypes.default.bool
};
NeonFooter.defaultProps = {
  drupalCssLoaded: false
};
var _default = NeonFooter;
exports.default = _default;
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    # Auto-generated Django migration.  Applied migrations are a
    # historical record -- do not edit them retroactively; make a new
    # migration for any fixes.

    dependencies = [
        ('bugs', '0020_auto_20151123_1803'),
    ]

    operations = [
        migrations.AlterField(
            model_name='bug',
            name='create_time',
            # NOTE(review): default is a fixed bytestring timestamp rather
            # than a callable like timezone.now, so every new row gets the
            # same creation time -- confirm this is intentional.
            field=models.DateTimeField(default=b'2015-11-23 18:04:32', verbose_name=b'Time created'),
        ),
        migrations.AlterField(
            model_name='stage',
            name='update_time',
            # NOTE(review): "updeted" typo is user-visible, but fixing it
            # here would rewrite history -- correct it in a new migration.
            field=models.DateTimeField(default=b'2015-11-23 18:04:32', verbose_name=b'Time updeted'),
        ),
    ]
|
# coding=utf-8
# Copyright 2018 The Open AI Team Authors and The HuggingFace Inc. team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tokenization classes for OpenAI GPT."""
import json
import os
import warnings
from functools import lru_cache
import regex as re
from .tokenization_utils import AddedToken, PreTrainedTokenizer
from .tokenization_utils_base import BatchEncoding
from .tokenization_utils_fast import PreTrainedTokenizerFast
from .utils import logging
logger = logging.get_logger(__name__)
# File names the tokenizer reads/writes inside a vocabulary directory.
VOCAB_FILES_NAMES = {
    "vocab_file": "vocab.json",
    "merges_file": "merges.txt",
}

# Download URLs for each pretrained checkpoint's vocab and merges files.
PRETRAINED_VOCAB_FILES_MAP = {
    "vocab_file": {
        "gpt2": "https://s3.amazonaws.com/models.huggingface.co/bert/gpt2-vocab.json",
        "gpt2-medium": "https://s3.amazonaws.com/models.huggingface.co/bert/gpt2-medium-vocab.json",
        "gpt2-large": "https://s3.amazonaws.com/models.huggingface.co/bert/gpt2-large-vocab.json",
        "gpt2-xl": "https://s3.amazonaws.com/models.huggingface.co/bert/gpt2-xl-vocab.json",
        "distilgpt2": "https://s3.amazonaws.com/models.huggingface.co/bert/distilgpt2-vocab.json",
    },
    "merges_file": {
        "gpt2": "https://s3.amazonaws.com/models.huggingface.co/bert/gpt2-merges.txt",
        "gpt2-medium": "https://s3.amazonaws.com/models.huggingface.co/bert/gpt2-medium-merges.txt",
        "gpt2-large": "https://s3.amazonaws.com/models.huggingface.co/bert/gpt2-large-merges.txt",
        "gpt2-xl": "https://s3.amazonaws.com/models.huggingface.co/bert/gpt2-xl-merges.txt",
        "distilgpt2": "https://s3.amazonaws.com/models.huggingface.co/bert/distilgpt2-merges.txt",
    },
}

# Maximum input length (positional-embedding count) per checkpoint.
PRETRAINED_POSITIONAL_EMBEDDINGS_SIZES = {
    "gpt2": 1024,
    "gpt2-medium": 1024,
    "gpt2-large": 1024,
    "gpt2-xl": 1024,
    "distilgpt2": 1024,
}
@lru_cache()
def bytes_to_unicode():
    """
    Build the byte -> unicode-character table used by byte-level BPE.

    Every one of the 256 byte values maps to a distinct printable unicode
    character: printable ASCII and two Latin-1 ranges map to themselves,
    and every remaining byte is shifted up past 0xFF so that no
    whitespace or control character (which the bpe code barfs on) can
    ever appear inside a token.  Without this, avoiding UNKs would
    require a very large unicode vocabulary.
    """
    # Byte values that are kept as-is (printable, non-problematic).
    printable = (
        list(range(ord("!"), ord("~") + 1))
        + list(range(ord("¡"), ord("¬") + 1))
        + list(range(ord("®"), ord("ÿ") + 1))
    )
    mapping = {b: chr(b) for b in printable}
    # Remaining bytes get fresh code points starting at 256, assigned in
    # ascending byte order (preserves the original table's ordering).
    shift = 0
    for b in range(2 ** 8):
        if b not in mapping:
            mapping[b] = chr(2 ** 8 + shift)
            shift += 1
    return mapping
def get_pairs(word):
    """Return set of symbol pairs in a word.

    Word is represented as tuple of symbols (symbols being variable-length
    strings).

    :param word: tuple of symbol strings.
    :return: set of adjacent ``(left, right)`` symbol pairs.  Empty for
        words with fewer than two symbols; the original raised IndexError
        on an empty word.
    """
    # Zipping the word with itself shifted by one yields every adjacent
    # pair, and handles empty/single-symbol words without special-casing.
    return set(zip(word, word[1:]))
class GPT2Tokenizer(PreTrainedTokenizer):
    """
    Construct a GPT-2 tokenizer. Based on byte-level Byte-Pair-Encoding.

    This tokenizer has been trained to treat spaces like parts of the tokens (a bit like sentencepiece) so a word will
    be encoded differently whether it is at the beginning of the sentence (without space) or not:

    ::

        >>> from transformers import GPT2Tokenizer
        >>> tokenizer = GPT2Tokenizer.from_pretrained("gpt2")
        >>> tokenizer("Hello world")['input_ids']
        [15496, 995]
        >>> tokenizer(" Hello world")['input_ids']
        [18435, 995]

    You can get around that behavior by passing ``add_prefix_space=True`` when instantiating this tokenizer or when you
    call it on some text, but since the model was not pretrained this way, it might yield a decrease in performance.

    .. note::

        When used with ``is_split_into_words=True``, this tokenizer will add a space before each word (even the first one).

    This tokenizer inherits from :class:`~transformers.PreTrainedTokenizer` which contains most of the main methods.
    Users should refer to this superclass for more information regarding those methods.

    Args:
        vocab_file (:obj:`str`):
            Path to the vocabulary file.
        merges_file (:obj:`str`):
            Path to the merges file.
        errors (:obj:`str`, `optional`, defaults to :obj:`"replace"`):
            Paradigm to follow when decoding bytes to UTF-8. See `bytes.decode
            <https://docs.python.org/3/library/stdtypes.html#bytes.decode>`__ for more information.
        unk_token (:obj:`str`, `optional`, defaults to :obj:`<|endoftext|>`):
            The unknown token. A token that is not in the vocabulary cannot be converted to an ID and is set to be this
            token instead.
        bos_token (:obj:`str`, `optional`, defaults to :obj:`<|endoftext|>`):
            The beginning of sequence token.
        eos_token (:obj:`str`, `optional`, defaults to :obj:`<|endoftext|>`):
            The end of sequence token.
        add_prefix_space (:obj:`bool`, `optional`, defaults to :obj:`False`):
            Whether or not to add an initial space to the input. This allows to treat the leading word just as any
            other word. (GPT2 tokenizer detect beginning of words by the preceding space).
    """

    vocab_files_names = VOCAB_FILES_NAMES
    pretrained_vocab_files_map = PRETRAINED_VOCAB_FILES_MAP
    max_model_input_sizes = PRETRAINED_POSITIONAL_EMBEDDINGS_SIZES
    model_input_names = ["attention_mask"]

    def __init__(
        self,
        vocab_file,
        merges_file,
        errors="replace",
        unk_token="<|endoftext|>",
        bos_token="<|endoftext|>",
        eos_token="<|endoftext|>",
        add_prefix_space=False,
        **kwargs
    ):
        # Wrap plain-string special tokens in AddedToken so their
        # stripping behavior is explicit and uniform.
        bos_token = AddedToken(bos_token, lstrip=False, rstrip=False) if isinstance(bos_token, str) else bos_token
        eos_token = AddedToken(eos_token, lstrip=False, rstrip=False) if isinstance(eos_token, str) else eos_token
        unk_token = AddedToken(unk_token, lstrip=False, rstrip=False) if isinstance(unk_token, str) else unk_token
        super().__init__(bos_token=bos_token, eos_token=eos_token, unk_token=unk_token, **kwargs)

        with open(vocab_file, encoding="utf-8") as vocab_handle:
            self.encoder = json.load(vocab_handle)
        # Reverse map: id -> token string, used when decoding.
        self.decoder = {v: k for k, v in self.encoder.items()}
        self.errors = errors  # how to handle errors in decoding
        self.byte_encoder = bytes_to_unicode()
        self.byte_decoder = {v: k for k, v in self.byte_encoder.items()}
        # The first line of the merges file is a "#version:" header and the
        # trailing split element is empty -- both are dropped by [1:-1].
        with open(merges_file, encoding="utf-8") as merges_handle:
            bpe_merges = merges_handle.read().split("\n")[1:-1]
        bpe_merges = [tuple(merge.split()) for merge in bpe_merges]
        # Merge pair -> priority rank (lower rank merges first).
        self.bpe_ranks = dict(zip(bpe_merges, range(len(bpe_merges))))
        self.cache = {}  # memoizes bpe() results per token
        self.add_prefix_space = add_prefix_space

        # Should have added re.IGNORECASE so BPE merges can happen for capitalized versions of contractions
        self.pat = re.compile(r"""'s|'t|'re|'ve|'m|'ll|'d| ?\p{L}+| ?\p{N}+| ?[^\s\p{L}\p{N}]+|\s+(?!\S)|\s+""")

    @property
    def vocab_size(self):
        # Size of the base vocabulary (added tokens not included).
        return len(self.encoder)

    def get_vocab(self):
        # Base vocab merged with any tokens added after loading.
        return dict(self.encoder, **self.added_tokens_encoder)

    def bpe(self, token):
        """Apply byte-pair-encoding merges to a single pre-token and return
        the resulting space-joined BPE string (memoized in self.cache)."""
        if token in self.cache:
            return self.cache[token]
        word = tuple(token)
        pairs = get_pairs(word)

        if not pairs:
            return token

        # Repeatedly merge the lowest-ranked adjacent pair until no known
        # merge remains or the word collapses to a single symbol.
        while True:
            bigram = min(pairs, key=lambda pair: self.bpe_ranks.get(pair, float("inf")))
            if bigram not in self.bpe_ranks:
                break
            first, second = bigram
            new_word = []
            i = 0
            while i < len(word):
                # Copy symbols up to the next occurrence of `first`,
                # merging (first, second) where they are adjacent.
                try:
                    j = word.index(first, i)
                except ValueError:
                    new_word.extend(word[i:])
                    break
                else:
                    new_word.extend(word[i:j])
                    i = j

                if word[i] == first and i < len(word) - 1 and word[i + 1] == second:
                    new_word.append(first + second)
                    i += 2
                else:
                    new_word.append(word[i])
                    i += 1
            new_word = tuple(new_word)
            word = new_word
            if len(word) == 1:
                break
            else:
                pairs = get_pairs(word)
        word = " ".join(word)
        self.cache[token] = word
        return word

    def _tokenize(self, text):
        """ Tokenize a string. """
        bpe_tokens = []
        for token in re.findall(self.pat, text):
            token = "".join(
                self.byte_encoder[b] for b in token.encode("utf-8")
            )  # Maps all our bytes to unicode strings, avoiding control tokens of the BPE (spaces in our case)
            bpe_tokens.extend(bpe_token for bpe_token in self.bpe(token).split(" "))
        return bpe_tokens

    def _convert_token_to_id(self, token):
        """ Converts a token (str) in an id using the vocab. """
        return self.encoder.get(token, self.encoder.get(self.unk_token))

    def _convert_id_to_token(self, index):
        """Converts an index (integer) in a token (str) using the vocab."""
        return self.decoder.get(index)

    def convert_tokens_to_string(self, tokens):
        """ Converts a sequence of tokens (string) in a single string. """
        # Undo the byte->unicode mapping, then decode the raw bytes.
        text = "".join(tokens)
        text = bytearray([self.byte_decoder[c] for c in text]).decode("utf-8", errors=self.errors)
        return text

    def save_vocabulary(self, save_directory):
        """
        Save the vocabulary and special tokens file to a directory.

        Args:
            save_directory (:obj:`str`):
                The directory in which to save the vocabulary.

        Returns:
            :obj:`Tuple(str)`: Paths to the files saved.
        """
        if not os.path.isdir(save_directory):
            logger.error("Vocabulary path ({}) should be a directory".format(save_directory))
            return
        vocab_file = os.path.join(save_directory, VOCAB_FILES_NAMES["vocab_file"])
        merge_file = os.path.join(save_directory, VOCAB_FILES_NAMES["merges_file"])

        with open(vocab_file, "w", encoding="utf-8") as f:
            f.write(json.dumps(self.encoder, ensure_ascii=False))

        # Write merges in rank order; a gap in the indices means the
        # bpe_ranks mapping is inconsistent (warn, but keep writing).
        index = 0
        with open(merge_file, "w", encoding="utf-8") as writer:
            writer.write("#version: 0.2\n")
            for bpe_tokens, token_index in sorted(self.bpe_ranks.items(), key=lambda kv: kv[1]):
                if index != token_index:
                    logger.warning(
                        "Saving vocabulary to {}: BPE merge indices are not consecutive."
                        " Please check that the tokenizer is not corrupted!".format(merge_file)
                    )
                    index = token_index
                writer.write(" ".join(bpe_tokens) + "\n")
                index += 1

        return vocab_file, merge_file

    def prepare_for_tokenization(self, text, is_split_into_words=False, **kwargs):
        # Accept the deprecated `is_pretokenized` alias, then prepend a
        # space when requested so the first word is treated like any other.
        if "is_pretokenized" in kwargs:
            warnings.warn(
                "`is_pretokenized` is deprecated and will be removed in a future version, use `is_split_into_words` instead.",
                FutureWarning,
            )
            is_split_into_words = kwargs.pop("is_pretokenized")

        add_prefix_space = kwargs.pop("add_prefix_space", self.add_prefix_space)
        if is_split_into_words or add_prefix_space:
            text = " " + text
        return (text, kwargs)
class GPT2TokenizerFast(PreTrainedTokenizerFast):
    """
    Construct a "fast" GPT-2 tokenizer (backed by HuggingFace's `tokenizers` library). Based on byte-level
    Byte-Pair-Encoding.

    This tokenizer has been trained to treat spaces like parts of the tokens (a bit like sentencepiece) so a word will
    be encoded differently whether it is at the beginning of the sentence (without space) or not:

    ::

        >>> from transformers import GPT2TokenizerFast
        >>> tokenizer = GPT2TokenizerFast.from_pretrained("gpt2")
        >>> tokenizer("Hello world")['input_ids']
        [15496, 995]
        >>> tokenizer(" Hello world")['input_ids']
        [18435, 995]

    You can get around that behavior by passing ``add_prefix_space=True`` when instantiating this tokenizer or when you
    call it on some text, but since the model was not pretrained this way, it might yield a decrease in performance.

    .. note::

        When used with ``is_split_into_words=True``, this tokenizer needs to be instantiated with
        ``add_prefix_space=True``.

    This tokenizer inherits from :class:`~transformers.PreTrainedTokenizerFast` which contains most of the main
    methods. Users should refer to this superclass for more information regarding those methods.

    Args:
        vocab_file (:obj:`str`):
            Path to the vocabulary file.
        merges_file (:obj:`str`):
            Path to the merges file.
        errors (:obj:`str`, `optional`, defaults to :obj:`"replace"`):
            Paradigm to follow when decoding bytes to UTF-8. See `bytes.decode
            <https://docs.python.org/3/library/stdtypes.html#bytes.decode>`__ for more information.
        unk_token (:obj:`str`, `optional`, defaults to :obj:`<|endoftext|>`):
            The unknown token. A token that is not in the vocabulary cannot be converted to an ID and is set to be this
            token instead.
        bos_token (:obj:`str`, `optional`, defaults to :obj:`<|endoftext|>`):
            The beginning of sequence token.
        eos_token (:obj:`str`, `optional`, defaults to :obj:`<|endoftext|>`):
            The end of sequence token.
        add_prefix_space (:obj:`bool`, `optional`, defaults to :obj:`False`):
            Whether or not to add an initial space to the input. This allows to treat the leading word just as any
            other word. (GPT2 tokenizer detect beginning of words by the preceding space).
        trim_offsets (:obj:`bool`, `optional`, defaults to :obj:`True`):
            Whether or not the post-processing step should trim offsets to avoid including whitespaces.
    """

    vocab_files_names = VOCAB_FILES_NAMES
    pretrained_vocab_files_map = PRETRAINED_VOCAB_FILES_MAP
    max_model_input_sizes = PRETRAINED_POSITIONAL_EMBEDDINGS_SIZES
    model_input_names = ["attention_mask"]
    slow_tokenizer_class = GPT2Tokenizer

    def __init__(
        self,
        vocab_file,
        merges_file,
        unk_token="<|endoftext|>",
        bos_token="<|endoftext|>",
        eos_token="<|endoftext|>",
        add_prefix_space=False,
        **kwargs
    ):
        super().__init__(
            vocab_file,
            merges_file,
            unk_token=unk_token,
            bos_token=bos_token,
            eos_token=eos_token,
            add_prefix_space=add_prefix_space,
            **kwargs,
        )
        self.add_prefix_space = add_prefix_space

    def _batch_encode_plus(self, *args, **kwargs) -> BatchEncoding:
        # Accept the deprecated `is_pretokenized` alias.  BUG FIX: the
        # original unconditionally re-read `is_split_into_words` from
        # kwargs after the pop, discarding the deprecated value; the
        # `else:` mirrors _encode_plus below.
        if "is_pretokenized" in kwargs:
            warnings.warn(
                "`is_pretokenized` is deprecated and will be removed in a future version, use `is_split_into_words` instead.",
                FutureWarning,
            )
            is_split_into_words = kwargs.pop("is_pretokenized")
        else:
            is_split_into_words = kwargs.get("is_split_into_words", False)

        assert self.add_prefix_space or not is_split_into_words, (
            f"You need to instantiate {self.__class__.__name__} with add_prefix_space=True "
            "to use it with pretokenized inputs."
        )

        return super()._batch_encode_plus(*args, **kwargs)

    def _encode_plus(self, *args, **kwargs) -> BatchEncoding:
        # Same deprecation handling as _batch_encode_plus.
        if "is_pretokenized" in kwargs:
            warnings.warn(
                "`is_pretokenized` is deprecated and will be removed in a future version, use `is_split_into_words` instead.",
                FutureWarning,
            )
            is_split_into_words = kwargs.pop("is_pretokenized")
        else:
            is_split_into_words = kwargs.get("is_split_into_words", False)

        assert self.add_prefix_space or not is_split_into_words, (
            f"You need to instantiate {self.__class__.__name__} with add_prefix_space=True "
            "to use it with pretokenized inputs."
        )

        return super()._encode_plus(*args, **kwargs)
|
from Module import AbstractModule
class Module(AbstractModule):
    """Betsy report module: assembles normalization results and writes an
    HTML summary report."""

    def __init__(self):
        AbstractModule.__init__(self)

    def run(
        self, network, antecedents, out_attributes, user_options, num_cores,
        outfile):
        """Copy each antecedent's result into ``outfile`` (treated as a
        folder, created if missing) and write ``report.html`` there.

        Changes from the original: the vacuous ``try: ... except: raise``
        wrapper was removed (it was a no-op), and the report is written
        via a context manager so the file handle is always closed.
        """
        import os
        import shutil
        from Betsy import bie3
        outfile_folder = outfile
        outfile = os.path.join(outfile_folder, 'report.html')
        if not os.path.exists(outfile_folder):
            os.mkdir(outfile_folder)
        result_files = []
        for index, data_node in enumerate(antecedents):
            filename = data_node.identifier
            new_name = os.path.join(outfile_folder, os.path.split(filename)[-1])
            # rename one of the pcaplot filename
            if index == 2:
                new_name = os.path.join(outfile_folder,
                                        'after_' + os.path.split(filename)[-1])
            if os.path.isdir(filename):
                shutil.copytree(filename, new_name)
            else:
                shutil.copyfile(filename, new_name)
            result_files.append(os.path.split(new_name)[-1])

        data_node1, data_node2, data_node3, data_node4, data_node5, data_node6 = antecedents
        # write the report.html
        from genomicode import parselib
        from genomicode import htmllib
        #def highlight(s):
        #    from genomicode import htmllib
        #    return htmllib.SPAN(s, style="background-color:yellow")
        #def smaller(s):
        #    from genomicode import htmllib
        #    return htmllib.FONT(s, size=-1)

        lines = []
        w = lines.append  # shorthand: each call appends one HTML fragment
        w("<HTML>")
        title = "Normalization Results"
        x = parselib.remove_all_tags(title)
        w(htmllib.HEAD(htmllib.TITLE(x)))
        w("<BODY>")
        w(htmllib.CENTER(htmllib.H1(title)))
        w('I generated a file that contains the normalized gene expression values')
        w(htmllib.P())
        w(htmllib.A(result_files[0], result_files[0]))
        w(htmllib.P())
        w(htmllib.A("Methods", href="#methods_normalization"))
        w(htmllib.P())
        ##        if pipelines[1] == pipelines[2]:
        ##            w(htmllib.A(htmllib.IMG(height=500,
        ##                src=result_files[1]), href=result_files[1]))
        ##        else:
        # Before/after PCA plots side by side.
        rows = []
        x = htmllib.TR(htmllib.TD(htmllib.A(htmllib.IMG(height=500,
                                                        src=result_files[1]),
                                            href=result_files[1]),
                                  align="CENTER") +
                       htmllib.TD(htmllib.A(htmllib.IMG(height=500,
                                                        src=result_files[2]),
                                            href=result_files[2]),
                                  align="CENTER"))
        rows.append(x)
        x = htmllib.TR(htmllib.TH("Before",
                                  align="CENTER") + htmllib.TH("After",
                                                               align="CENTER"))
        rows.append(x)
        w(htmllib.TABLE("\n".join(rows),
                        border=None,
                        cellpadding=3,
                        cellspacing=0))
        w(htmllib.P())
        w(htmllib.P())
        name = 'Figure 1: This pca plot shows the similarities among your samples'
        w(htmllib.B(name))
        w(htmllib.P())
        w(htmllib.A(htmllib.IMG(height=500,
                                src=result_files[3]),
                    href=result_files[3]))
        w(htmllib.P())
        name = 'Figure 2: This boxplot shows the distribution of signal values'
        w(htmllib.B(name))
        w(htmllib.P())
        w(htmllib.A(htmllib.IMG(height=500,
                                src=result_files[4]),
                    href=result_files[4]))
        w(htmllib.P())
        name = 'Figure 3: This plot shows the values of ACTB and TUBB genes'
        w(htmllib.B(name))
        w(htmllib.P())
        w(htmllib.A(htmllib.IMG(height=500,
                                src=result_files[5]),
                    href=result_files[5]))
        name = 'Figure 4: This plot shows the average values control genes'
        w(htmllib.P())
        w(htmllib.B(name))

        # Methods section, with the analysis network rendered as an image.
        w(htmllib.HR())
        w(htmllib.A("<methods_normalization>", name="methods_normalization"))
        w(htmllib.CENTER(htmllib.H2("Methods")))
        w(htmllib.H3("1.Normalization File"))
        w('To generate this file, I ran the following analysis:')
        w(htmllib.P())
        bie3.plot_network_gv(os.path.join(outfile_folder, "network.png"),
                             network)
        w(htmllib.A(htmllib.IMG(height=500,
                                src="network.png"),
                    href="network.png"))
        w('I used the following parameters:')
        rows = []
        x = htmllib.TR(htmllib.TH("Parameter",
                                  align="LEFT") + htmllib.TH("Value",
                                                             align="LEFT"))
        rows.append(x)
        for key in data_node1.data.attributes.keys():
            x = htmllib.TR(htmllib.TD(key,
                                      align="LEFT") +
                           htmllib.TD(data_node1.data.attributes[key],
                                      align="LEFT"))
            rows.append(x)
        w(htmllib.TABLE("\n".join(rows),
                        border=1,
                        cellpadding=3,
                        cellspacing=0))
        w(htmllib.P())
        w(htmllib.H3("2. PCA analysis"))
        w('I made a principal component plot that shows the similarities among your samples.')
        w(htmllib.P())
        w(htmllib.H3("3. Signal distribution"))
        w('I made a box plot that shows the distribution of signal values.')
        w(htmllib.P())
        w(htmllib.H3("4. Control signal"))
        w('I made two plots that show the values of control signal.')
        w(htmllib.P())

        # Write out the footer.
        #time_str = parselib.pretty_date(time.time())
        #hostname = pretty_hostname()
        w(htmllib.P())
        w(htmllib.HR())
        #w(htmllib.EM(
        #    "This analysis was run on %s on %s. \n" %
        #    (time_str, hostname)))
        w("</BODY>")
        w("</HTML>")

        x = "\n".join(lines) + "\n"
        # Context manager guarantees the handle is closed even on error.
        with open(outfile, 'w') as handle:
            handle.write(x)

    def name_outfile(self, antecedents, user_options):
        # Fixed output folder name for this report module.
        filename = 'report'
        return filename
|
import React, { useState, useEffect } from "react"
const MarketoForm = ({ baseUrl, munchkinId, formId, formName }) => {
const [initialized, setInitialized] = useState(false)
const [isSent, setIsSent] = useState(false)
useEffect(() => {
if (!initialized) {
const scriptsToLoad = ["/js/forms2/js/forms2.min.js"]
Promise.all(
scriptsToLoad.map(scriptItem => {
return new Promise((resolve, reject) => {
const script = document.createElement("script")
script.src = `${baseUrl}${scriptItem}`
script.async = true
document.body.appendChild(script)
script.onload = () => resolve(scriptItem)
script.onerror = () => reject(scriptItem)
})
})
)
.then(item => {
const windowGlobal = typeof window !== "undefined" && window
if (windowGlobal && windowGlobal.MktoForms2) {
MktoForms2.loadForm(baseUrl, munchkinId, formId, o => {
o.onSuccess(() => {
// Set component to sent
setIsSent(true)
// return false to avoid page reload
return false
})
})
}
setInitialized(true)
})
.catch(() => console.log("An error ocurred loading Marketo scripts..."))
}
})
return !isSent ? (
<form id={formName} name={formName} />
) : (
<p>Thank You for Signing Up</p>
)
}
export default MarketoForm
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.shortcuts import render
from django.http import HttpResponse
# Create your views here.
def home(request):
    """Render the static home page (templates/home.html)."""
    return render(request, 'home.html')
def contact(request):
    """Render the contact page.

    :param request: incoming ``HttpRequest``
    :return: ``HttpResponse`` containing the rendered ``contact.html`` template
    """
    return render(request, 'contact.html')
|
#
# PySNMP MIB module MICOM-56KCSU-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/MICOM-56KCSU-MIB
# Produced by pysmi-0.3.4 at Wed May 1 14:12:11 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
Integer, OctetString, ObjectIdentifier = mibBuilder.importSymbols("ASN1", "Integer", "OctetString", "ObjectIdentifier")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
SingleValueConstraint, ConstraintsUnion, ValueRangeConstraint, ValueSizeConstraint, ConstraintsIntersection = mibBuilder.importSymbols("ASN1-REFINEMENT", "SingleValueConstraint", "ConstraintsUnion", "ValueRangeConstraint", "ValueSizeConstraint", "ConstraintsIntersection")
micom_oscar, = mibBuilder.importSymbols("MICOM-OSCAR-MIB", "micom-oscar")
NotificationGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ModuleCompliance")
Integer32, MibScalar, MibTable, MibTableRow, MibTableColumn, Gauge32, TimeTicks, NotificationType, ModuleIdentity, iso, IpAddress, Bits, Counter32, ObjectIdentity, Counter64, Unsigned32, MibIdentifier = mibBuilder.importSymbols("SNMPv2-SMI", "Integer32", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "Gauge32", "TimeTicks", "NotificationType", "ModuleIdentity", "iso", "IpAddress", "Bits", "Counter32", "ObjectIdentity", "Counter64", "Unsigned32", "MibIdentifier")
TextualConvention, DisplayString = mibBuilder.importSymbols("SNMPv2-TC", "TextualConvention", "DisplayString")
# ---------------------------------------------------------------------------
# MIB subtree roots under enterprises.335 (MICOM) for the 56K CSU.
# This file is pysmi-generated; do not hand-edit the object definitions.
# ---------------------------------------------------------------------------
micom_56kcsu = MibIdentifier((1, 3, 6, 1, 4, 1, 335, 1, 4, 28)).setLabel("micom-56kcsu")
csu56k_configuration = MibIdentifier((1, 3, 6, 1, 4, 1, 335, 1, 4, 28, 1)).setLabel("csu56k-configuration")
csu56k_status = MibIdentifier((1, 3, 6, 1, 4, 1, 335, 1, 4, 28, 2)).setLabel("csu56k-status")
# ---------------------------------------------------------------------------
# Operational (mcm*) configuration group: live, read-write CSU settings.
# Every object in this group is marked 'deprecated' by the MIB author.
# ---------------------------------------------------------------------------
mcm56kCsuCfgGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 335, 1, 4, 28, 1, 1))
mcm56kCsuCfgOperatingMode = MibScalar((1, 3, 6, 1, 4, 1, 335, 1, 4, 28, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("dds-pri-4Wire-56k", 1), ("dds-sc-WithSecondaryChannel-72k", 2), ("cc-64k-ClearChannel-64k", 3), ("dds-pri-2Wire-56k", 4))).clone('dds-pri-4Wire-56k')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mcm56kCsuCfgOperatingMode.setStatus('deprecated')
if mibBuilder.loadTexts: mcm56kCsuCfgOperatingMode.setDescription('NAME = ; DESC = The value of this object selects the 56K CSU \\ operating mode. (Operational); HELP = ; CAPABILITIES = NET_CFG, VPN_CFG;')
mcm56kCsuCfgClockingSource = MibScalar((1, 3, 6, 1, 4, 1, 335, 1, 4, 28, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("externalClock", 1), ("internalClock", 2))).clone('externalClock')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mcm56kCsuCfgClockingSource.setStatus('deprecated')
if mibBuilder.loadTexts: mcm56kCsuCfgClockingSource.setDescription('NAME = ; DESC = The value of this object selects the 56K CSU \\ clocking source. (Operational); HELP = ; CAPABILITIES = NET_CFG, VPN_CFG;')
mcm56kCsuCfgTxOutOfFrame = MibScalar((1, 3, 6, 1, 4, 1, 335, 1, 4, 28, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("normalTransmitCondition", 1), ("transmitOutOfFrame", 2))).clone('normalTransmitCondition')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mcm56kCsuCfgTxOutOfFrame.setStatus('deprecated')
if mibBuilder.loadTexts: mcm56kCsuCfgTxOutOfFrame.setDescription('NAME = ; DESC = This object selects transmission of the 56K CSU \\ Out Of Frame sequence for DDS-PRI mode, or \\ transmission of Multiplexer Out Of Sync \\ sequence for DDS-SC mode. (Operational); HELP = ; CAPABILITIES = NET_CFG, VPN_CFG;')
mcm56kCsuCfgTxOutOfService = MibScalar((1, 3, 6, 1, 4, 1, 335, 1, 4, 28, 1, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("normalTransmitCondition", 1), ("transmitOutOfService", 2))).clone('normalTransmitCondition')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mcm56kCsuCfgTxOutOfService.setStatus('deprecated')
if mibBuilder.loadTexts: mcm56kCsuCfgTxOutOfService.setDescription('NAME = ; DESC = This object selects transmission of the 56K CSU \\ Out Of Service sequence for DDS-PRI mode, or \\ transmission of Abnormal Station Code \\ sequence for DDS-SC mode. (Operational); HELP = ; CAPABILITIES = NET_CFG, VPN_CFG;')
mcm56kCsuCfgTxControlModeIdle = MibScalar((1, 3, 6, 1, 4, 1, 335, 1, 4, 28, 1, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("normalTransmitCondition", 1), ("transmitControlModeIdle", 2))).clone('normalTransmitCondition')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mcm56kCsuCfgTxControlModeIdle.setStatus('deprecated')
if mibBuilder.loadTexts: mcm56kCsuCfgTxControlModeIdle.setDescription('NAME = ; DESC = This object selects transmission of the 56K CSU \\ Control Mode Idle sequence. (Operational); HELP = ; CAPABILITIES = NET_CFG, VPN_CFG;')
mcm56kCsuCfgZeroSuppressDisable = MibScalar((1, 3, 6, 1, 4, 1, 335, 1, 4, 28, 1, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("normalTransmitCondition", 1), ("zeroSuppressionDisable", 2))).clone('normalTransmitCondition')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mcm56kCsuCfgZeroSuppressDisable.setStatus('deprecated')
if mibBuilder.loadTexts: mcm56kCsuCfgZeroSuppressDisable.setDescription('NAME = ; DESC = This object selects the 56K CSU disable \\ transmit Zero Suppression for DDS-PRI mode, \\ or transmission of all zeros for DDS-SC mode. \\ (Operational); HELP = ; CAPABILITIES = NET_CFG, VPN_CFG;')
mcm56kCsuCfgTxIdle = MibScalar((1, 3, 6, 1, 4, 1, 335, 1, 4, 28, 1, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("normalTransmitCondition", 1), ("transmitIdle", 2))).clone('normalTransmitCondition')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mcm56kCsuCfgTxIdle.setStatus('deprecated')
if mibBuilder.loadTexts: mcm56kCsuCfgTxIdle.setDescription('NAME = ; DESC = This object selects transmission of the 56K CSU \\ All Marks (or 1s) of the Data Mode Idle sequence \\ for all the modes. (Operational); HELP = ; CAPABILITIES = NET_CFG, VPN_CFG;')
mcm56kCsuCfgCSULoopback = MibScalar((1, 3, 6, 1, 4, 1, 335, 1, 4, 28, 1, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("normalReceiveCondition", 1), ("forceCSUtoLoopback", 2))).clone('normalReceiveCondition')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mcm56kCsuCfgCSULoopback.setStatus('deprecated')
if mibBuilder.loadTexts: mcm56kCsuCfgCSULoopback.setDescription('NAME = ; DESC = This object selects the 56K CSU to force the CSU \\ to be in a loopback mode. (Operational); HELP = ; CAPABILITIES = NET_CFG, VPN_CFG;')
mcm56kCsuCfgFilterForceEnable = MibScalar((1, 3, 6, 1, 4, 1, 335, 1, 4, 28, 1, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("normalReceiveCondition", 1), ("filterForceEnable", 2))).clone('normalReceiveCondition')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mcm56kCsuCfgFilterForceEnable.setStatus('deprecated')
if mibBuilder.loadTexts: mcm56kCsuCfgFilterForceEnable.setDescription('NAME = ; DESC = This object selects the 56K CSU to have \\ filter forcing enabled or not. (Operational); HELP = ; CAPABILITIES = NET_CFG, VPN_CFG;')
mcm56kCsuCfgFilterForcingCntl = MibScalar((1, 3, 6, 1, 4, 1, 335, 1, 4, 28, 1, 1, 10), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16))).clone(namedValues=NamedValues(("filterGain-90db", 1), ("filterGain-84db", 2), ("filterGain-78db", 3), ("filterGain-72db", 4), ("filterGain-66db", 5), ("filterGain-60db", 6), ("filterGain-54db", 7), ("filterGain-48db", 8), ("filterGain-42db", 9), ("filterGain-36db", 10), ("filterGain-30db", 11), ("filterGain-24db", 12), ("filterGain-18db", 13), ("filterGain-12db", 14), ("filterGain-6db", 15), ("filterGain-0db", 16))).clone('filterGain-0db')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mcm56kCsuCfgFilterForcingCntl.setStatus('deprecated')
if mibBuilder.loadTexts: mcm56kCsuCfgFilterForcingCntl.setDescription('NAME = ; DESC = This object selects the 56K CSU filter \\ gain if filter force is enabled. (Operational); HELP = ; CAPABILITIES = NET_CFG, VPN_CFG;')
# ---------------------------------------------------------------------------
# Non-volatile (nvm*) configuration group: stored copies of the settings
# above, exposed read-only. Every object is marked 'obsolete'.
# ---------------------------------------------------------------------------
nvm56kCsuCfgGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 335, 1, 4, 28, 1, 2))
nvm56kCsuCfgOperatingMode = MibScalar((1, 3, 6, 1, 4, 1, 335, 1, 4, 28, 1, 2, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("dds-pri-4Wire-56k", 1), ("dds-sc-WithSecondaryChannel-72k", 2), ("cc-64k-ClearChannel-64k", 3), ("dds-pri-2Wire-56k", 4))).clone('dds-pri-4Wire-56k')).setMaxAccess("readonly")
if mibBuilder.loadTexts: nvm56kCsuCfgOperatingMode.setStatus('obsolete')
if mibBuilder.loadTexts: nvm56kCsuCfgOperatingMode.setDescription('NAME = ; DESC = The value of this object selects the 56K CSU \\ operating mode. (Configuration); HELP = ; CAPABILITIES = NET_CFG, VPN_CFG;')
nvm56kCsuCfgClockingSource = MibScalar((1, 3, 6, 1, 4, 1, 335, 1, 4, 28, 1, 2, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("externalClock", 1), ("internalClock", 2))).clone('externalClock')).setMaxAccess("readonly")
if mibBuilder.loadTexts: nvm56kCsuCfgClockingSource.setStatus('obsolete')
if mibBuilder.loadTexts: nvm56kCsuCfgClockingSource.setDescription('NAME = ; DESC = The value of this object selects the 56K CSU \\ clocking source. (Configuration); HELP = ; CAPABILITIES = NET_CFG, VPN_CFG;')
nvm56kCsuCfgTxOutOfFrame = MibScalar((1, 3, 6, 1, 4, 1, 335, 1, 4, 28, 1, 2, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("normalTransmitCondition", 1), ("transmitOutOfFrame", 2))).clone('normalTransmitCondition')).setMaxAccess("readonly")
if mibBuilder.loadTexts: nvm56kCsuCfgTxOutOfFrame.setStatus('obsolete')
if mibBuilder.loadTexts: nvm56kCsuCfgTxOutOfFrame.setDescription('NAME = ; DESC = This object selects transmission of the 56K CSU \\ Out Of Frame sequence for DDS-PRI mode, or \\ transmission of Multiplexer Out Of Sync \\ sequence for DDS-SC mode. (Configuration); HELP = ; CAPABILITIES = NET_CFG, VPN_CFG;')
nvm56kCsuCfgTxOutOfService = MibScalar((1, 3, 6, 1, 4, 1, 335, 1, 4, 28, 1, 2, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("normalTransmitCondition", 1), ("transmitOutOfService", 2))).clone('normalTransmitCondition')).setMaxAccess("readonly")
if mibBuilder.loadTexts: nvm56kCsuCfgTxOutOfService.setStatus('obsolete')
if mibBuilder.loadTexts: nvm56kCsuCfgTxOutOfService.setDescription('NAME = ; DESC = This object selects transmission of the 56K CSU \\ Out Of Service sequence for DDS-PRI mode, or \\ transmission of Abnormal Station Code \\ sequence for DDS-SC mode. (Configuration); HELP = ; CAPABILITIES = NET_CFG, VPN_CFG;')
nvm56kCsuCfgTxControlModeIdle = MibScalar((1, 3, 6, 1, 4, 1, 335, 1, 4, 28, 1, 2, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("normalTransmitCondition", 1), ("transmitControlModeIdle", 2))).clone('normalTransmitCondition')).setMaxAccess("readonly")
if mibBuilder.loadTexts: nvm56kCsuCfgTxControlModeIdle.setStatus('obsolete')
if mibBuilder.loadTexts: nvm56kCsuCfgTxControlModeIdle.setDescription('NAME = ; DESC = This object selects transmission of the 56K CSU \\ Control Mode Idle sequence. (Configuration); HELP = ; CAPABILITIES = NET_CFG, VPN_CFG;')
nvm56kCsuCfgZeroSuppressDisable = MibScalar((1, 3, 6, 1, 4, 1, 335, 1, 4, 28, 1, 2, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("normalTransmitCondition", 1), ("zeroSuppressionDisable", 2))).clone('normalTransmitCondition')).setMaxAccess("readonly")
if mibBuilder.loadTexts: nvm56kCsuCfgZeroSuppressDisable.setStatus('obsolete')
if mibBuilder.loadTexts: nvm56kCsuCfgZeroSuppressDisable.setDescription('NAME = ; DESC = This object selects the 56K CSU disable \\ transmit Zero Suppression for DDS-PRI mode, \\ or transmission of all zeros for DDS-SC mode. \\ (Configuration); HELP = ; CAPABILITIES = NET_CFG, VPN_CFG;')
nvm56kCsuCfgTxIdle = MibScalar((1, 3, 6, 1, 4, 1, 335, 1, 4, 28, 1, 2, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("normalTransmitCondition", 1), ("transmitIdle", 2))).clone('normalTransmitCondition')).setMaxAccess("readonly")
if mibBuilder.loadTexts: nvm56kCsuCfgTxIdle.setStatus('obsolete')
if mibBuilder.loadTexts: nvm56kCsuCfgTxIdle.setDescription('NAME = ; DESC = This object selects transmission of the 56K CSU \\ All Marks (or 1s) of the Data Mode Idle sequence \\ for all the modes. (Configuration); HELP = ; CAPABILITIES = NET_CFG, VPN_CFG;')
nvm56kCsuCfgCSULoopback = MibScalar((1, 3, 6, 1, 4, 1, 335, 1, 4, 28, 1, 2, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("normalReceiveCondition", 1), ("forceCSUtoLoopback", 2))).clone('normalReceiveCondition')).setMaxAccess("readonly")
if mibBuilder.loadTexts: nvm56kCsuCfgCSULoopback.setStatus('obsolete')
if mibBuilder.loadTexts: nvm56kCsuCfgCSULoopback.setDescription('NAME = ; DESC = This object selects the 56K CSU to force the CSU \\ to be in a loopback mode. (Configuration); HELP = ; CAPABILITIES = NET_CFG, VPN_CFG;')
nvm56kCsuCfgFilterForceEnable = MibScalar((1, 3, 6, 1, 4, 1, 335, 1, 4, 28, 1, 2, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("normalReceiveCondition", 1), ("filterForceEnable", 2))).clone('normalReceiveCondition')).setMaxAccess("readonly")
if mibBuilder.loadTexts: nvm56kCsuCfgFilterForceEnable.setStatus('obsolete')
if mibBuilder.loadTexts: nvm56kCsuCfgFilterForceEnable.setDescription('NAME = ; DESC = This object selects the 56K CSU to have \\ filter forcing enabled or not. (Configuration); HELP = ; CAPABILITIES = NET_CFG, VPN_CFG;')
nvm56kCsuCfgFilterForcingCntl = MibScalar((1, 3, 6, 1, 4, 1, 335, 1, 4, 28, 1, 2, 10), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16))).clone(namedValues=NamedValues(("filterGain-90db", 1), ("filterGain-84db", 2), ("filterGain-78db", 3), ("filterGain-72db", 4), ("filterGain-66db", 5), ("filterGain-60db", 6), ("filterGain-54db", 7), ("filterGain-48db", 8), ("filterGain-42db", 9), ("filterGain-36db", 10), ("filterGain-30db", 11), ("filterGain-24db", 12), ("filterGain-18db", 13), ("filterGain-12db", 14), ("filterGain-6db", 15), ("filterGain-0db", 16))).clone('filterGain-0db')).setMaxAccess("readonly")
if mibBuilder.loadTexts: nvm56kCsuCfgFilterForcingCntl.setStatus('obsolete')
if mibBuilder.loadTexts: nvm56kCsuCfgFilterForcingCntl.setDescription('NAME = ; DESC = This object selects the 56K CSU filter \\ gain if filter force is enabled. (Configuration); HELP = ; CAPABILITIES = NET_CFG, VPN_CFG;')
# ---------------------------------------------------------------------------
# Status (mcm56KCsuStatus*) group: read-only line and receiver state.
# ---------------------------------------------------------------------------
mcm56KCsuStatusGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 335, 1, 4, 28, 2, 1))
mcm56KCsuStatusLineStatus = MibScalar((1, 3, 6, 1, 4, 1, 335, 1, 4, 28, 2, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("operational", 1), ("linkDown", 2), ("testMode", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcm56KCsuStatusLineStatus.setStatus('deprecated')
if mibBuilder.loadTexts: mcm56KCsuStatusLineStatus.setDescription('NAME = ; DESC = The value of this object indicates the 56K \\ CSU line status.; HELP = ; CAPABILITIES = NET_CFG, VPN_CFG ;')
mcm56KCsuStatusRxLossOfSignal = MibScalar((1, 3, 6, 1, 4, 1, 335, 1, 4, 28, 2, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("yes", 1), ("no", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcm56KCsuStatusRxLossOfSignal.setStatus('deprecated')
if mibBuilder.loadTexts: mcm56KCsuStatusRxLossOfSignal.setDescription('NAME = ; DESC = The value of this object indicates if the 56K \\ CSU receiver has lost signal.; HELP = ; CAPABILITIES = NET_CFG, VPN_CFG ;')
mcm56KCsuStatusFAWSync = MibScalar((1, 3, 6, 1, 4, 1, 335, 1, 4, 28, 2, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("yes", 1), ("no", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcm56KCsuStatusFAWSync.setStatus('deprecated')
if mibBuilder.loadTexts: mcm56KCsuStatusFAWSync.setDescription('NAME = ; DESC = The value of this object indicates the 56K \\ CSU DDS FAW Sync in DDS-SC or CC-64K.; HELP = ; CAPABILITIES = NET_CFG, VPN_CFG ;')
mcm56KCsuStatusLoopPresent = MibScalar((1, 3, 6, 1, 4, 1, 335, 1, 4, 28, 2, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("yes", 1), ("no", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcm56KCsuStatusLoopPresent.setStatus('deprecated')
if mibBuilder.loadTexts: mcm56KCsuStatusLoopPresent.setDescription('NAME = ; DESC = The value of this object indicates if the 56K \\ CSU loop is currently present.; HELP = ; CAPABILITIES = NET_CFG, VPN_CFG ;')
mcm56KCsuStatusInsertLossLineLength = MibScalar((1, 3, 6, 1, 4, 1, 335, 1, 4, 28, 2, 1, 5), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcm56KCsuStatusInsertLossLineLength.setStatus('deprecated')
if mibBuilder.loadTexts: mcm56KCsuStatusInsertLossLineLength.setDescription('NAME = ; DESC = The value of this object indicates the 56K \\ CSU receiver line length.; HELP = ; CAPABILITIES = NET_CFG, VPN_CFG ;')
mcm56KCsuStatusRxSignalMag = MibScalar((1, 3, 6, 1, 4, 1, 335, 1, 4, 28, 2, 1, 6), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcm56KCsuStatusRxSignalMag.setStatus('deprecated')
if mibBuilder.loadTexts: mcm56KCsuStatusRxSignalMag.setDescription('NAME = ; DESC = The value of this object indicates the 56K \\ CSU receive signal magnitude.; HELP = ; CAPABILITIES = NET_CFG, VPN_CFG ;')
mcm56KCsuStatusInvalidBPVcount = MibScalar((1, 3, 6, 1, 4, 1, 335, 1, 4, 28, 2, 1, 7), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcm56KCsuStatusInvalidBPVcount.setStatus('deprecated')
if mibBuilder.loadTexts: mcm56KCsuStatusInvalidBPVcount.setDescription('NAME = ; DESC = The value of this object indicates the 56K \\ CSU invalid Bipolar Violation (BPV) count.; HELP = ; CAPABILITIES = NET_CFG, VPN_CFG ;')
# Register every defined symbol with the MIB builder so that other MIB
# modules (and pysnmp managers) can import them by name.
mibBuilder.exportSymbols("MICOM-56KCSU-MIB", nvm56kCsuCfgClockingSource=nvm56kCsuCfgClockingSource, nvm56kCsuCfgOperatingMode=nvm56kCsuCfgOperatingMode, mcm56kCsuCfgTxOutOfFrame=mcm56kCsuCfgTxOutOfFrame, mcm56kCsuCfgZeroSuppressDisable=mcm56kCsuCfgZeroSuppressDisable, mcm56kCsuCfgTxOutOfService=mcm56kCsuCfgTxOutOfService, mcm56KCsuStatusLineStatus=mcm56KCsuStatusLineStatus, mcm56kCsuCfgCSULoopback=mcm56kCsuCfgCSULoopback, nvm56kCsuCfgCSULoopback=nvm56kCsuCfgCSULoopback, nvm56kCsuCfgTxControlModeIdle=nvm56kCsuCfgTxControlModeIdle, mcm56kCsuCfgFilterForcingCntl=mcm56kCsuCfgFilterForcingCntl, mcm56kCsuCfgFilterForceEnable=mcm56kCsuCfgFilterForceEnable, mcm56KCsuStatusRxLossOfSignal=mcm56KCsuStatusRxLossOfSignal, nvm56kCsuCfgTxIdle=nvm56kCsuCfgTxIdle, mcm56kCsuCfgTxControlModeIdle=mcm56kCsuCfgTxControlModeIdle, nvm56kCsuCfgFilterForcingCntl=nvm56kCsuCfgFilterForcingCntl, nvm56kCsuCfgTxOutOfFrame=nvm56kCsuCfgTxOutOfFrame, csu56k_status=csu56k_status, mcm56KCsuStatusGroup=mcm56KCsuStatusGroup, micom_56kcsu=micom_56kcsu, mcm56KCsuStatusInsertLossLineLength=mcm56KCsuStatusInsertLossLineLength, nvm56kCsuCfgZeroSuppressDisable=nvm56kCsuCfgZeroSuppressDisable, mcm56kCsuCfgClockingSource=mcm56kCsuCfgClockingSource, mcm56KCsuStatusInvalidBPVcount=mcm56KCsuStatusInvalidBPVcount, csu56k_configuration=csu56k_configuration, mcm56KCsuStatusLoopPresent=mcm56KCsuStatusLoopPresent, nvm56kCsuCfgFilterForceEnable=nvm56kCsuCfgFilterForceEnable, mcm56KCsuStatusFAWSync=mcm56KCsuStatusFAWSync, mcm56kCsuCfgGroup=mcm56kCsuCfgGroup, nvm56kCsuCfgGroup=nvm56kCsuCfgGroup, nvm56kCsuCfgTxOutOfService=nvm56kCsuCfgTxOutOfService, mcm56kCsuCfgTxIdle=mcm56kCsuCfgTxIdle, mcm56KCsuStatusRxSignalMag=mcm56KCsuStatusRxSignalMag, mcm56kCsuCfgOperatingMode=mcm56kCsuCfgOperatingMode)
|
// Database handle and model definitions for the category module.
const db = require('../config/db');
const Sequelize = db.sequelize;
// NOTE(review): sequelize.import() was removed in Sequelize v6 — confirm the
// project pins a v5-or-earlier release before upgrading.
const Category = Sequelize.import('../schema/category');
const Article = Sequelize.import('../schema/article');
// Create the table if it does not exist; force:false never drops existing data.
Category.sync({force: false});
class CategoryModel {
  /**
   * Create a new category.
   * @param {Object} data - payload containing the category name
   * @returns {Promise<*>} the newly created category row
   */
  static async createCategory(data) {
    return await Category.create({ name: data.name });
  }

  /**
   * Update an existing category's name.
   * @param {number} id - category id
   * @param {Object} data - payload containing the new name
   * @returns {Promise.<boolean>} always true once the update completes
   */
  static async updateCategory(id, data) {
    await Category.update(
      { name: data.name },
      { where: { id }, fields: ['name'] }
    );
    return true;
  }

  /**
   * List all categories (id and name only).
   * @returns {Promise<*>} array of category rows
   */
  static async getCategoryList() {
    return await Category.findAll({ attributes: ['id', 'name'] });
  }

  /**
   * Fetch the category with the given id together with all of its articles.
   * @param {number} id - category id
   * @returns {Promise<*>} matching category rows with eager-loaded articles
   */
  static async getCategoryArticleList(id) {
    return await Category.findAll({
      where: { id },
      include: [{ model: Article }],
    });
  }

  /**
   * Fetch a single category by id.
   * @param {number} id - category id
   * @returns {Promise<Model>} the matching category, or null
   */
  static async getCategoryDetail(id) {
    return await Category.findOne({ where: { id } });
  }

  /**
   * Delete the category with the given id.
   * @param {number} id - category id
   * @returns {Promise.<boolean>} always true once the delete completes
   */
  static async deleteCategory(id) {
    await Category.destroy({ where: { id } });
    return true;
  }
}

module.exports = CategoryModel
|
// ==== INPUT LAYOUTS ====

// Active language; pick one of "EN" or "DE".
const language = "EN";

// Rows of keys rendered by the on-screen keyboard, per language.
const keyboardLayout = {
  EN: [
    "Q,W,E,R,T,Y,U,I,O,P",
    "A,S,D,F,G,H,J,K,L",
    "Enter,Z,X,C,V,B,N,M,Backspace",
  ],
  DE: [
    "Q,W,E,R,T,Y,U,I,O,P,Ä",
    "A,S,D,F,G,H,J,K,L,Ö,Ü",
    "Enter,Z,X,C,V,B,N,M,Backspace",
  ],
};

// Single-character inputs accepted from the physical keyboard, per language.
const keyboardValidation = {
  EN: /^[a-z]$/i,
  DE: /^[a-zäöü]$/i,
};

// Alphabet used when encoding/decoding words, per language.
const encodingString = {
  EN: "abcdefghijklmnopqrstuvwxyz",
  DE: "abcdefghijklmnopqrstuvwxyzäöü",
};

export default {
  language,
  keyboardLayout,
  keyboardValidation,
  encodingString,
};
|
/*
 * This module is the seed data code generator. It generates code to bulk insert seed data for a schema
 */
export class SeedDataCodeGen {
  /**
   * Generate the source of a standalone bulk-insert script for one schema.
   * @param {string} schemaName - schema whose DAL module (./dal/<schemaName>) performs the inserts
   * @param {Object} seedDataArr - map of schema name -> array of seed-data objects
   * @returns {string} JavaScript source code ready to be written to a file
   */
  generate(schemaName, seedDataArr) {
    let code = `"use strict";
// Bulk insert of seed data for ${schemaName}
const dal = require('./dal/${schemaName}');
// Iterates through an array and invokes an asynchronous function on each element
// arr: The array
// fnEach: Invoked on each element of the array. Signature: void function (item, done), where done is void function (err)
// callback: Invoked at the end or if an error occurs. Signature: void function (err)
function sequence(arr, fnEach, callback) {
  // An empty array completes immediately instead of invoking fnEach
  // with an undefined item.
  if (arr.length === 0) {
    callback();
    return;
  }
  let index = 0;
  function onDone() {
    let item = arr[index];
    fnEach(item, err => {
      if (err) {
        callback(err);
        return;
      }
      index++;
      if (index < arr.length) {
        onDone();
      } else {
        callback();
      }
    });
  }
  onDone();
}
const seedDataArr = ${JSON.stringify(seedDataArr[schemaName], undefined, 2)};
sequence(seedDataArr,
  (seedDataObj, done) => {
    dal.insert(seedDataObj)
      .then(ret => {
        console.log('[Bulk insert success for ${schemaName}] Inserted data for id: ' + ret.id);
        done();
      })
      .catch(done);
  }, err => {
    if (err) {
      // 'ret' is not in scope here (it only exists inside the .then
      // callback above), so the message reports the error alone.
      console.error('[Bulk insert error for ${schemaName}] The error is:', err);
    } else {
      console.log('[Bulk insert success for ${schemaName}] completed successfully');
      process.exit(0);
    }
  });`;
    return code;
  }
}
|
# coding: utf-8
# file: caching.py
# Full article: http://www.debrice.com/flask-sqlalchemy-caching/
import functools
import hashlib
from flask_sqlalchemy import BaseQuery
from sqlalchemy import event, select
from sqlalchemy.orm.interfaces import MapperOption
from sqlalchemy.orm.attributes import get_history
from sqlalchemy.ext.declarative import declared_attr
from dogpile.cache.region import make_region
from dogpile.cache.api import NO_VALUE
def md5_key_mangler(key):
    """Replace a raw ``SELECT ...`` cache key with its MD5 hex digest.

    Keys that do not start with ``'SELECT '`` pass through unchanged.
    """
    if not key.startswith('SELECT '):
        return key
    return hashlib.md5(key.encode('ascii')).hexdigest()
def memoize(obj):
    """Cache ``obj``'s return values, keyed by the repr of its arguments.

    The cache dict is also attached to the function as ``obj.cache`` so
    callers can inspect or clear it.
    """
    cache = obj.cache = {}

    @functools.wraps(obj)
    def memoizer(*args, **kwargs):
        key = str(args) + str(kwargs)
        if key in cache:
            return cache[key]
        result = obj(*args, **kwargs)
        cache[key] = result
        return result

    return memoizer
# Alternative backend kept for local development: an in-process memory
# cache with a 60 second TTL. Swap it in by uncommenting and removing the
# Redis block below.
# cache_config = {
#     'backend': 'dogpile.cache.memory',
#     'expiration_time': 60,
# }
# Active backend: Redis on localhost. ``distributed_lock`` serialises
# regeneration of expired entries across processes.
cache_config = {
    'backend': 'dogpile.cache.redis',
    'arguments' : {
        'host': 'localhost',
        'port': 6379,
        'db': 0,
        'redis_expiration_time': 60*60*2,  # 2 hours
        'distributed_lock':True
    }
}
# Named dogpile regions used by CachingQuery/Cache below. Every
# SELECT-derived key is shortened by md5_key_mangler before hitting Redis.
regions = dict(
    default=make_region(key_mangler=md5_key_mangler).configure(**cache_config)
)
class CachingQuery(BaseQuery):
    """
    A Query subclass which optionally loads full results from a dogpile
    cache region.
    """
    def __init__(self, regions, entities, *args, **kw):
        # Mapping of region name -> dogpile region, shared by all queries
        # created through query_callable().
        self.cache_regions = regions
        BaseQuery.__init__(self, entities=entities, *args, **kw)
    def __iter__(self):
        """
        override __iter__ to pull results from dogpile
        if particular attributes have been configured.
        """
        # _cache_region is set by FromCache.process_query(); its presence
        # means this query should be answered from (or stored into) cache.
        if hasattr(self, '_cache_region'):
            return self.get_value(createfunc=lambda: list(BaseQuery.__iter__(self)))
        else:
            return BaseQuery.__iter__(self)
    def _get_cache_plus_key(self):
        """
        Return a cache region plus key.
        """
        dogpile_region = self.cache_regions[self._cache_region.region]
        # Prefer the explicit key supplied via FromCache; otherwise derive a
        # key from the compiled SQL statement and its bound parameters.
        if self._cache_region.cache_key:
            key = self._cache_region.cache_key
        else:
            key = _key_from_query(self)
        return dogpile_region, key
    def invalidate(self):
        """
        Invalidate the cache value represented by this Query.
        """
        dogpile_region, cache_key = self._get_cache_plus_key()
        dogpile_region.delete(cache_key)
    def get_value(self, merge=True, createfunc=None,
                  expiration_time=None, ignore_expiration=False):
        """
        Return the value from the cache for this query.
        Raise KeyError if no value present and no
        createfunc specified.
        """
        dogpile_region, cache_key = self._get_cache_plus_key()
        assert not ignore_expiration or not createfunc, \
            "Can't ignore expiration and also provide createfunc"
        if ignore_expiration or not createfunc:
            cached_value = dogpile_region.get(cache_key,
                expiration_time=expiration_time,
                ignore_expiration=ignore_expiration)
        else:
            # get_or_create runs createfunc under the dogpile lock so only
            # one worker regenerates an expired entry at a time.
            cached_value = dogpile_region.get_or_create(
                cache_key,
                createfunc,
                expiration_time=expiration_time)
        if cached_value is NO_VALUE:
            raise KeyError(cache_key)
        if merge:
            # Re-attach the cached ORM instances to this query's session
            # without re-loading them from the database.
            cached_value = self.merge_result(cached_value, load=False)
        return cached_value
    def set_value(self, value):
        """
        Set the value in the cache for this query.
        """
        dogpile_region, cache_key = self._get_cache_plus_key()
        dogpile_region.set(cache_key, value)
def query_callable(regions, query_cls=CachingQuery):
    """Return a factory producing ``query_cls`` instances bound to ``regions``.

    Suitable for passing as Flask-SQLAlchemy's ``query_class`` hook.
    """
    return functools.partial(query_cls, regions)
def _key_from_query(query, qualifier=None):
"""
Given a Query, create a cache key.
"""
stmt = query.with_labels().statement
compiled = stmt.compile()
params = compiled.params
return " ".join(
[str(compiled)] +
[str(params[k]) for k in sorted(params)])
class FromCache(MapperOption):
    """Query option directing a CachingQuery to load from a dogpile region."""

    # Do not carry this option onto lazy loads triggered by the results.
    propagate_to_loaders = False

    def __init__(self, region="default", cache_key=None):
        """Construct a new FromCache.

        :param region: name of a region configured in the module-level
            ``regions`` dictionary.
        :param cache_key: optional explicit cache key. Provide one when the
            query has a huge number of parameters (e.g. large ``in_()``
            lists) that map more simply to some other identifier; otherwise
            a key is derived from the compiled SQL.
        """
        self.region = region
        self.cache_key = cache_key

    def process_query(self, query):
        """Tag the query so CachingQuery.__iter__ consults the cache."""
        query._cache_region = self
class Cache(object):
    """Per-model cache facade: get/filter objects by primary key via dogpile.

    Listing keys store only primary-key lists; individual objects are cached
    separately, so invalidating one object never discards a whole listing.
    """
    def __init__(self, model, regions, label):
        self.model = model
        self.regions = regions
        self.label = label
        # allow custom pk or default to 'id'
        self.pk = getattr(model, 'cache_pk', 'id')
    def get(self, pk):
        """
        Equivalent to the Model.query.get(pk) but using cache
        """
        return self.model.query.options(self.from_cache(pk=pk)).get(pk)
    def filter(self, order_by='asc', offset=None, limit=None, **kwargs):
        """
        Retrieve all the objects ids then pull them independently from cache.
        kwargs accepts one attribute filter, mainly for relationship pulling.
        offset and limit allow pagination, order by for sorting (asc/desc).
        """
        if kwargs:
            if len(kwargs) > 1:
                raise TypeError('filter accept only one attribute for filtering')
            # BUG FIX: dict.items() is not subscriptable on Python 3; take
            # the single pair via an iterator instead of items()[0].
            key, value = next(iter(kwargs.items()))
            if key not in self._columns():
                # BUG FIX: the format arguments must be a tuple —
                # '%s ... %s' % self raised "not enough arguments" instead
                # of the intended message.
                raise TypeError('%s does not have an attribute %s' % (self, key))
        cache_key = self._cache_key(**kwargs)
        pks = self.regions[self.label].get(cache_key)
        if pks is NO_VALUE:
            # NOTE(review): assumes the pk attribute on the result rows is
            # named 'id' — verify against models declaring a custom cache_pk.
            pks = [o.id for o in self.model.query.filter_by(**kwargs).with_entities(getattr(self.model, self.pk))]
            self.regions[self.label].set(cache_key, pks)
        if order_by == 'desc':
            pks.reverse()
        if offset is not None:
            # BUG FIX: slice from the offset; the original sliced with the
            # list itself ("pks[pks:]"), raising a TypeError.
            pks = pks[offset:]
        if limit is not None:
            pks = pks[:limit]
        keys = [self._cache_key(id) for id in pks]
        for pos, obj in enumerate(self.regions[self.label].get_multi(keys)):
            if obj is NO_VALUE:
                # Cache miss for this object: load (and cache) it via get().
                yield self.get(pks[pos])
            else:
                # NOTE(review): cached entries appear to be 1-element
                # sequences — confirm against what merge_result stores.
                yield obj[0]
    def flush(self, key):
        """
        flush the given key from dogpile.cache
        """
        self.regions[self.label].delete(key)
    @memoize
    def _columns(self):
        """Names of the model's columns, excluding the primary key."""
        return [c.name for c in self.model.__table__.columns if c.name != self.pk]
    @memoize
    def from_cache(self, cache_key=None, pk=None):
        """
        build the from cache option object the the given object
        """
        if pk:
            cache_key = self._cache_key(pk)
        # if cache_key is none, the mangler will generate a MD5 from the query
        return FromCache(self.label, cache_key)
    @memoize
    def _cache_key(self, pk="all", **kwargs):
        """
        Generate a key as query
        format: '<tablename>.<column>[<value>]'
        'user.id[all]': all users
        'address.user_id=4[all]': all address linked to user id 4
        'user.id[4]': user with id=4
        """
        q_filter = "".join("%s=%s" % (k, v) for k, v in kwargs.items()) or self.pk
        return "%s.%s[%s]" % (self.model.__tablename__, q_filter, pk)
    def _flush_all(self, obj):
        """Invalidate every cache entry that may reference ``obj``."""
        for column in self._columns():
            added, unchanged, deleted = get_history(obj, column)
            # Both old and new values of a changed column can anchor a
            # filtered listing, so flush keys for each.
            for value in list(deleted) + list(added):
                self.flush(self._cache_key(**{column: value}))
        # flush "all" listing
        self.flush(self._cache_key())
        # flush the object
        self.flush(self._cache_key(getattr(obj, self.pk)))
class CacheableMixin(object):
    """Mixin wiring a model class to the Cache facade with auto-invalidation."""

    @declared_attr
    def cache(cls):
        """Attach the Cache facade to the model class.

        Requires the model to define ``cache_regions`` and ``cache_label``.
        """
        return Cache(cls, cls.cache_regions, cls.cache_label)

    @staticmethod
    def _flush_event(mapper, connection, target):
        """Flush every cache entry that may reference the modified target."""
        target.cache._flush_all(target)

    @classmethod
    def __declare_last__(cls):
        """
        Auto clean the caches, including listings possibly associated with
        this instance, on delete, update and insert.
        """
        for hook in ('before_delete', 'before_update', 'before_insert'):
            event.listen(cls, hook, cls._flush_event)
|
#
# This is Seisflows
#
# See LICENCE file
#
###############################################################################
from pkgutil import extend_path
# Make this a namespace package: merge in any other package directories of
# the same name found elsewhere on sys.path.
__path__ = extend_path(__path__, __name__)
|
;(function() {
  'use strict';

  // Reactive star/polygon drawing demo: every form control feeds an RxJS
  // stream, and the shape is redrawn whenever any value changes.
  var Observable = Rx.Observable;
  var fromEvent = Observable.fromEvent;

  var canv = document.getElementById('canvas');
  var contx = canv.getContext('2d');
  contx.clearRect(0, 0, canv.width, canv.height);

  var points = document.getElementById('points');
  var outerRadius = document.getElementById('outer-radius');
  var innerRadius = document.getElementById('inner-radius');
  var angle = document.getElementById('clockwise');
  var lineWidth = document.getElementById('line-width');
  var strokeColor = document.getElementById('stroke-color');
  var fillColor = document.getElementById('fill-color');

  // Build a stream of an input's value, seeded with its current value so
  // the initial combineLatest emission (first render) fires immediately.
  // Replaces seven near-identical fromEvent(...).startWith(...) blocks.
  function inputValue$(el) {
    return fromEvent(el, 'input', function(e) {
      return e.target.value;
    }).startWith(el.value);
  }

  var points$ = inputValue$(points);
  var outerRadius$ = inputValue$(outerRadius).distinctUntilChanged();
  var innerRadius$ = inputValue$(innerRadius);
  var angle$ = inputValue$(angle);
  var lineWidth$ = inputValue$(lineWidth);
  var strokeColor$ = inputValue$(strokeColor);
  var fillColor$ = inputValue$(fillColor);

  Rx.Observable
    .combineLatest(points$, outerRadius$, innerRadius$, angle$, strokeColor$, fillColor$, lineWidth$)
    .subscribe(function(values) {
      draw.apply(null, values);
    });

  // Clear the canvas and render the shape with the current control values.
  // Input values arrive as strings; parse with an explicit radix.
  function draw(points, radius1, radius2, alpha0, strokeColor, fillColor, lineWidth) {
    contx.clearRect(0, 0, canv.width, canv.height);
    contx.beginPath();
    drawShape(contx, canv.width / 2, canv.height / 2,
      parseInt(points, 10), parseInt(radius1, 10), parseInt(radius2, 10),
      parseInt(alpha0, 10), 1);
    contx.strokeStyle = strokeColor;
    contx.fillStyle = fillColor;
    contx.lineWidth = lineWidth;
    contx.stroke();
    contx.fill();
  }

  // Trace a star (or polygon) path on ctx.
  //   points:  number of points (or number of sides for polygons)
  //   radius1: "outer" radius of the star
  //   radius2: "inner" radius of the star (equal to radius1 -> polygon)
  //   alpha0:  initial angle (clockwise); by default shapes 'point' up
  //   ratio:   vertical scaling factor (1 = undistorted)
  function drawShape(ctx, x, y, points, radius1, radius2, alpha0, ratio) {
    var i, angle, radius;
    if (radius2 !== radius1) {
      points = 2 * points; // stars alternate outer and inner vertices
    }
    for (i = 0; i <= points; i++) {
      angle = i * 2 * Math.PI / points - Math.PI / 2 + alpha0;
      radius = i % 2 === 0 ? radius1 : radius2;
      ctx.lineTo(x + radius * Math.cos(angle), y + radius * Math.sin(angle) * ratio);
    }
  }
})();
|
"""Pytest fixture for the tracker agent."""
import pytest
import os
from unittest import mock
from ostorlab.agent import definitions as agent_definitions
from ostorlab.runtimes import definitions as runtime_definitions
from ostorlab.runtimes.local.models import models
from agent import tracker_agent as agent_tracker
# Timeouts (in seconds) kept tiny so fixture-based tests run fast.
SCAN_DONE_TIMEOUT_SEC = 1
POSTSCAN_DONE_TIMEOUT_SEC = 1
@pytest.fixture(scope='function', name='tracker_agent')
@mock.patch('ostorlab.runtimes.local.models.models.ENGINE_URL', 'sqlite:////tmp/ostorlab_db1.sqlite')
def fixture_tracker_agent():
    """Instantiate a tracker agent.

    Builds an in-memory agent definition and settings, creates a throwaway
    SQLite database (ENGINE_URL is patched above), records a scan and exposes
    its id through the UNIVERSE environment variable, then returns the agent.
    """
    # NOTE(review): 'postscane_done_timeout_sec' below looks like a typo of
    # 'postscan_done_timeout_sec' -- confirm against the argument names the
    # tracker agent actually reads before changing it.
    definition = agent_definitions.AgentDefinition(
        name='agent_tracker',
        out_selectors=[
            'v3.report.event.scan.done',
            'v3.report.event.scan.timeout',
            'v3.report.event.post_scan.timeout',
            'v3.report.event.post_scan.done'
        ],
        args=[
            {
                'name': 'init_sleep_seconds',
                'type': 'number',
                'value': SCAN_DONE_TIMEOUT_SEC,
                'description': 'blabla'
            },
            {
                'name': 'scan_done_timeout_sec',
                'type': 'number',
                'value': SCAN_DONE_TIMEOUT_SEC,
                'description': 'blabla'
            },
            {
                'name': 'postscane_done_timeout_sec',
                'type': 'number',
                'value': POSTSCAN_DONE_TIMEOUT_SEC,
                'description': 'blabla'
            }
        ])
    # Bus endpoints are not exercised by unit tests, hence the 'NA' values.
    settings = runtime_definitions.AgentSettings(
        key='agent_tracker_key',
        bus_url='NA',
        bus_exchange_topic='NA',
        bus_management_url='http://guest:guest@localhost:15672/',
        bus_vhost='/',
    )
    database = models.Database()
    database.create_db_tables()
    scan = models.Scan.create('test')
    # The agent discovers its scan ("universe") id via this env variable.
    os.environ['UNIVERSE'] = str(scan.id)
    agent = agent_tracker.TrackerAgent(definition, settings)
    return agent
|
import os
import platform
import re
import subprocess
import xml.etree.ElementTree as ET
from subprocess import CalledProcessError, PIPE, STDOUT
from six.moves.urllib.parse import quote_plus, unquote, urlparse
from conans.client.tools import check_output
from conans.client.tools.env import environment_append, no_op
from conans.client.tools.files import chdir
from conans.errors import ConanException
from conans.model.version import Version
from conans.util.files import decode_text, to_file_bytes, walk, mkdir
def _run_muted(cmd, folder=None):
    """Run *cmd* (list form) discarding stdout/stderr; return its exit code.

    If *folder* is given the command runs with that directory as cwd.
    """
    with chdir(folder) if folder else no_op():
        proc = subprocess.Popen(cmd,
                                stdout=subprocess.PIPE,
                                stderr=subprocess.PIPE)
        proc.communicate()  # wait for completion, drop the captured output
        return proc.returncode
def _check_repo(cmd, folder, msg=None):
    """Raise ConanException unless *cmd* succeeds when run inside *folder*.

    Used to verify that *folder* is a valid working copy for the tool
    named by ``cmd[0]`` (e.g. ``["git", "status"]``).
    """
    message = msg or "Not a valid '{}' repository".format(cmd[0])
    try:
        returncode = _run_muted(cmd, folder=folder)
    except Exception:
        raise ConanException(message)
    # A non-zero exit code also means the folder is not a valid repo.
    if returncode:
        raise ConanException(message)
class SCMBase(object):
    """Common behaviour shared by the Git and SVN command-line wrappers."""
    cmd_command = None  # subclasses set the executable name ('git' / 'svn')

    @classmethod
    def get_version(cls):
        """Return the tool version parsed from ``<tool> --version`` output."""
        try:
            out, _ = subprocess.Popen([cls.cmd_command, "--version"],
                                      stdout=subprocess.PIPE).communicate()
            first_line = decode_text(out).split('\n', 1)[0]
            # Version number is the third whitespace-separated token,
            # e.g. "git version 2.39.0".
            return Version(first_line.split(' ', 3)[2])
        except Exception as e:
            raise ConanException("Error retrieving {} version: '{}'".format(cls.cmd_command, e))

    def __init__(self, folder=None, verify_ssl=True, username=None, password=None,
                 force_english=True, runner=None, output=None):
        self.folder = folder or os.getcwd()
        if not os.path.exists(self.folder):
            os.makedirs(self.folder)
        self._verify_ssl = verify_ssl
        self._force_eng = force_english
        self._username = username
        self._password = password
        self._runner = runner
        self._output = output

    def run(self, command):
        """Execute ``<tool> <command>`` inside the working folder.

        With ``force_english`` the locale is pinned so output parsing is
        stable across systems.
        """
        command = "%s %s" % (self.cmd_command, command)
        with chdir(self.folder) if self.folder else no_op():
            with environment_append({"LC_ALL": "en_US.UTF-8"}) if self._force_eng else no_op():
                if self._runner:
                    return self._runner(command)
                return check_output(command).strip()

    def get_url_with_credentials(self, url):
        """Inject username/password into *url* unless it already has them."""
        if not self._username or not self._password:
            return url
        if urlparse(url).password:
            return url
        credentials = "%s:%s@" % (quote_plus(self._username),
                                  quote_plus(self._password))
        return url.replace("://", "://" + credentials, 1)

    @classmethod
    def _remove_credentials_url(cls, url):
        """Return *url* with any ``user:password@`` pair stripped."""
        parsed = urlparse(url)
        netloc = parsed.hostname
        if parsed.port:
            netloc += ":{}".format(parsed.port)
        return parsed._replace(netloc=netloc).geturl()
class Git(SCMBase):
    """Wrapper around the ``git`` command-line client."""
    cmd_command = "git"

    @property
    def _configure_ssl_verify(self):
        # Passed with '-c' so the setting applies to this invocation only,
        # without touching the user's global git configuration.
        return "-c http.sslVerify=%s " % ("true" if self._verify_ssl else "false")

    def run(self, command):
        command = self._configure_ssl_verify + command
        return super(Git, self).run(command)

    def _fetch(self, url, branch, shallow):
        # Populate the folder via init+fetch instead of 'git clone', which
        # requires an empty destination. Also used for shallow fetches of
        # an arbitrary ref (FETCH_HEAD checkout).
        if not branch:
            raise ConanException("The destination folder '%s' is not empty, "
                                 "specify a branch to checkout (not a tag or commit) "
                                 "or specify a 'subfolder' "
                                 "attribute in the 'scm'" % self.folder)
        output = self.run("init")
        output += self.run('remote add origin "%s"' % url)
        if shallow:
            output += self.run('fetch --depth 1 origin "%s"' % branch)
            output += self.run('checkout FETCH_HEAD')
        else:
            output += self.run("fetch")
            output += self.run("checkout -t origin/%s" % branch)
        return output

    def clone(self, url, branch=None, args="", shallow=False):
        """
        :param url: repository remote URL to clone from (e.g. https, git or local)
        :param branch: actually, can be any valid git ref expression like,
        - None, use default branch, usually it's "master"
        - branch name
        - tag name
        - revision sha256
        - expression like HEAD~1
        :param args: additional arguments to be passed to the git command (e.g. config args)
        :param shallow: if True, perform a depth-1 clone/fetch
        :return: output of the clone command
        """
        # TODO: rename "branch" -> "element" in Conan 2.0
        url = self.get_url_with_credentials(url)
        if os.path.exists(url):
            url = url.replace("\\", "/")  # Windows local directory
        mkdir(self.folder)  # might not exist in case of shallow clone
        if os.listdir(self.folder):
            # Non-empty destination: fall back to init+fetch.
            return self._fetch(url, branch, shallow)
        if shallow and branch:
            # 'clone --depth 1 --branch' only accepts branches/tags; the
            # fetch path also supports commits and ref expressions.
            return self._fetch(url, branch, shallow)
        branch_cmd = "--branch %s" % branch if branch else ""
        shallow_cmd = "--depth 1" if shallow else ""
        output = self.run('clone "%s" . %s %s %s' % (url, branch_cmd, shallow_cmd, args))
        return output

    def checkout(self, element, submodule=None):
        # Element can be a tag, branch or commit
        self.check_repo()
        output = self.run('checkout "%s"' % element)
        output += self.checkout_submodules(submodule)
        return output

    def checkout_submodules(self, submodule=None):
        """Do the checkout only for submodules"""
        if not submodule:
            return ""
        if submodule == "shallow":
            # Direct submodules only (no nesting).
            output = self.run("submodule sync")
            output += self.run("submodule update --init")
            return output
        elif submodule == "recursive":
            output = self.run("submodule sync --recursive")
            output += self.run("submodule update --init --recursive")
            return output
        else:
            raise ConanException("Invalid 'submodule' attribute value in the 'scm'. "
                                 "Unknown value '%s'. Allowed values: ['shallow', 'recursive']"
                                 % submodule)

    def excluded_files(self):
        """Return paths under the repo that git would ignore (.gitignore etc.)."""
        ret = []
        try:
            # Build repo-relative paths for every file and directory, then
            # let 'git check-ignore' filter them in a single subprocess.
            file_paths = [os.path.normpath(
                os.path.join(
                    os.path.relpath(folder, self.folder), el)).replace("\\", "/")
                for folder, dirpaths, fs in walk(self.folder)
                for el in fs + dirpaths]
            if file_paths:
                p = subprocess.Popen(['git', 'check-ignore', '--stdin'],
                                     stdout=PIPE, stdin=PIPE, stderr=STDOUT, cwd=self.folder)
                paths = to_file_bytes("\n".join(file_paths))
                grep_stdout = decode_text(p.communicate(input=paths)[0])
                ret = grep_stdout.splitlines()
        except (CalledProcessError, IOError, OSError) as e:
            # Best-effort: on failure behave as if nothing were excluded.
            if self._output:
                self._output.warn("Error checking excluded git files: %s. "
                                  "Ignoring excluded files" % e)
            ret = []
        return ret

    def get_remote_url(self, remote_name=None, remove_credentials=False):
        """Return the fetch/push URL of *remote_name* (default 'origin'), or None."""
        self.check_repo()
        remote_name = remote_name or "origin"
        remotes = self.run("remote -v")
        # 'git remote -v' lines look like: "origin  <url> (fetch)"
        for remote in remotes.splitlines():
            name, url = remote.split(None, 1)
            if name == remote_name:
                url, _ = url.rsplit(None, 1)  # drop trailing "(fetch)"/"(push)"
                if remove_credentials and not os.path.exists(url):  # only if not local
                    url = self._remove_credentials_url(url)
                if os.path.exists(url):  # Windows local directory
                    url = url.replace("\\", "/")
                return url
        return None

    def is_local_repository(self):
        url = self.get_remote_url()
        return os.path.exists(url)

    def get_commit(self):
        """Return the full SHA of HEAD."""
        self.check_repo()
        try:
            commit = self.run("rev-parse HEAD")
            commit = commit.strip()
            return commit
        except Exception as e:
            raise ConanException("Unable to get git commit from '%s': %s" % (self.folder, str(e)))

    get_revision = get_commit  # alias used by generic SCM code

    def get_commit_message(self):
        """Return subject+body of the HEAD commit, or None on failure."""
        self.check_repo()
        try:
            message = self.run("log -1 --format=%s%n%b")
            return message.strip()
        except Exception:
            return None

    def is_pristine(self):
        """True when the working tree has no staged or unstaged changes."""
        self.check_repo()
        status = self.run("status --porcelain").strip()
        if not status:
            return True
        else:
            return False

    def get_repo_root(self):
        self.check_repo()
        return self.run("rev-parse --show-toplevel")

    def get_branch(self):
        self.check_repo()
        try:
            status = self.run("status -bs --porcelain")
            # ## feature/scm_branch...myorigin/feature/scm_branch
            branch = status.splitlines()[0].split("...")[0].strip("#").strip()
            return branch
        except Exception as e:
            raise ConanException("Unable to get git branch from %s: %s" % (self.folder, str(e)))

    def get_tag(self):
        """Return the tag pointing exactly at HEAD, or None."""
        self.check_repo()
        try:
            status = self.run("describe --exact-match --tags")
            tag = status.strip()
            return tag
        except Exception:
            return None

    def check_repo(self):
        """ Check if it is a valid GIT repo """
        _check_repo(["git", "status"], folder=self.folder)
class SVN(SCMBase):
    """Wrapper around the ``svn`` command-line client."""
    cmd_command = "svn"
    file_protocol = 'file:///' if platform.system() == "Windows" else 'file://'
    API_CHANGE_VERSION = Version("1.9")  # CLI changes in 1.9

    def __init__(self, folder=None, runner=None, *args, **kwargs):
        # SVN output is parsed verbatim, so bypass the base-class runner's
        # default .strip() by providing a non-stripping runner.
        def runner_no_strip(command):
            return check_output(command)
        runner = runner or runner_no_strip
        super(SVN, self).__init__(folder=folder, runner=runner, *args, **kwargs)

    @property
    def version(self):
        # Lazily computed and cached; querying the tool spawns a process.
        if not hasattr(self, '_version'):
            version = SVN.get_version()
            setattr(self, '_version', version)
        return getattr(self, '_version')

    def run(self, command):
        # Ensure we always pass some params
        extra_options = " --no-auth-cache --non-interactive"
        if not self._verify_ssl:
            # Flag name changed in SVN 1.9.
            if self.version >= SVN.API_CHANGE_VERSION:
                extra_options += " --trust-server-cert-failures=unknown-ca"
            else:
                extra_options += " --trust-server-cert"
        if self._username and self._password:
            extra_options += " --username=" + self._username
            extra_options += " --password=" + self._password
        return super(SVN, self).run(command="{} {}".format(command, extra_options))

    def _show_item(self, item, target='.'):
        """Return a single 'svn info' field, emulating --show-item on old SVN."""
        self.check_repo()
        if self.version >= SVN.API_CHANGE_VERSION:
            value = self.run("info --show-item {item} \"{target}\"".format(item=item, target=target))
            return value.strip()
        else:
            # SVN < 1.9 has no --show-item: parse the XML output instead.
            output = self.run("info --xml \"{target}\"".format(target=target))
            root = ET.fromstring(output)
            if item == 'revision':
                return root.findall("./entry")[0].get("revision")
            elif item == 'url':
                return root.findall("./entry/url")[0].text
            elif item == 'wc-root':
                return root.findall("./entry/wc-info/wcroot-abspath")[0].text
            elif item == 'last-changed-revision':
                return root.findall("./entry/commit")[0].get("revision")
            elif item == 'relative-url':
                root_url = root.findall("./entry/repository/root")[0].text
                url = self._show_item(item='url', target=target)
                if url.startswith(root_url):
                    return url[len(root_url):]
            raise ConanException("Retrieval of item '{}' not implemented for SVN<{}".format(
                item, SVN.API_CHANGE_VERSION))

    def checkout(self, url, revision="HEAD"):
        """Check out *url* at *revision*, reusing an existing working copy."""
        output = ""
        try:
            self.check_repo()
        except ConanException:
            # Folder is not a working copy yet: fresh checkout into it.
            output += self.run('co "{url}" .'.format(url=url))
        else:
            # Existing working copy must point to the same URL; discard
            # local modifications before updating.
            assert url.lower() == self.get_remote_url().lower(), \
                "%s != %s" % (url, self.get_remote_url())
            output += self.run("revert . --recursive")
        finally:
            output += self.update(revision=revision)
        return output

    def update(self, revision='HEAD'):
        self.check_repo()
        return self.run("update -r {rev}".format(rev=revision))

    def excluded_files(self):
        """Return paths marked ignored ('I') by 'svn status --no-ignore'."""
        self.check_repo()
        excluded_list = []
        output = self.run("status --no-ignore")
        for it in output.splitlines():
            if it.startswith('I'):  # Only ignored files
                # Path starts after the fixed-width status columns.
                filepath = it[8:].strip()
                excluded_list.append(os.path.normpath(filepath))
        return excluded_list

    def get_remote_url(self, remove_credentials=False):
        url = self._show_item('url')
        if remove_credentials and not os.path.exists(url):  # only if not local
            url = self._remove_credentials_url(url)
        return url

    def get_qualified_remote_url(self, remove_credentials=False):
        # Return url with peg revision
        url = self.get_remote_url(remove_credentials=remove_credentials)
        revision = self.get_revision()
        return "{url}@{revision}".format(url=url, revision=revision)

    def is_local_repository(self):
        url = self.get_remote_url()
        return (url.startswith(self.file_protocol) and
                os.path.exists(unquote(url[len(self.file_protocol):])))

    def is_pristine(self):
        # Check if working copy is pristine/consistent
        if self.version >= SVN.API_CHANGE_VERSION:
            try:
                output = self.run("status -u -r {} --xml".format(self.get_revision()))
            except subprocess.CalledProcessError:
                return False
            else:
                root = ET.fromstring(output)
                # Any status outside these sets means local or remote changes.
                pristine_item_list = ['external', 'ignored', 'none', 'normal']
                pristine_props_list = ['normal', 'none']
                for item in root.findall('.//wc-status'):
                    if item.get('item', 'none') not in pristine_item_list:
                        return False
                    if item.get('props', 'none') not in pristine_props_list:
                        return False
                for item in root.findall('.//repos-status'):
                    if item.get('item', 'none') not in pristine_item_list:
                        return False
                    if item.get('props', 'none') not in pristine_props_list:
                        return False
                return True
        else:
            if self._output:
                self._output.warn("SVN::is_pristine for SVN v{} (less than {}) is not implemented,"
                                  " it is returning not-pristine always because it cannot compare"
                                  " with checked out version.".format(self.version,
                                                                      SVN.API_CHANGE_VERSION))
            return False

    def get_revision(self):
        return self._show_item('revision')

    def get_revision_message(self):
        output = self.run("log -r COMMITTED").splitlines()
        # NOTE(review): assumes the commit message is on the 4th output line;
        # the guard checks len > 2, so a 3-line output would raise IndexError
        # here -- confirm against real 'svn log' output (len > 3 may be meant).
        return output[3] if len(output) > 2 else None

    def get_repo_root(self):
        return self._show_item('wc-root')

    def get_last_changed_revision(self, use_wc_root=True):
        if use_wc_root:
            # Query at the working-copy root so the answer covers the whole
            # checkout, not just the current subfolder.
            return self._show_item(item='last-changed-revision', target=self.get_repo_root())
        else:
            return self._show_item(item='last-changed-revision')

    def get_branch(self):
        # Derived from the repo-relative URL under the conventional
        # branches/<name> or trunk layout.
        item = self._get_item("branches/[^/]+|trunk", "branch")
        return item.replace("branches/", "") if item else None

    def get_tag(self):
        item = self._get_item("tags/[^/]+", "tag")
        return item.replace("tags/", "") if item else None

    def _get_item(self, pattern, item_name):
        """Match *pattern* against the relative URL; return the match or None."""
        try:
            url = self._show_item('relative-url')
        except Exception as e:
            raise ConanException("Unable to get svn %s from %s: %s"
                                 % (item_name, self.folder, str(e)))
        item = re.search(pattern, url)
        return item.group(0) if item else None

    def check_repo(self):
        """ Check if it is a valid SVN repo """
        _check_repo(["svn", "info"], folder=self.folder)
|
import {load} from 'test/common';
import {methods} from '../maskAlgorithms';
// if the values are the same as in imageJ we consider it as currently correct
// TODO not obvious that those algorithms can deal with 16 bits images !
/*
Here are the results from imageJ
Default: 134
Huang: 134
Intermodes: 166
IsoData: 135
Li: 115
MaxEntropy: 126
Mean: 106
MinError(I): 101
Minimum: 234
Moments: 127
Otsu: 135
Percentile: 90
RenyiEntropy: 115
Shanbhag: 116
Triangle: 87
Yen: 108
*/
describe('Threshold calculation', function () {
  // Each case loads the same grayscale fixture and checks the computed
  // threshold against the value reported by ImageJ (see table above).
  // Skipped cases currently disagree with ImageJ and need investigation.
  it.skip('Huang should work like ImageJ', function () {
    return load('grayscale_by_zimmyrose.png').then(function (img) {
      methods.huang(img.histogram).should.equal(134);
    });
  });
  it('Intermodes should work like ImageJ', function () {
    return load('grayscale_by_zimmyrose.png').then(function (img) {
      methods.intermodes(img.histogram).should.equal(166);
    });
  });
  it('Isodata should work like ImageJ', function () {
    return load('grayscale_by_zimmyrose.png').then(function (img) {
      methods.isodata(img.histogram).should.equal(135);
    });
  });
  it('Percentile should work like ImageJ', function () {
    return load('grayscale_by_zimmyrose.png').then(function (img) {
      methods.percentile(img.histogram).should.equal(90);
    });
  });
  it.skip('Li should work like ImageJ', function () {
    return load('grayscale_by_zimmyrose.png').then(function (img) {
      methods.li(img.histogram).should.equal(115);
    });
  });
  it.skip('MaxEntropy should work like ImageJ', function () {
    return load('grayscale_by_zimmyrose.png').then(function (img) {
      methods.maxEntropy(img.histogram).should.equal(126);
    });
  });
  it.skip('Mean should work like ImageJ', function () {
    return load('grayscale_by_zimmyrose.png').then(function (img) {
      methods.mean(img.histogram).should.equal(106);
    });
  });
  it.skip('MinError should work like ImageJ', function () {
    return load('grayscale_by_zimmyrose.png').then(function (img) {
      methods.minError(img.histogram).should.equal(101);
    });
  });
  it('Minimum should work like ImageJ', function () {
    return load('grayscale_by_zimmyrose.png').then(function (img) {
      methods.minimum(img.histogram).should.equal(234);
    });
  });
  it.skip('Moments should work like ImageJ', function () {
    return load('grayscale_by_zimmyrose.png').then(function (img) {
      methods.moments(img.histogram).should.equal(127);
    });
  });
  it.skip('Otsu should work like ImageJ', function () {
    return load('grayscale_by_zimmyrose.png').then(function (img) {
      methods.otsu(img.histogram).should.equal(135);
    });
  });
  // Note: a second, identical 'Percentile' test used to live here;
  // removed as an accidental duplicate of the one above.
  it.skip('RenyiEntropy should work like ImageJ', function () {
    return load('grayscale_by_zimmyrose.png').then(function (img) {
      methods.renyiEntropy(img.histogram).should.equal(115);
    });
  });
  it.skip('Shanbhag should work like ImageJ', function () {
    return load('grayscale_by_zimmyrose.png').then(function (img) {
      methods.shanbhag(img.histogram).should.equal(116);
    });
  });
  it('Triangle should work like ImageJ', function () {
    return load('grayscale_by_zimmyrose.png').then(function (img) {
      methods.triangle(img.histogram).should.equal(87);
    });
  });
  // Title fixed: was 'Yem', the method under test is 'yen'.
  it.skip('Yen should work like ImageJ', function () {
    return load('grayscale_by_zimmyrose.png').then(function (img) {
      methods.yen(img.histogram).should.equal(108);
    });
  });
});
|
from tfdlg.eval import perplexity
from tfdlg.schedules import WarmupLinearDecay
from tfdlg.generations import TopKTopPGenerator
from tfdlg.utils import import_class
from tfdlg.utils import save_model
from tfdlg.utils import load_model
from tfdlg.utils import set_mixed_precision_policy
from tfdlg.utils import set_memory_growth
import tensorflow.keras as keras
import numpy as np
def main(tokenizer_model_dir, task_cls="task.LMTask",
         load_model_dir=None,
         do_train=True, do_eval=True, do_generate=False,
         # model parameters. The default value is the same as GPT2SmallConfig
         config_cls="tfdlg.configs.Config",
         num_layers=12, d_model=768, num_heads=12, d_ff=3072, vocab_size=50257,
         context_size=1024, attention_dropout_rate=0.1,
         residual_dropout_rate=0.1,
         embedding_dropout_rate=0.1, activation="gelu",
         kernel_initializer="he_normal", epsilon=1e-6,
         # Parameters for training
         train_file=None, valid_file=None, save_model_dir=None,
         batch_size=2, epochs=1,
         warmup_steps=0, max_learning_rate=1e-4, patience=1, clipnorm=1.0,
         # Flag to use mixed precision or not
         fp16=False,
         # Set memory growth no to allocate all the memory
         memory_growth=False,
         # Tensorboard setting
         tensorboard_dir=None,
         tensorboard_update_freq=100,
         ):
    """Train and/or evaluate a tfdlg language model.

    Either builds a fresh model from ``config_cls`` parameters or restores
    one from ``load_model_dir``, then optionally trains on ``train_file``,
    evaluates perplexity on ``valid_file`` and saves to ``save_model_dir``.
    Intended to be driven from the command line via ``fire.Fire``.

    NOTE(review): the ``do_generate`` flag is accepted but never used in
    this function -- confirm whether generation support was planned.
    """
    # memory_growth should be set before any GPU operations
    # (e.g. set_mixed_precision policy)
    if memory_growth:
        print("Set memory growth")
        set_memory_growth()
    if fp16:
        print("Set mixed precision policy")
        set_mixed_precision_policy()

    # Prepare config, model and task
    if load_model_dir:
        # Restoring: config comes bundled with the saved model.
        model, config = load_model(load_model_dir)
        task = import_class(task_cls)(config=config)
        tokenizer = task.prepare_tokenizer(model_dir=tokenizer_model_dir)
    else:
        # Define model config
        config = import_class(config_cls)(
            num_layers=num_layers, d_model=d_model, num_heads=num_heads,
            d_ff=d_ff, vocab_size=vocab_size, context_size=context_size,
            attention_dropout_rate=attention_dropout_rate,
            residual_dropout_rate=residual_dropout_rate,
            embedding_dropout_rate=embedding_dropout_rate,
            activation=activation, kernel_initializer=kernel_initializer,
            epsilon=epsilon,
        )
        # Define task
        task = import_class(task_cls)(config=config)
        tokenizer = task.prepare_tokenizer(model_dir=tokenizer_model_dir)
        # Override the vocab_size with the number of tokens in tokenizer
        config.vocab_size = len(tokenizer)
        # Prepare model
        model = task.model_cls(config)
        model.build(input_shape=(None, config.context_size))

    print("Model config:", config)
    model.summary()

    # Validation data is needed both for training callbacks and evaluation.
    if do_train or do_eval:
        valid_dataset = task.prepare_dataset(
            filename=valid_file,
            encode_fn=tokenizer.encode,
            batch_size=batch_size,
            shuffle=False,
            buffer_size=10000,
        )

    # Train
    if do_train:
        train_dataset = task.prepare_dataset(
            filename=train_file,
            encode_fn=tokenizer.encode,
            batch_size=batch_size,
            shuffle=True,
            buffer_size=10000,
        )
        # The LR schedule needs the total number of steps, which requires
        # one pass over the (streaming) dataset to count batches.
        print("Calculating num_steps")
        num_steps = sum(1 for _ in train_dataset)
        print("Num steps per epoch:", num_steps)

        schedule = WarmupLinearDecay(
            max_learning_rate=max_learning_rate,
            warmup_steps=warmup_steps,
            training_steps=num_steps*epochs
        )
        optimizer = keras.optimizers.Adam(
            schedule,
            beta_1=0.9,
            beta_2=0.999,
            epsilon=1e-8,
            clipnorm=clipnorm
        )
        model.compile(loss=task.loss_fn, optimizer=optimizer)

        # Define callbacks
        callbacks = [
            keras.callbacks.EarlyStopping(
                patience=patience,
                restore_best_weights=True
            ),
            # If you want to save checkpoints, remove the next comment out
            #keras.callbacks.ModelCheckpoint("keras_model/", save_best_only=True)
        ]
        if tensorboard_dir:
            callbacks.append(keras.callbacks.TensorBoard(
                log_dir=tensorboard_dir,
                update_freq=tensorboard_update_freq,
                profile_batch=0)
            )

        # The returned History object was previously bound to an unused
        # variable; drop it.
        model.fit(
            train_dataset,
            validation_data=valid_dataset,
            epochs=epochs,
            callbacks=callbacks,
        )

        if save_model_dir:
            save_model(save_model_dir, model, config)

    # Evaluate
    if do_eval:
        ppl = perplexity(model, valid_dataset)
        print("Validation PPL:", ppl)
if __name__ == "__main__":
    # Expose every keyword argument of main() as a CLI flag.
    import fire
    fire.Fire(main)
|
import Vue from 'vue'
import QField from '../field/QField.js'
import MaskMixin from '../../mixins/mask.js'
import debounce from '../../utils/debounce.js'
import { stop } from '../../utils/event.js'
export default Vue.extend({
  name: 'QInput',

  mixins: [ QField, MaskMixin ],

  props: {
    value: { required: true },
    type: {
      type: String,
      default: 'text'
    },
    debounce: [String, Number],    // ms to wait before emitting 'input'
    maxlength: [Number, String],
    autogrow: Boolean, // makes a textarea
    autofocus: Boolean,
    inputClass: [Array, String, Object],
    inputStyle: [Array, String, Object]
  },

  watch: {
    value (v) {
      if (this.hasMask === true) {
        // stopValueWatcher is set by __emitValue to skip re-processing
        // a value this component just emitted itself.
        if (this.stopValueWatcher === true) {
          this.stopValueWatcher = false
          return
        }
        this.__updateMaskValue(v)
      }
      else if (this.innerValue !== v) {
        this.innerValue = v
      }

      // textarea only
      this.autogrow === true && this.$nextTick(this.__adjustHeightDebounce)
    },

    autogrow (autogrow) {
      // textarea only
      if (autogrow === true) {
        this.$nextTick(this.__adjustHeightDebounce)
      }
      // if it has a number of rows set respect it
      else if (this.$attrs.rows > 0) {
        const inp = this.$refs.input
        inp.style.height = 'auto'
      }
    }
  },

  data () {
    // innerValue mirrors the displayed (possibly masked) value.
    return { innerValue: this.__getInitialMaskedValue() }
  },

  computed: {
    isTextarea () {
      return this.type === 'textarea' || this.autogrow === true
    },

    fieldClass () {
      return `q-${this.isTextarea === true ? 'textarea' : 'input'}` +
        (this.autogrow === true ? ' q-textarea--autogrow' : '')
    }
  },

  methods: {
    focus () {
      this.$refs.input.focus()
    },

    __onInput (e) {
      const val = e.target.value

      if (this.hasMask === true) {
        this.__updateMaskValue(val)
      }
      else {
        this.__emitValue(val)
      }

      // we need to trigger it immediately too,
      // to avoid "flickering"
      this.autogrow === true && this.__adjustHeight()
    },

    __emitValue (val, stopWatcher) {
      const fn = () => {
        // tempValue holds the user's keystrokes while a debounced emit
        // is pending (see domProps below).
        if (this.hasOwnProperty('tempValue') === true) {
          delete this.tempValue
        }
        if (this.value !== val) {
          stopWatcher === true && (this.stopValueWatcher = true)
          this.$emit('input', val)
        }
      }

      if (this.debounce !== void 0) {
        clearTimeout(this.emitTimer)
        this.tempValue = val
        this.emitTimer = setTimeout(fn, this.debounce)
      }
      else {
        fn()
      }
    },

    // textarea only
    __adjustHeight () {
      // Collapse first so scrollHeight reflects the real content height.
      const inp = this.$refs.input
      inp.style.height = '1px'
      inp.style.height = inp.scrollHeight + 'px'
    },

    __getControl (h) {
      const on = {
        ...this.$listeners,
        input: this.__onInput,
        // stop propagation: QField handles focus/blur itself
        focus: stop,
        blur: stop
      }

      if (this.hasMask === true) {
        on.keydown = this.__onMaskedKeydown
      }

      const attrs = {
        tabindex: 0,
        rows: this.type === 'textarea' ? 6 : void 0,
        ...this.$attrs,
        'aria-label': this.label,
        type: this.type,
        maxlength: this.maxlength,
        disabled: this.disable,
        readonly: this.readonly
      }

      if (this.autogrow === true) {
        // a single row; height is managed via __adjustHeight
        attrs.rows = 1
      }

      return h(this.isTextarea ? 'textarea' : 'input', {
        ref: 'input',
        staticClass: 'q-field__native',
        style: this.inputStyle,
        class: this.inputClass,
        attrs,
        on,
        domProps: {
          value: this.hasOwnProperty('tempValue') === true
            ? this.tempValue
            : this.innerValue
        }
      })
    }
  },

  created () {
    // textarea only
    this.__adjustHeightDebounce = debounce(this.__adjustHeight, 100)
  },

  mounted () {
    // textarea only
    this.autogrow === true && this.__adjustHeight()
    this.autofocus === true && this.$nextTick(this.focus)
  },

  beforeDestroy () {
    clearTimeout(this.emitTimer)
  }
})
|
import unittest
import shelve
import glob
from test import support
from collections.abc import MutableMapping
from test.test_dbm import dbm_iterator
def L1(s):
    """Decode the bytes *s* from Latin-1 into a str."""
    return str(s, "latin-1")
class byteskeydict(MutableMapping):
    """Mapping that supports bytes keys.

    Keys are stored internally as Latin-1-decoded strings and re-encoded
    to bytes on iteration.
    """

    def __init__(self):
        self.d = {}

    def __getitem__(self, key):
        return self.d[key.decode("latin-1")]

    def __setitem__(self, key, value):
        self.d[key.decode("latin-1")] = value

    def __delitem__(self, key):
        del self.d[key.decode("latin-1")]

    def __len__(self):
        return len(self.d)

    def iterkeys(self):
        # Yield keys back in their external bytes form.
        for k in self.d.keys():
            yield k.encode("latin-1")

    __iter__ = iterkeys

    def keys(self):
        return list(self.iterkeys())

    def copy(self):
        # Bug fix: the previous implementation called byteskeydict(self.d),
        # but __init__ takes no arguments, so copy() always raised TypeError.
        # Build an empty instance and shallow-copy the backing dict instead.
        new = byteskeydict()
        new.d = self.d.copy()
        return new
class TestCase(unittest.TestCase):
    """Direct tests of shelve.Shelf / shelve.open behaviour."""
    fn = "shelftemp.db"

    def tearDown(self):
        # Remove every backing file; some dbm backends create several
        # files with suffixes (.dir/.dat/.bak), hence the glob.
        for f in glob.glob(self.fn+"*"):
            support.unlink(f)

    def test_close(self):
        # Operations on a closed shelf must raise ValueError.
        d1 = {}
        s = shelve.Shelf(d1, protocol=2, writeback=False)
        s['key1'] = [1,2,3,4]
        self.assertEqual(s['key1'], [1,2,3,4])
        self.assertEqual(len(s), 1)
        s.close()
        self.assertRaises(ValueError, len, s)
        try:
            s['key1']
        except ValueError:
            pass
        else:
            self.fail('Closed shelf should not find a key')

    def test_ascii_file_shelf(self):
        # Round-trip through a file-backed shelf with pickle protocol 0.
        s = shelve.open(self.fn, protocol=0)
        try:
            s['key1'] = (1,2,3,4)
            self.assertEqual(s['key1'], (1,2,3,4))
        finally:
            s.close()

    def test_binary_file_shelf(self):
        # Same round-trip with pickle protocol 1.
        s = shelve.open(self.fn, protocol=1)
        try:
            s['key1'] = (1,2,3,4)
            self.assertEqual(s['key1'], (1,2,3,4))
        finally:
            s.close()

    def test_proto2_file_shelf(self):
        # Same round-trip with pickle protocol 2.
        s = shelve.open(self.fn, protocol=2)
        try:
            s['key1'] = (1,2,3,4)
            self.assertEqual(s['key1'], (1,2,3,4))
        finally:
            s.close()

    def test_in_memory_shelf(self):
        # Shelves backed by an in-memory mapping; different pickle
        # protocols must produce different stored payloads.
        d1 = byteskeydict()
        s = shelve.Shelf(d1, protocol=0)
        s['key1'] = (1,2,3,4)
        self.assertEqual(s['key1'], (1,2,3,4))
        s.close()
        d2 = byteskeydict()
        s = shelve.Shelf(d2, protocol=1)
        s['key1'] = (1,2,3,4)
        self.assertEqual(s['key1'], (1,2,3,4))
        s.close()

        self.assertEqual(len(d1), 1)
        self.assertEqual(len(d2), 1)
        self.assertNotEqual(d1.items(), d2.items())

    def test_mutable_entry(self):
        # Without writeback, in-place mutations of stored values are lost;
        # with writeback=True they are persisted.
        d1 = byteskeydict()
        s = shelve.Shelf(d1, protocol=2, writeback=False)
        s['key1'] = [1,2,3,4]
        self.assertEqual(s['key1'], [1,2,3,4])
        s['key1'].append(5)
        self.assertEqual(s['key1'], [1,2,3,4])
        s.close()

        d2 = byteskeydict()
        s = shelve.Shelf(d2, protocol=2, writeback=True)
        s['key1'] = [1,2,3,4]
        self.assertEqual(s['key1'], [1,2,3,4])
        s['key1'].append(5)
        self.assertEqual(s['key1'], [1,2,3,4,5])
        s.close()

        self.assertEqual(len(d1), 1)
        self.assertEqual(len(d2), 1)

    def test_keyencoding(self):
        d = {}
        key = 'P\u00f6p'
        # the default keyencoding is utf-8
        shelve.Shelf(d)[key] = [1]
        self.assertIn(key.encode('utf-8'), d)
        # but a different one can be given
        shelve.Shelf(d, keyencoding='latin-1')[key] = [1]
        self.assertIn(key.encode('latin-1'), d)
        # with all consequences
        s = shelve.Shelf(d, keyencoding='ascii')
        self.assertRaises(UnicodeEncodeError, s.__setitem__, key, [1])

    def test_writeback_also_writes_immediately(self):
        # Issue 5754
        d = {}
        key = 'key'
        encodedkey = key.encode('utf-8')
        s = shelve.Shelf(d, writeback=True)
        s[key] = [1]
        p1 = d[encodedkey]  # Will give a KeyError if backing store not updated
        s['key'].append(2)
        s.close()
        p2 = d[encodedkey]
        self.assertNotEqual(p1, p2)  # Write creates new object in store

    def test_with(self):
        # Shelf works as a context manager and is closed on exit.
        d1 = {}
        with shelve.Shelf(d1, protocol=2, writeback=False) as s:
            s['key1'] = [1,2,3,4]
            self.assertEqual(s['key1'], [1,2,3,4])
            self.assertEqual(len(s), 1)
        self.assertRaises(ValueError, len, s)
        try:
            s['key1']
        except ValueError:
            pass
        else:
            self.fail('Closed shelf should not find a key')
from test import mapping_tests
class TestShelveBase(mapping_tests.BasicTestMappingProtocol):
    """Run the generic mapping-protocol tests against shelve.Shelf.

    Subclasses set ``_args`` (pickle protocol) and ``_in_mem``
    (in-memory vs file-backed shelf).
    """
    fn = "shelftemp.db"
    counter = 0  # gives each file-backed shelf a unique filename

    def __init__(self, *args, **kw):
        self._db = []  # shelves opened by the current test, closed in tearDown
        mapping_tests.BasicTestMappingProtocol.__init__(self, *args, **kw)

    type2test = shelve.Shelf

    def _reference(self):
        # Sample contents used by the generic protocol tests.
        return {"key1":"value1", "key2":2, "key3":(1,2,3)}

    def _empty_mapping(self):
        # Create a fresh shelf and remember it for cleanup.
        if self._in_mem:
            x= shelve.Shelf(byteskeydict(), **self._args)
        else:
            self.counter+=1
            x= shelve.open(self.fn+str(self.counter), **self._args)
        self._db.append(x)
        return x

    def tearDown(self):
        # Close every shelf opened by the test, then delete backing files.
        for db in self._db:
            db.close()
        self._db = []
        if not self._in_mem:
            for f in glob.glob(self.fn+"*"):
                support.unlink(f)
# Concrete parameterizations of TestShelveBase: one class per pickle
# protocol, for both file-backed and in-memory shelves.
class TestAsciiFileShelve(TestShelveBase):
    _args={'protocol':0}
    _in_mem = False
class TestBinaryFileShelve(TestShelveBase):
    _args={'protocol':1}
    _in_mem = False
class TestProto2FileShelve(TestShelveBase):
    _args={'protocol':2}
    _in_mem = False
class TestAsciiMemShelve(TestShelveBase):
    _args={'protocol':0}
    _in_mem = True
class TestBinaryMemShelve(TestShelveBase):
    _args={'protocol':1}
    _in_mem = True
class TestProto2MemShelve(TestShelveBase):
    _args={'protocol':2}
    _in_mem = True
def test_main():
    """Run every shelf test suite once per available dbm backend."""
    suites = (
        TestAsciiFileShelve,
        TestBinaryFileShelve,
        TestProto2FileShelve,
        TestAsciiMemShelve,
        TestBinaryMemShelve,
        TestProto2MemShelve,
        TestCase,
    )
    # dbm_iterator switches the default dbm implementation on each pass,
    # so the file-backed suites are exercised against every backend.
    for module in dbm_iterator():
        support.run_unittest(*suites)
# Allow running this test module directly from the command line.
if __name__ == "__main__":
    test_main()
|
//// [/lib/initial-buildOutput.txt]
/lib/tsc --b /src/app --verbose
12:01:00 AM - Projects in this build:
* src/lib/tsconfig.json
* src/app/tsconfig.json
12:01:00 AM - Project 'src/lib/tsconfig.json' is out of date because output file 'src/lib/module.js' does not exist
12:01:00 AM - Building project '/src/lib/tsconfig.json'...
12:01:00 AM - Project 'src/app/tsconfig.json' is out of date because output file 'src/app/module.js' does not exist
12:01:00 AM - Building project '/src/app/tsconfig.json'...
exitCode:: ExitStatus.Success
//// [/src/app/module.d.ts]
declare module "file1" {
export const x = 10;
export class normalC {
}
export namespace normalN {
}
}
declare module "file2" {
export const y = 20;
}
declare const globalConst = 10;
declare module "file3" {
export const z = 30;
}
declare const myVar = 30;
//# sourceMappingURL=module.d.ts.map
//// [/src/app/module.d.ts.map]
{"version":3,"file":"module.d.ts","sourceRoot":"","sources":["../lib/file1.ts","../lib/file2.ts","../lib/global.ts","file3.ts","file4.ts"],"names":[],"mappings":";IAAA,MAAM,CAAC,MAAM,CAAC,KAAK,CAAC;IACpB,MAAM,OAAO,OAAO;KAMnB;IACD,MAAM,WAAW,OAAO,CAAC;KASxB;;;ICjBD,MAAM,CAAC,MAAM,CAAC,KAAK,CAAC;;ACApB,QAAA,MAAM,WAAW,KAAK,CAAC;;ICAvB,MAAM,CAAC,MAAM,CAAC,KAAK,CAAC;;ACApB,QAAA,MAAM,KAAK,KAAK,CAAC"}
//// [/src/app/module.d.ts.map.baseline.txt]
===================================================================
JsFile: module.d.ts
mapUrl: module.d.ts.map
sourceRoot:
sources: ../lib/file1.ts,../lib/file2.ts,../lib/global.ts,file3.ts,file4.ts
===================================================================
-------------------------------------------------------------------
emittedFile:/src/app/module.d.ts
sourceFile:../lib/file1.ts
-------------------------------------------------------------------
>>>declare module "file1" {
>>> export const x = 10;
1 >^^^^
2 > ^^^^^^
3 > ^
4 > ^^^^^^
5 > ^
6 > ^^^^^
7 > ^
8 > ^^^->
1 >
2 > export
3 >
4 > const
5 > x
6 > = 10
7 > ;
1 >Emitted(2, 5) Source(1, 1) + SourceIndex(0)
2 >Emitted(2, 11) Source(1, 7) + SourceIndex(0)
3 >Emitted(2, 12) Source(1, 8) + SourceIndex(0)
4 >Emitted(2, 18) Source(1, 14) + SourceIndex(0)
5 >Emitted(2, 19) Source(1, 15) + SourceIndex(0)
6 >Emitted(2, 24) Source(1, 20) + SourceIndex(0)
7 >Emitted(2, 25) Source(1, 21) + SourceIndex(0)
---
>>> export class normalC {
1->^^^^
2 > ^^^^^^
3 > ^^^^^^^
4 > ^^^^^^^
1->
>
2 > export
3 > class
4 > normalC
1->Emitted(3, 5) Source(2, 1) + SourceIndex(0)
2 >Emitted(3, 11) Source(2, 7) + SourceIndex(0)
3 >Emitted(3, 18) Source(2, 14) + SourceIndex(0)
4 >Emitted(3, 25) Source(2, 21) + SourceIndex(0)
---
>>> }
1 >^^^^^
2 > ^^^^^^^^^^^^^^^^^^^^^^^^^^->
1 > {
> /*@internal*/ constructor() { }
> /*@internal*/ prop: string;
> /*@internal*/ method() { }
> /*@internal*/ get c() { return 10; }
> /*@internal*/ set c(val: number) { }
>}
1 >Emitted(4, 6) Source(8, 2) + SourceIndex(0)
---
>>> export namespace normalN {
1->^^^^
2 > ^^^^^^
3 > ^^^^^^^^^^^
4 > ^^^^^^^
5 > ^
1->
>
2 > export
3 > namespace
4 > normalN
5 >
1->Emitted(5, 5) Source(9, 1) + SourceIndex(0)
2 >Emitted(5, 11) Source(9, 7) + SourceIndex(0)
3 >Emitted(5, 22) Source(9, 18) + SourceIndex(0)
4 >Emitted(5, 29) Source(9, 25) + SourceIndex(0)
5 >Emitted(5, 30) Source(9, 26) + SourceIndex(0)
---
>>> }
1 >^^^^^
1 >{
> /*@internal*/ export class C { }
> /*@internal*/ export function foo() {}
> /*@internal*/ export namespace someNamespace { export class C {} }
> /*@internal*/ export namespace someOther.something { export class someClass {} }
> /*@internal*/ export import someImport = someNamespace.C;
> /*@internal*/ export type internalType = internalC;
> /*@internal*/ export const internalConst = 10;
> /*@internal*/ export enum internalEnum { a, b, c }
>}
1 >Emitted(6, 6) Source(18, 2) + SourceIndex(0)
---
-------------------------------------------------------------------
emittedFile:/src/app/module.d.ts
sourceFile:../lib/file2.ts
-------------------------------------------------------------------
>>>}
>>>declare module "file2" {
>>> export const y = 20;
1 >^^^^
2 > ^^^^^^
3 > ^
4 > ^^^^^^
5 > ^
6 > ^^^^^
7 > ^
1 >
2 > export
3 >
4 > const
5 > y
6 > = 20
7 > ;
1 >Emitted(9, 5) Source(1, 1) + SourceIndex(1)
2 >Emitted(9, 11) Source(1, 7) + SourceIndex(1)
3 >Emitted(9, 12) Source(1, 8) + SourceIndex(1)
4 >Emitted(9, 18) Source(1, 14) + SourceIndex(1)
5 >Emitted(9, 19) Source(1, 15) + SourceIndex(1)
6 >Emitted(9, 24) Source(1, 20) + SourceIndex(1)
7 >Emitted(9, 25) Source(1, 21) + SourceIndex(1)
---
-------------------------------------------------------------------
emittedFile:/src/app/module.d.ts
sourceFile:../lib/global.ts
-------------------------------------------------------------------
>>>}
>>>declare const globalConst = 10;
1 >
2 >^^^^^^^^
3 > ^^^^^^
4 > ^^^^^^^^^^^
5 > ^^^^^
6 > ^
1 >
2 >
3 > const
4 > globalConst
5 > = 10
6 > ;
1 >Emitted(11, 1) Source(1, 1) + SourceIndex(2)
2 >Emitted(11, 9) Source(1, 1) + SourceIndex(2)
3 >Emitted(11, 15) Source(1, 7) + SourceIndex(2)
4 >Emitted(11, 26) Source(1, 18) + SourceIndex(2)
5 >Emitted(11, 31) Source(1, 23) + SourceIndex(2)
6 >Emitted(11, 32) Source(1, 24) + SourceIndex(2)
---
-------------------------------------------------------------------
emittedFile:/src/app/module.d.ts
sourceFile:file3.ts
-------------------------------------------------------------------
>>>declare module "file3" {
>>> export const z = 30;
1 >^^^^
2 > ^^^^^^
3 > ^
4 > ^^^^^^
5 > ^
6 > ^^^^^
7 > ^
1 >
2 > export
3 >
4 > const
5 > z
6 > = 30
7 > ;
1 >Emitted(13, 5) Source(1, 1) + SourceIndex(3)
2 >Emitted(13, 11) Source(1, 7) + SourceIndex(3)
3 >Emitted(13, 12) Source(1, 8) + SourceIndex(3)
4 >Emitted(13, 18) Source(1, 14) + SourceIndex(3)
5 >Emitted(13, 19) Source(1, 15) + SourceIndex(3)
6 >Emitted(13, 24) Source(1, 20) + SourceIndex(3)
7 >Emitted(13, 25) Source(1, 21) + SourceIndex(3)
---
-------------------------------------------------------------------
emittedFile:/src/app/module.d.ts
sourceFile:file4.ts
-------------------------------------------------------------------
>>>}
>>>declare const myVar = 30;
1 >
2 >^^^^^^^^
3 > ^^^^^^
4 > ^^^^^
5 > ^^^^^
6 > ^
7 > ^^^^^^^^^^->
1 >
2 >
3 > const
4 > myVar
5 > = 30
6 > ;
1 >Emitted(15, 1) Source(1, 1) + SourceIndex(4)
2 >Emitted(15, 9) Source(1, 1) + SourceIndex(4)
3 >Emitted(15, 15) Source(1, 7) + SourceIndex(4)
4 >Emitted(15, 20) Source(1, 12) + SourceIndex(4)
5 >Emitted(15, 25) Source(1, 17) + SourceIndex(4)
6 >Emitted(15, 26) Source(1, 18) + SourceIndex(4)
---
>>>//# sourceMappingURL=module.d.ts.map
//// [/src/app/module.js]
/*@internal*/ var myGlob = 20;
define("file1", ["require", "exports"], function (require, exports) {
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.x = 10;
var normalC = /** @class */ (function () {
/*@internal*/ function normalC() {
}
/*@internal*/ normalC.prototype.method = function () { };
Object.defineProperty(normalC.prototype, "c", {
/*@internal*/ get: function () { return 10; },
/*@internal*/ set: function (val) { },
enumerable: false,
configurable: true
});
return normalC;
}());
exports.normalC = normalC;
var normalN;
(function (normalN) {
/*@internal*/ var C = /** @class */ (function () {
function C() {
}
return C;
}());
normalN.C = C;
/*@internal*/ function foo() { }
normalN.foo = foo;
/*@internal*/ var someNamespace;
(function (someNamespace) {
var C = /** @class */ (function () {
function C() {
}
return C;
}());
someNamespace.C = C;
})(someNamespace = normalN.someNamespace || (normalN.someNamespace = {}));
/*@internal*/ var someOther;
(function (someOther) {
var something;
(function (something) {
var someClass = /** @class */ (function () {
function someClass() {
}
return someClass;
}());
something.someClass = someClass;
})(something = someOther.something || (someOther.something = {}));
})(someOther = normalN.someOther || (normalN.someOther = {}));
/*@internal*/ normalN.someImport = someNamespace.C;
/*@internal*/ normalN.internalConst = 10;
/*@internal*/ var internalEnum;
(function (internalEnum) {
internalEnum[internalEnum["a"] = 0] = "a";
internalEnum[internalEnum["b"] = 1] = "b";
internalEnum[internalEnum["c"] = 2] = "c";
})(internalEnum = normalN.internalEnum || (normalN.internalEnum = {}));
})(normalN = exports.normalN || (exports.normalN = {}));
/*@internal*/ var internalC = /** @class */ (function () {
function internalC() {
}
return internalC;
}());
exports.internalC = internalC;
/*@internal*/ function internalfoo() { }
exports.internalfoo = internalfoo;
/*@internal*/ var internalNamespace;
(function (internalNamespace) {
var someClass = /** @class */ (function () {
function someClass() {
}
return someClass;
}());
internalNamespace.someClass = someClass;
})(internalNamespace = exports.internalNamespace || (exports.internalNamespace = {}));
/*@internal*/ var internalOther;
(function (internalOther) {
var something;
(function (something) {
var someClass = /** @class */ (function () {
function someClass() {
}
return someClass;
}());
something.someClass = someClass;
})(something = internalOther.something || (internalOther.something = {}));
})(internalOther = exports.internalOther || (exports.internalOther = {}));
/*@internal*/ exports.internalImport = internalNamespace.someClass;
/*@internal*/ exports.internalConst = 10;
/*@internal*/ var internalEnum;
(function (internalEnum) {
internalEnum[internalEnum["a"] = 0] = "a";
internalEnum[internalEnum["b"] = 1] = "b";
internalEnum[internalEnum["c"] = 2] = "c";
})(internalEnum = exports.internalEnum || (exports.internalEnum = {}));
});
define("file2", ["require", "exports"], function (require, exports) {
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.y = 20;
});
var globalConst = 10;
define("file3", ["require", "exports"], function (require, exports) {
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.z = 30;
});
var myVar = 30;
//# sourceMappingURL=module.js.map
//// [/src/app/module.js.map]
{"version":3,"file":"module.js","sourceRoot":"","sources":["../lib/file0.ts","../lib/file1.ts","../lib/file2.ts","../lib/global.ts","file3.ts","file4.ts"],"names":[],"mappings":"AAAA,aAAa,CAAC,IAAM,MAAM,GAAG,EAAE,CAAC;;;;ICAnB,QAAA,CAAC,GAAG,EAAE,CAAC;IACpB;QACI,aAAa,CAAC;QAAgB,CAAC;QAE/B,aAAa,CAAC,wBAAM,GAAN,cAAW,CAAC;QACZ,sBAAI,sBAAC;YAAnB,aAAa,MAAC,cAAU,OAAO,EAAE,CAAC,CAAC,CAAC;YACpC,aAAa,MAAC,UAAM,GAAW,IAAI,CAAC;;;WADA;QAExC,cAAC;IAAD,CAAC,AAND,IAMC;IANY,0BAAO;IAOpB,IAAiB,OAAO,CASvB;IATD,WAAiB,OAAO;QACpB,aAAa,CAAC;YAAA;YAAiB,CAAC;YAAD,QAAC;QAAD,CAAC,AAAlB,IAAkB;QAAL,SAAC,IAAI,CAAA;QAChC,aAAa,CAAC,SAAgB,GAAG,KAAI,CAAC;QAAR,WAAG,MAAK,CAAA;QACtC,aAAa,CAAC,IAAiB,aAAa,CAAsB;QAApD,WAAiB,aAAa;YAAG;gBAAA;gBAAgB,CAAC;gBAAD,QAAC;YAAD,CAAC,AAAjB,IAAiB;YAAJ,eAAC,IAAG,CAAA;QAAC,CAAC,EAAnC,aAAa,GAAb,qBAAa,KAAb,qBAAa,QAAsB;QAClE,aAAa,CAAC,IAAiB,SAAS,CAAwC;QAAlE,WAAiB,SAAS;YAAC,IAAA,SAAS,CAA8B;YAAvC,WAAA,SAAS;gBAAG;oBAAA;oBAAwB,CAAC;oBAAD,gBAAC;gBAAD,CAAC,AAAzB,IAAyB;gBAAZ,mBAAS,YAAG,CAAA;YAAC,CAAC,EAAvC,SAAS,GAAT,mBAAS,KAAT,mBAAS,QAA8B;QAAD,CAAC,EAAjD,SAAS,GAAT,iBAAS,KAAT,iBAAS,QAAwC;QAChF,aAAa,CAAe,kBAAU,GAAG,aAAa,CAAC,CAAC,CAAC;QAEzD,aAAa,CAAc,qBAAa,GAAG,EAAE,CAAC;QAC9C,aAAa,CAAC,IAAY,YAAwB;QAApC,WAAY,YAAY;YAAG,yCAAC,CAAA;YAAE,yCAAC,CAAA;YAAE,yCAAC,CAAA;QAAC,CAAC,EAAxB,YAAY,GAAZ,oBAAY,KAAZ,oBAAY,QAAY;IACtD,CAAC,EATgB,OAAO,GAAP,eAAO,KAAP,eAAO,QASvB;IACD,aAAa,CAAC;QAAA;QAAwB,CAAC;QAAD,gBAAC;IAAD,CAAC,AAAzB,IAAyB;IAAZ,8BAAS;IACpC,aAAa,CAAC,SAAgB,WAAW,KAAI,CAAC;IAAhC,kCAAgC;IAC9C,aAAa,CAAC,IAAiB,iBAAiB,CAA8B;IAAhE,WAAiB,iBAAiB;QAAG;YAAA;YAAwB,CAAC;YAAD,gBAAC;QAAD,CAAC,AAAzB,IAAyB;QAAZ,2BAAS,YAAG,CAAA;IAAC,CAAC,EAA/C,iBAAiB,GAAjB,yBAAiB,KAAjB,yBAAiB,QAA8B;IAC9E,aAAa,CAAC,IAAiB,aAAa,CAAwC;IAAtE,WAAiB,aAAa;QAAC,IAAA,SAAS,CAA8B;QAAvC,WAAA,SAAS;YAAG;gBAAA;gBAAwB,CAAC;gBAAD,gBAAC;YAAD,CAAC,AAAzB,IAAyB;YAAZ,mBAAS,YAAG,CAAA;QAAC,CAAC,EAAvC,SAAS,GAAT,uBAAS,KAAT,uBAAS,QAA8B;IAAD,CAAC,EAArD,aAAa,GAAb,qBAAa,KAAb,qBAAa,QAAwC;IACpF,aAAa,CAAe,QAAA,cAAc,GAAG,iBAAiB,CAAC,SAAS,CAAC;IAEzE,aAAa,CAA
c,QAAA,aAAa,GAAG,EAAE,CAAC;IAC9C,aAAa,CAAC,IAAY,YAAwB;IAApC,WAAY,YAAY;QAAG,yCAAC,CAAA;QAAE,yCAAC,CAAA;QAAE,yCAAC,CAAA;IAAC,CAAC,EAAxB,YAAY,GAAZ,oBAAY,KAAZ,oBAAY,QAAY;;;;;ICzBrC,QAAA,CAAC,GAAG,EAAE,CAAC;;ACApB,IAAM,WAAW,GAAG,EAAE,CAAC;;;;ICAV,QAAA,CAAC,GAAG,EAAE,CAAC;;ACApB,IAAM,KAAK,GAAG,EAAE,CAAC"}
//// [/src/app/module.js.map.baseline.txt]
===================================================================
JsFile: module.js
mapUrl: module.js.map
sourceRoot:
sources: ../lib/file0.ts,../lib/file1.ts,../lib/file2.ts,../lib/global.ts,file3.ts,file4.ts
===================================================================
-------------------------------------------------------------------
emittedFile:/src/app/module.js
sourceFile:../lib/file0.ts
-------------------------------------------------------------------
>>>/*@internal*/ var myGlob = 20;
1 >
2 >^^^^^^^^^^^^^
3 > ^
4 > ^^^^
5 > ^^^^^^
6 > ^^^
7 > ^^
8 > ^
9 > ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^->
1 >
2 >/*@internal*/
3 >
4 > const
5 > myGlob
6 > =
7 > 20
8 > ;
1 >Emitted(1, 1) Source(1, 1) + SourceIndex(0)
2 >Emitted(1, 14) Source(1, 14) + SourceIndex(0)
3 >Emitted(1, 15) Source(1, 15) + SourceIndex(0)
4 >Emitted(1, 19) Source(1, 21) + SourceIndex(0)
5 >Emitted(1, 25) Source(1, 27) + SourceIndex(0)
6 >Emitted(1, 28) Source(1, 30) + SourceIndex(0)
7 >Emitted(1, 30) Source(1, 32) + SourceIndex(0)
8 >Emitted(1, 31) Source(1, 33) + SourceIndex(0)
---
-------------------------------------------------------------------
emittedFile:/src/app/module.js
sourceFile:../lib/file1.ts
-------------------------------------------------------------------
>>>define("file1", ["require", "exports"], function (require, exports) {
>>> "use strict";
>>> Object.defineProperty(exports, "__esModule", { value: true });
>>> exports.x = 10;
1->^^^^
2 > ^^^^^^^^
3 > ^
4 > ^^^
5 > ^^
6 > ^
7 > ^^^^^^^^^^^^^^^^^^^^^^^^^^^^->
1->export const
2 >
3 > x
4 > =
5 > 10
6 > ;
1->Emitted(5, 5) Source(1, 14) + SourceIndex(1)
2 >Emitted(5, 13) Source(1, 14) + SourceIndex(1)
3 >Emitted(5, 14) Source(1, 15) + SourceIndex(1)
4 >Emitted(5, 17) Source(1, 18) + SourceIndex(1)
5 >Emitted(5, 19) Source(1, 20) + SourceIndex(1)
6 >Emitted(5, 20) Source(1, 21) + SourceIndex(1)
---
>>> var normalC = /** @class */ (function () {
1->^^^^
2 > ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^->
1->
>
1->Emitted(6, 5) Source(2, 1) + SourceIndex(1)
---
>>> /*@internal*/ function normalC() {
1->^^^^^^^^
2 > ^^^^^^^^^^^^^
3 > ^
1->export class normalC {
>
2 > /*@internal*/
3 >
1->Emitted(7, 9) Source(3, 5) + SourceIndex(1)
2 >Emitted(7, 22) Source(3, 18) + SourceIndex(1)
3 >Emitted(7, 23) Source(3, 19) + SourceIndex(1)
---
>>> }
1 >^^^^^^^^
2 > ^
3 > ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^->
1 >constructor() {
2 > }
1 >Emitted(8, 9) Source(3, 35) + SourceIndex(1)
2 >Emitted(8, 10) Source(3, 36) + SourceIndex(1)
---
>>> /*@internal*/ normalC.prototype.method = function () { };
1->^^^^^^^^
2 > ^^^^^^^^^^^^^
3 > ^
4 > ^^^^^^^^^^^^^^^^^^^^^^^^
5 > ^^^
6 > ^^^^^^^^^^^^^^
7 > ^
1->
> /*@internal*/ prop: string;
>
2 > /*@internal*/
3 >
4 > method
5 >
6 > method() {
7 > }
1->Emitted(9, 9) Source(5, 5) + SourceIndex(1)
2 >Emitted(9, 22) Source(5, 18) + SourceIndex(1)
3 >Emitted(9, 23) Source(5, 19) + SourceIndex(1)
4 >Emitted(9, 47) Source(5, 25) + SourceIndex(1)
5 >Emitted(9, 50) Source(5, 19) + SourceIndex(1)
6 >Emitted(9, 64) Source(5, 30) + SourceIndex(1)
7 >Emitted(9, 65) Source(5, 31) + SourceIndex(1)
---
>>> Object.defineProperty(normalC.prototype, "c", {
1 >^^^^^^^^
2 > ^^^^^^^^^^^^^^^^^^^^^^
3 > ^^^^^^^^^^^^^^^^^^^^^^
4 > ^^^^^^^->
1 >
> /*@internal*/
2 > get
3 > c
1 >Emitted(10, 9) Source(6, 19) + SourceIndex(1)
2 >Emitted(10, 31) Source(6, 23) + SourceIndex(1)
3 >Emitted(10, 53) Source(6, 24) + SourceIndex(1)
---
>>> /*@internal*/ get: function () { return 10; },
1->^^^^^^^^^^^^
2 > ^^^^^^^^^^^^^
3 > ^^^^^^
4 > ^^^^^^^^^^^^^^
5 > ^^^^^^^
6 > ^^
7 > ^
8 > ^
9 > ^
1->
2 > /*@internal*/
3 >
4 > get c() {
5 > return
6 > 10
7 > ;
8 >
9 > }
1->Emitted(11, 13) Source(6, 5) + SourceIndex(1)
2 >Emitted(11, 26) Source(6, 18) + SourceIndex(1)
3 >Emitted(11, 32) Source(6, 19) + SourceIndex(1)
4 >Emitted(11, 46) Source(6, 29) + SourceIndex(1)
5 >Emitted(11, 53) Source(6, 36) + SourceIndex(1)
6 >Emitted(11, 55) Source(6, 38) + SourceIndex(1)
7 >Emitted(11, 56) Source(6, 39) + SourceIndex(1)
8 >Emitted(11, 57) Source(6, 40) + SourceIndex(1)
9 >Emitted(11, 58) Source(6, 41) + SourceIndex(1)
---
>>> /*@internal*/ set: function (val) { },
1 >^^^^^^^^^^^^
2 > ^^^^^^^^^^^^^
3 > ^^^^^^
4 > ^^^^^^^^^^
5 > ^^^
6 > ^^^^
7 > ^
1 >
>
2 > /*@internal*/
3 >
4 > set c(
5 > val: number
6 > ) {
7 > }
1 >Emitted(12, 13) Source(7, 5) + SourceIndex(1)
2 >Emitted(12, 26) Source(7, 18) + SourceIndex(1)
3 >Emitted(12, 32) Source(7, 19) + SourceIndex(1)
4 >Emitted(12, 42) Source(7, 25) + SourceIndex(1)
5 >Emitted(12, 45) Source(7, 36) + SourceIndex(1)
6 >Emitted(12, 49) Source(7, 40) + SourceIndex(1)
7 >Emitted(12, 50) Source(7, 41) + SourceIndex(1)
---
>>> enumerable: false,
>>> configurable: true
>>> });
1 >^^^^^^^^^^^
2 > ^^^^^^^^^^^^^->
1 >
1 >Emitted(15, 12) Source(6, 41) + SourceIndex(1)
---
>>> return normalC;
1->^^^^^^^^
2 > ^^^^^^^^^^^^^^
1->
> /*@internal*/ set c(val: number) { }
>
2 > }
1->Emitted(16, 9) Source(8, 1) + SourceIndex(1)
2 >Emitted(16, 23) Source(8, 2) + SourceIndex(1)
---
>>> }());
1 >^^^^
2 > ^
3 >
4 > ^^^^
5 > ^^^^^^^^^^^^^^^^^^^^^^->
1 >
2 > }
3 >
4 > export class normalC {
> /*@internal*/ constructor() { }
> /*@internal*/ prop: string;
> /*@internal*/ method() { }
> /*@internal*/ get c() { return 10; }
> /*@internal*/ set c(val: number) { }
> }
1 >Emitted(17, 5) Source(8, 1) + SourceIndex(1)
2 >Emitted(17, 6) Source(8, 2) + SourceIndex(1)
3 >Emitted(17, 6) Source(2, 1) + SourceIndex(1)
4 >Emitted(17, 10) Source(8, 2) + SourceIndex(1)
---
>>> exports.normalC = normalC;
1->^^^^
2 > ^^^^^^^^^^^^^^^^^^^^^^^^^^
1->
2 > normalC
1->Emitted(18, 5) Source(2, 14) + SourceIndex(1)
2 >Emitted(18, 31) Source(2, 21) + SourceIndex(1)
---
>>> var normalN;
1 >^^^^
2 > ^^^^
3 > ^^^^^^^
4 > ^
5 > ^^^^^^^^^^->
1 > {
> /*@internal*/ constructor() { }
> /*@internal*/ prop: string;
> /*@internal*/ method() { }
> /*@internal*/ get c() { return 10; }
> /*@internal*/ set c(val: number) { }
>}
>
2 > export namespace
3 > normalN
4 > {
> /*@internal*/ export class C { }
> /*@internal*/ export function foo() {}
> /*@internal*/ export namespace someNamespace { export class C {} }
> /*@internal*/ export namespace someOther.something { export class someClass {} }
> /*@internal*/ export import someImport = someNamespace.C;
> /*@internal*/ export type internalType = internalC;
> /*@internal*/ export const internalConst = 10;
> /*@internal*/ export enum internalEnum { a, b, c }
> }
1 >Emitted(19, 5) Source(9, 1) + SourceIndex(1)
2 >Emitted(19, 9) Source(9, 18) + SourceIndex(1)
3 >Emitted(19, 16) Source(9, 25) + SourceIndex(1)
4 >Emitted(19, 17) Source(18, 2) + SourceIndex(1)
---
>>> (function (normalN) {
1->^^^^
2 > ^^^^^^^^^^^
3 > ^^^^^^^
4 > ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^->
1->
2 > export namespace
3 > normalN
1->Emitted(20, 5) Source(9, 1) + SourceIndex(1)
2 >Emitted(20, 16) Source(9, 18) + SourceIndex(1)
3 >Emitted(20, 23) Source(9, 25) + SourceIndex(1)
---
>>> /*@internal*/ var C = /** @class */ (function () {
1->^^^^^^^^
2 > ^^^^^^^^^^^^^
3 > ^
4 > ^^^^^->
1-> {
>
2 > /*@internal*/
3 >
1->Emitted(21, 9) Source(10, 5) + SourceIndex(1)
2 >Emitted(21, 22) Source(10, 18) + SourceIndex(1)
3 >Emitted(21, 23) Source(10, 19) + SourceIndex(1)
---
>>> function C() {
1->^^^^^^^^^^^^
2 > ^^->
1->
1->Emitted(22, 13) Source(10, 19) + SourceIndex(1)
---
>>> }
1->^^^^^^^^^^^^
2 > ^
3 > ^^^^^^^^^->
1->export class C {
2 > }
1->Emitted(23, 13) Source(10, 36) + SourceIndex(1)
2 >Emitted(23, 14) Source(10, 37) + SourceIndex(1)
---
>>> return C;
1->^^^^^^^^^^^^
2 > ^^^^^^^^
1->
2 > }
1->Emitted(24, 13) Source(10, 36) + SourceIndex(1)
2 >Emitted(24, 21) Source(10, 37) + SourceIndex(1)
---
>>> }());
1 >^^^^^^^^
2 > ^
3 >
4 > ^^^^
5 > ^^^^^^^^^^->
1 >
2 > }
3 >
4 > export class C { }
1 >Emitted(25, 9) Source(10, 36) + SourceIndex(1)
2 >Emitted(25, 10) Source(10, 37) + SourceIndex(1)
3 >Emitted(25, 10) Source(10, 19) + SourceIndex(1)
4 >Emitted(25, 14) Source(10, 37) + SourceIndex(1)
---
>>> normalN.C = C;
1->^^^^^^^^
2 > ^^^^^^^^^
3 > ^^^^
4 > ^
5 > ^^^^^^^^^^^^^^^^^^^->
1->
2 > C
3 > { }
4 >
1->Emitted(26, 9) Source(10, 32) + SourceIndex(1)
2 >Emitted(26, 18) Source(10, 33) + SourceIndex(1)
3 >Emitted(26, 22) Source(10, 37) + SourceIndex(1)
4 >Emitted(26, 23) Source(10, 37) + SourceIndex(1)
---
>>> /*@internal*/ function foo() { }
1->^^^^^^^^
2 > ^^^^^^^^^^^^^
3 > ^
4 > ^^^^^^^^^
5 > ^^^
6 > ^^^^^
7 > ^
1->
>
2 > /*@internal*/
3 >
4 > export function
5 > foo
6 > () {
7 > }
1->Emitted(27, 9) Source(11, 5) + SourceIndex(1)
2 >Emitted(27, 22) Source(11, 18) + SourceIndex(1)
3 >Emitted(27, 23) Source(11, 19) + SourceIndex(1)
4 >Emitted(27, 32) Source(11, 35) + SourceIndex(1)
5 >Emitted(27, 35) Source(11, 38) + SourceIndex(1)
6 >Emitted(27, 40) Source(11, 42) + SourceIndex(1)
7 >Emitted(27, 41) Source(11, 43) + SourceIndex(1)
---
>>> normalN.foo = foo;
1 >^^^^^^^^
2 > ^^^^^^^^^^^
3 > ^^^^^^
4 > ^
5 > ^^^^^^^^^^^^^^^->
1 >
2 > foo
3 > () {}
4 >
1 >Emitted(28, 9) Source(11, 35) + SourceIndex(1)
2 >Emitted(28, 20) Source(11, 38) + SourceIndex(1)
3 >Emitted(28, 26) Source(11, 43) + SourceIndex(1)
4 >Emitted(28, 27) Source(11, 43) + SourceIndex(1)
---
>>> /*@internal*/ var someNamespace;
1->^^^^^^^^
2 > ^^^^^^^^^^^^^
3 > ^
4 > ^^^^
5 > ^^^^^^^^^^^^^
6 > ^
1->
>
2 > /*@internal*/
3 >
4 > export namespace
5 > someNamespace
6 > { export class C {} }
1->Emitted(29, 9) Source(12, 5) + SourceIndex(1)
2 >Emitted(29, 22) Source(12, 18) + SourceIndex(1)
3 >Emitted(29, 23) Source(12, 19) + SourceIndex(1)
4 >Emitted(29, 27) Source(12, 36) + SourceIndex(1)
5 >Emitted(29, 40) Source(12, 49) + SourceIndex(1)
6 >Emitted(29, 41) Source(12, 71) + SourceIndex(1)
---
>>> (function (someNamespace) {
1 >^^^^^^^^
2 > ^^^^^^^^^^^
3 > ^^^^^^^^^^^^^
4 > ^^^^^^^^^^^^^^^^^->
1 >
2 > export namespace
3 > someNamespace
1 >Emitted(30, 9) Source(12, 19) + SourceIndex(1)
2 >Emitted(30, 20) Source(12, 36) + SourceIndex(1)
3 >Emitted(30, 33) Source(12, 49) + SourceIndex(1)
---
>>> var C = /** @class */ (function () {
1->^^^^^^^^^^^^
2 > ^^^^^^^^^^^^^^^^^^^->
1-> {
1->Emitted(31, 13) Source(12, 52) + SourceIndex(1)
---
>>> function C() {
1->^^^^^^^^^^^^^^^^
2 > ^^->
1->
1->Emitted(32, 17) Source(12, 52) + SourceIndex(1)
---
>>> }
1->^^^^^^^^^^^^^^^^
2 > ^
3 > ^^^^^^^^^->
1->export class C {
2 > }
1->Emitted(33, 17) Source(12, 68) + SourceIndex(1)
2 >Emitted(33, 18) Source(12, 69) + SourceIndex(1)
---
>>> return C;
1->^^^^^^^^^^^^^^^^
2 > ^^^^^^^^
1->
2 > }
1->Emitted(34, 17) Source(12, 68) + SourceIndex(1)
2 >Emitted(34, 25) Source(12, 69) + SourceIndex(1)
---
>>> }());
1 >^^^^^^^^^^^^
2 > ^
3 >
4 > ^^^^
5 > ^^^^^^^^^^^^^^^^->
1 >
2 > }
3 >
4 > export class C {}
1 >Emitted(35, 13) Source(12, 68) + SourceIndex(1)
2 >Emitted(35, 14) Source(12, 69) + SourceIndex(1)
3 >Emitted(35, 14) Source(12, 52) + SourceIndex(1)
4 >Emitted(35, 18) Source(12, 69) + SourceIndex(1)
---
>>> someNamespace.C = C;
1->^^^^^^^^^^^^
2 > ^^^^^^^^^^^^^^^
3 > ^^^^
4 > ^
5 > ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^->
1->
2 > C
3 > {}
4 >
1->Emitted(36, 13) Source(12, 65) + SourceIndex(1)
2 >Emitted(36, 28) Source(12, 66) + SourceIndex(1)
3 >Emitted(36, 32) Source(12, 69) + SourceIndex(1)
4 >Emitted(36, 33) Source(12, 69) + SourceIndex(1)
---
>>> })(someNamespace = normalN.someNamespace || (normalN.someNamespace = {}));
1->^^^^^^^^
2 > ^
3 > ^^
4 > ^^^^^^^^^^^^^
5 > ^^^
6 > ^^^^^^^^^^^^^^^^^^^^^
7 > ^^^^^
8 > ^^^^^^^^^^^^^^^^^^^^^
9 > ^^^^^^^^
1->
2 > }
3 >
4 > someNamespace
5 >
6 > someNamespace
7 >
8 > someNamespace
9 > { export class C {} }
1->Emitted(37, 9) Source(12, 70) + SourceIndex(1)
2 >Emitted(37, 10) Source(12, 71) + SourceIndex(1)
3 >Emitted(37, 12) Source(12, 36) + SourceIndex(1)
4 >Emitted(37, 25) Source(12, 49) + SourceIndex(1)
5 >Emitted(37, 28) Source(12, 36) + SourceIndex(1)
6 >Emitted(37, 49) Source(12, 49) + SourceIndex(1)
7 >Emitted(37, 54) Source(12, 36) + SourceIndex(1)
8 >Emitted(37, 75) Source(12, 49) + SourceIndex(1)
9 >Emitted(37, 83) Source(12, 71) + SourceIndex(1)
---
>>> /*@internal*/ var someOther;
1 >^^^^^^^^
2 > ^^^^^^^^^^^^^
3 > ^
4 > ^^^^
5 > ^^^^^^^^^
6 > ^
1 >
>
2 > /*@internal*/
3 >
4 > export namespace
5 > someOther
6 > .something { export class someClass {} }
1 >Emitted(38, 9) Source(13, 5) + SourceIndex(1)
2 >Emitted(38, 22) Source(13, 18) + SourceIndex(1)
3 >Emitted(38, 23) Source(13, 19) + SourceIndex(1)
4 >Emitted(38, 27) Source(13, 36) + SourceIndex(1)
5 >Emitted(38, 36) Source(13, 45) + SourceIndex(1)
6 >Emitted(38, 37) Source(13, 85) + SourceIndex(1)
---
>>> (function (someOther) {
1 >^^^^^^^^
2 > ^^^^^^^^^^^
3 > ^^^^^^^^^
1 >
2 > export namespace
3 > someOther
1 >Emitted(39, 9) Source(13, 19) + SourceIndex(1)
2 >Emitted(39, 20) Source(13, 36) + SourceIndex(1)
3 >Emitted(39, 29) Source(13, 45) + SourceIndex(1)
---
>>> var something;
1 >^^^^^^^^^^^^
2 > ^^^^
3 > ^^^^^^^^^
4 > ^
5 > ^^^^^^^^^^->
1 >.
2 >
3 > something
4 > { export class someClass {} }
1 >Emitted(40, 13) Source(13, 46) + SourceIndex(1)
2 >Emitted(40, 17) Source(13, 46) + SourceIndex(1)
3 >Emitted(40, 26) Source(13, 55) + SourceIndex(1)
4 >Emitted(40, 27) Source(13, 85) + SourceIndex(1)
---
>>> (function (something) {
1->^^^^^^^^^^^^
2 > ^^^^^^^^^^^
3 > ^^^^^^^^^
4 > ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^->
1->
2 >
3 > something
1->Emitted(41, 13) Source(13, 46) + SourceIndex(1)
2 >Emitted(41, 24) Source(13, 46) + SourceIndex(1)
3 >Emitted(41, 33) Source(13, 55) + SourceIndex(1)
---
>>> var someClass = /** @class */ (function () {
1->^^^^^^^^^^^^^^^^
2 > ^^^^^^^^^^^^^^^^^^^^^^^^^^^->
1-> {
1->Emitted(42, 17) Source(13, 58) + SourceIndex(1)
---
>>> function someClass() {
1->^^^^^^^^^^^^^^^^^^^^
2 > ^^->
1->
1->Emitted(43, 21) Source(13, 58) + SourceIndex(1)
---
>>> }
1->^^^^^^^^^^^^^^^^^^^^
2 > ^
3 > ^^^^^^^^^^^^^^^^^->
1->export class someClass {
2 > }
1->Emitted(44, 21) Source(13, 82) + SourceIndex(1)
2 >Emitted(44, 22) Source(13, 83) + SourceIndex(1)
---
>>> return someClass;
1->^^^^^^^^^^^^^^^^^^^^
2 > ^^^^^^^^^^^^^^^^
1->
2 > }
1->Emitted(45, 21) Source(13, 82) + SourceIndex(1)
2 >Emitted(45, 37) Source(13, 83) + SourceIndex(1)
---
>>> }());
1 >^^^^^^^^^^^^^^^^
2 > ^
3 >
4 > ^^^^
5 > ^^^^^^^^^^^^^^^^^^^^^^^^^^^^->
1 >
2 > }
3 >
4 > export class someClass {}
1 >Emitted(46, 17) Source(13, 82) + SourceIndex(1)
2 >Emitted(46, 18) Source(13, 83) + SourceIndex(1)
3 >Emitted(46, 18) Source(13, 58) + SourceIndex(1)
4 >Emitted(46, 22) Source(13, 83) + SourceIndex(1)
---
>>> something.someClass = someClass;
1->^^^^^^^^^^^^^^^^
2 > ^^^^^^^^^^^^^^^^^^^
3 > ^^^^^^^^^^^^
4 > ^
5 > ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^->
1->
2 > someClass
3 > {}
4 >
1->Emitted(47, 17) Source(13, 71) + SourceIndex(1)
2 >Emitted(47, 36) Source(13, 80) + SourceIndex(1)
3 >Emitted(47, 48) Source(13, 83) + SourceIndex(1)
4 >Emitted(47, 49) Source(13, 83) + SourceIndex(1)
---
>>> })(something = someOther.something || (someOther.something = {}));
1->^^^^^^^^^^^^
2 > ^
3 > ^^
4 > ^^^^^^^^^
5 > ^^^
6 > ^^^^^^^^^^^^^^^^^^^
7 > ^^^^^
8 > ^^^^^^^^^^^^^^^^^^^
9 > ^^^^^^^^
1->
2 > }
3 >
4 > something
5 >
6 > something
7 >
8 > something
9 > { export class someClass {} }
1->Emitted(48, 13) Source(13, 84) + SourceIndex(1)
2 >Emitted(48, 14) Source(13, 85) + SourceIndex(1)
3 >Emitted(48, 16) Source(13, 46) + SourceIndex(1)
4 >Emitted(48, 25) Source(13, 55) + SourceIndex(1)
5 >Emitted(48, 28) Source(13, 46) + SourceIndex(1)
6 >Emitted(48, 47) Source(13, 55) + SourceIndex(1)
7 >Emitted(48, 52) Source(13, 46) + SourceIndex(1)
8 >Emitted(48, 71) Source(13, 55) + SourceIndex(1)
9 >Emitted(48, 79) Source(13, 85) + SourceIndex(1)
---
>>> })(someOther = normalN.someOther || (normalN.someOther = {}));
1 >^^^^^^^^
2 > ^
3 > ^^
4 > ^^^^^^^^^
5 > ^^^
6 > ^^^^^^^^^^^^^^^^^
7 > ^^^^^
8 > ^^^^^^^^^^^^^^^^^
9 > ^^^^^^^^
1 >
2 > }
3 >
4 > someOther
5 >
6 > someOther
7 >
8 > someOther
9 > .something { export class someClass {} }
1 >Emitted(49, 9) Source(13, 84) + SourceIndex(1)
2 >Emitted(49, 10) Source(13, 85) + SourceIndex(1)
3 >Emitted(49, 12) Source(13, 36) + SourceIndex(1)
4 >Emitted(49, 21) Source(13, 45) + SourceIndex(1)
5 >Emitted(49, 24) Source(13, 36) + SourceIndex(1)
6 >Emitted(49, 41) Source(13, 45) + SourceIndex(1)
7 >Emitted(49, 46) Source(13, 36) + SourceIndex(1)
8 >Emitted(49, 63) Source(13, 45) + SourceIndex(1)
9 >Emitted(49, 71) Source(13, 85) + SourceIndex(1)
---
>>> /*@internal*/ normalN.someImport = someNamespace.C;
1 >^^^^^^^^
2 > ^^^^^^^^^^^^^
3 > ^
4 > ^^^^^^^^^^^^^^^^^^
5 > ^^^
6 > ^^^^^^^^^^^^^
7 > ^
8 > ^
9 > ^
1 >
>
2 > /*@internal*/
3 > export import
4 > someImport
5 > =
6 > someNamespace
7 > .
8 > C
9 > ;
1 >Emitted(50, 9) Source(14, 5) + SourceIndex(1)
2 >Emitted(50, 22) Source(14, 18) + SourceIndex(1)
3 >Emitted(50, 23) Source(14, 33) + SourceIndex(1)
4 >Emitted(50, 41) Source(14, 43) + SourceIndex(1)
5 >Emitted(50, 44) Source(14, 46) + SourceIndex(1)
6 >Emitted(50, 57) Source(14, 59) + SourceIndex(1)
7 >Emitted(50, 58) Source(14, 60) + SourceIndex(1)
8 >Emitted(50, 59) Source(14, 61) + SourceIndex(1)
9 >Emitted(50, 60) Source(14, 62) + SourceIndex(1)
---
>>> /*@internal*/ normalN.internalConst = 10;
1 >^^^^^^^^
2 > ^^^^^^^^^^^^^
3 > ^
4 > ^^^^^^^^^^^^^^^^^^^^^
5 > ^^^
6 > ^^
7 > ^
1 >
> /*@internal*/ export type internalType = internalC;
>
2 > /*@internal*/
3 > export const
4 > internalConst
5 > =
6 > 10
7 > ;
1 >Emitted(51, 9) Source(16, 5) + SourceIndex(1)
2 >Emitted(51, 22) Source(16, 18) + SourceIndex(1)
3 >Emitted(51, 23) Source(16, 32) + SourceIndex(1)
4 >Emitted(51, 44) Source(16, 45) + SourceIndex(1)
5 >Emitted(51, 47) Source(16, 48) + SourceIndex(1)
6 >Emitted(51, 49) Source(16, 50) + SourceIndex(1)
7 >Emitted(51, 50) Source(16, 51) + SourceIndex(1)
---
>>> /*@internal*/ var internalEnum;
1 >^^^^^^^^
2 > ^^^^^^^^^^^^^
3 > ^
4 > ^^^^
5 > ^^^^^^^^^^^^
1 >
>
2 > /*@internal*/
3 >
4 > export enum
5 > internalEnum { a, b, c }
1 >Emitted(52, 9) Source(17, 5) + SourceIndex(1)
2 >Emitted(52, 22) Source(17, 18) + SourceIndex(1)
3 >Emitted(52, 23) Source(17, 19) + SourceIndex(1)
4 >Emitted(52, 27) Source(17, 31) + SourceIndex(1)
5 >Emitted(52, 39) Source(17, 55) + SourceIndex(1)
---
>>> (function (internalEnum) {
1 >^^^^^^^^
2 > ^^^^^^^^^^^
3 > ^^^^^^^^^^^^
4 > ^^^^^^^^^^^^^^^^^^^^^^^^->
1 >
2 > export enum
3 > internalEnum
1 >Emitted(53, 9) Source(17, 19) + SourceIndex(1)
2 >Emitted(53, 20) Source(17, 31) + SourceIndex(1)
3 >Emitted(53, 32) Source(17, 43) + SourceIndex(1)
---
>>> internalEnum[internalEnum["a"] = 0] = "a";
1->^^^^^^^^^^^^
2 > ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
3 > ^
4 > ^->
1-> {
2 > a
3 >
1->Emitted(54, 13) Source(17, 46) + SourceIndex(1)
2 >Emitted(54, 54) Source(17, 47) + SourceIndex(1)
3 >Emitted(54, 55) Source(17, 47) + SourceIndex(1)
---
>>> internalEnum[internalEnum["b"] = 1] = "b";
1->^^^^^^^^^^^^
2 > ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
3 > ^
4 > ^->
1->,
2 > b
3 >
1->Emitted(55, 13) Source(17, 49) + SourceIndex(1)
2 >Emitted(55, 54) Source(17, 50) + SourceIndex(1)
3 >Emitted(55, 55) Source(17, 50) + SourceIndex(1)
---
>>> internalEnum[internalEnum["c"] = 2] = "c";
1->^^^^^^^^^^^^
2 > ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
3 > ^
4 > ^^^^^^^^^^^^^^^^^^^^^^^^^^->
1->,
2 > c
3 >
1->Emitted(56, 13) Source(17, 52) + SourceIndex(1)
2 >Emitted(56, 54) Source(17, 53) + SourceIndex(1)
3 >Emitted(56, 55) Source(17, 53) + SourceIndex(1)
---
>>> })(internalEnum = normalN.internalEnum || (normalN.internalEnum = {}));
1->^^^^^^^^
2 > ^
3 > ^^
4 > ^^^^^^^^^^^^
5 > ^^^
6 > ^^^^^^^^^^^^^^^^^^^^
7 > ^^^^^
8 > ^^^^^^^^^^^^^^^^^^^^
9 > ^^^^^^^^
1->
2 > }
3 >
4 > internalEnum
5 >
6 > internalEnum
7 >
8 > internalEnum
9 > { a, b, c }
1->Emitted(57, 9) Source(17, 54) + SourceIndex(1)
2 >Emitted(57, 10) Source(17, 55) + SourceIndex(1)
3 >Emitted(57, 12) Source(17, 31) + SourceIndex(1)
4 >Emitted(57, 24) Source(17, 43) + SourceIndex(1)
5 >Emitted(57, 27) Source(17, 31) + SourceIndex(1)
6 >Emitted(57, 47) Source(17, 43) + SourceIndex(1)
7 >Emitted(57, 52) Source(17, 31) + SourceIndex(1)
8 >Emitted(57, 72) Source(17, 43) + SourceIndex(1)
9 >Emitted(57, 80) Source(17, 55) + SourceIndex(1)
---
>>> })(normalN = exports.normalN || (exports.normalN = {}));
1 >^^^^
2 > ^
3 > ^^
4 > ^^^^^^^
5 > ^^^
6 > ^^^^^^^^^^^^^^^
7 > ^^^^^
8 > ^^^^^^^^^^^^^^^
9 > ^^^^^^^^
10> ^^^->
1 >
>
2 > }
3 >
4 > normalN
5 >
6 > normalN
7 >
8 > normalN
9 > {
> /*@internal*/ export class C { }
> /*@internal*/ export function foo() {}
> /*@internal*/ export namespace someNamespace { export class C {} }
> /*@internal*/ export namespace someOther.something { export class someClass {} }
> /*@internal*/ export import someImport = someNamespace.C;
> /*@internal*/ export type internalType = internalC;
> /*@internal*/ export const internalConst = 10;
> /*@internal*/ export enum internalEnum { a, b, c }
> }
1 >Emitted(58, 5) Source(18, 1) + SourceIndex(1)
2 >Emitted(58, 6) Source(18, 2) + SourceIndex(1)
3 >Emitted(58, 8) Source(9, 18) + SourceIndex(1)
4 >Emitted(58, 15) Source(9, 25) + SourceIndex(1)
5 >Emitted(58, 18) Source(9, 18) + SourceIndex(1)
6 >Emitted(58, 33) Source(9, 25) + SourceIndex(1)
7 >Emitted(58, 38) Source(9, 18) + SourceIndex(1)
8 >Emitted(58, 53) Source(9, 25) + SourceIndex(1)
9 >Emitted(58, 61) Source(18, 2) + SourceIndex(1)
---
>>> /*@internal*/ var internalC = /** @class */ (function () {
1->^^^^
2 > ^^^^^^^^^^^^^
3 > ^
4 > ^^^^^^^^^^^^^->
1->
>
2 > /*@internal*/
3 >
1->Emitted(59, 5) Source(19, 1) + SourceIndex(1)
2 >Emitted(59, 18) Source(19, 14) + SourceIndex(1)
3 >Emitted(59, 19) Source(19, 15) + SourceIndex(1)
---
>>> function internalC() {
1->^^^^^^^^
2 > ^^->
1->
1->Emitted(60, 9) Source(19, 15) + SourceIndex(1)
---
>>> }
1->^^^^^^^^
2 > ^
3 > ^^^^^^^^^^^^^^^^^->
1->export class internalC {
2 > }
1->Emitted(61, 9) Source(19, 39) + SourceIndex(1)
2 >Emitted(61, 10) Source(19, 40) + SourceIndex(1)
---
>>> return internalC;
1->^^^^^^^^
2 > ^^^^^^^^^^^^^^^^
1->
2 > }
1->Emitted(62, 9) Source(19, 39) + SourceIndex(1)
2 >Emitted(62, 25) Source(19, 40) + SourceIndex(1)
---
>>> }());
1 >^^^^
2 > ^
3 >
4 > ^^^^
5 > ^^^^^^^^^^^^^^^^^^^^^^^^^^->
1 >
2 > }
3 >
4 > export class internalC {}
1 >Emitted(63, 5) Source(19, 39) + SourceIndex(1)
2 >Emitted(63, 6) Source(19, 40) + SourceIndex(1)
3 >Emitted(63, 6) Source(19, 15) + SourceIndex(1)
4 >Emitted(63, 10) Source(19, 40) + SourceIndex(1)
---
>>> exports.internalC = internalC;
1->^^^^
2 > ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
3 > ^^^^^^^^^^^->
1->
2 > internalC
1->Emitted(64, 5) Source(19, 28) + SourceIndex(1)
2 >Emitted(64, 35) Source(19, 37) + SourceIndex(1)
---
>>> /*@internal*/ function internalfoo() { }
1->^^^^
2 > ^^^^^^^^^^^^^
3 > ^
4 > ^^^^^^^^^
5 > ^^^^^^^^^^^
6 > ^^^^^
7 > ^
1-> {}
>
2 > /*@internal*/
3 >
4 > export function
5 > internalfoo
6 > () {
7 > }
1->Emitted(65, 5) Source(20, 1) + SourceIndex(1)
2 >Emitted(65, 18) Source(20, 14) + SourceIndex(1)
3 >Emitted(65, 19) Source(20, 15) + SourceIndex(1)
4 >Emitted(65, 28) Source(20, 31) + SourceIndex(1)
5 >Emitted(65, 39) Source(20, 42) + SourceIndex(1)
6 >Emitted(65, 44) Source(20, 46) + SourceIndex(1)
7 >Emitted(65, 45) Source(20, 47) + SourceIndex(1)
---
>>> exports.internalfoo = internalfoo;
1 >^^^^
2 > ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
3 > ^^^->
1 >
2 > export function internalfoo() {}
1 >Emitted(66, 5) Source(20, 15) + SourceIndex(1)
2 >Emitted(66, 39) Source(20, 47) + SourceIndex(1)
---
>>> /*@internal*/ var internalNamespace;
1->^^^^
2 > ^^^^^^^^^^^^^
3 > ^
4 > ^^^^
5 > ^^^^^^^^^^^^^^^^^
6 > ^
1->
>
2 > /*@internal*/
3 >
4 > export namespace
5 > internalNamespace
6 > { export class someClass {} }
1->Emitted(67, 5) Source(21, 1) + SourceIndex(1)
2 >Emitted(67, 18) Source(21, 14) + SourceIndex(1)
3 >Emitted(67, 19) Source(21, 15) + SourceIndex(1)
4 >Emitted(67, 23) Source(21, 32) + SourceIndex(1)
5 >Emitted(67, 40) Source(21, 49) + SourceIndex(1)
6 >Emitted(67, 41) Source(21, 79) + SourceIndex(1)
---
>>> (function (internalNamespace) {
1 >^^^^
2 > ^^^^^^^^^^^
3 > ^^^^^^^^^^^^^^^^^
4 > ^^^^^^^^^^^^^^^^^^^^^->
1 >
2 > export namespace
3 > internalNamespace
1 >Emitted(68, 5) Source(21, 15) + SourceIndex(1)
2 >Emitted(68, 16) Source(21, 32) + SourceIndex(1)
3 >Emitted(68, 33) Source(21, 49) + SourceIndex(1)
---
>>> var someClass = /** @class */ (function () {
1->^^^^^^^^
2 > ^^^^^^^^^^^^^^^^^^^^^^^^^^^->
1-> {
1->Emitted(69, 9) Source(21, 52) + SourceIndex(1)
---
>>> function someClass() {
1->^^^^^^^^^^^^
2 > ^^->
1->
1->Emitted(70, 13) Source(21, 52) + SourceIndex(1)
---
>>> }
1->^^^^^^^^^^^^
2 > ^
3 > ^^^^^^^^^^^^^^^^^->
1->export class someClass {
2 > }
1->Emitted(71, 13) Source(21, 76) + SourceIndex(1)
2 >Emitted(71, 14) Source(21, 77) + SourceIndex(1)
---
>>> return someClass;
1->^^^^^^^^^^^^
2 > ^^^^^^^^^^^^^^^^
1->
2 > }
1->Emitted(72, 13) Source(21, 76) + SourceIndex(1)
2 >Emitted(72, 29) Source(21, 77) + SourceIndex(1)
---
>>> }());
1 >^^^^^^^^
2 > ^
3 >
4 > ^^^^
5 > ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^->
1 >
2 > }
3 >
4 > export class someClass {}
1 >Emitted(73, 9) Source(21, 76) + SourceIndex(1)
2 >Emitted(73, 10) Source(21, 77) + SourceIndex(1)
3 >Emitted(73, 10) Source(21, 52) + SourceIndex(1)
4 >Emitted(73, 14) Source(21, 77) + SourceIndex(1)
---
>>> internalNamespace.someClass = someClass;
1->^^^^^^^^
2 > ^^^^^^^^^^^^^^^^^^^^^^^^^^^
3 > ^^^^^^^^^^^^
4 > ^
5 > ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^->
1->
2 > someClass
3 > {}
4 >
1->Emitted(74, 9) Source(21, 65) + SourceIndex(1)
2 >Emitted(74, 36) Source(21, 74) + SourceIndex(1)
3 >Emitted(74, 48) Source(21, 77) + SourceIndex(1)
4 >Emitted(74, 49) Source(21, 77) + SourceIndex(1)
---
>>> })(internalNamespace = exports.internalNamespace || (exports.internalNamespace = {}));
1->^^^^
2 > ^
3 > ^^
4 > ^^^^^^^^^^^^^^^^^
5 > ^^^
6 > ^^^^^^^^^^^^^^^^^^^^^^^^^
7 > ^^^^^
8 > ^^^^^^^^^^^^^^^^^^^^^^^^^
9 > ^^^^^^^^
1->
2 > }
3 >
4 > internalNamespace
5 >
6 > internalNamespace
7 >
8 > internalNamespace
9 > { export class someClass {} }
1->Emitted(75, 5) Source(21, 78) + SourceIndex(1)
2 >Emitted(75, 6) Source(21, 79) + SourceIndex(1)
3 >Emitted(75, 8) Source(21, 32) + SourceIndex(1)
4 >Emitted(75, 25) Source(21, 49) + SourceIndex(1)
5 >Emitted(75, 28) Source(21, 32) + SourceIndex(1)
6 >Emitted(75, 53) Source(21, 49) + SourceIndex(1)
7 >Emitted(75, 58) Source(21, 32) + SourceIndex(1)
8 >Emitted(75, 83) Source(21, 49) + SourceIndex(1)
9 >Emitted(75, 91) Source(21, 79) + SourceIndex(1)
---
>>> /*@internal*/ var internalOther;
1 >^^^^
2 > ^^^^^^^^^^^^^
3 > ^
4 > ^^^^
5 > ^^^^^^^^^^^^^
6 > ^
1 >
>
2 > /*@internal*/
3 >
4 > export namespace
5 > internalOther
6 > .something { export class someClass {} }
1 >Emitted(76, 5) Source(22, 1) + SourceIndex(1)
2 >Emitted(76, 18) Source(22, 14) + SourceIndex(1)
3 >Emitted(76, 19) Source(22, 15) + SourceIndex(1)
4 >Emitted(76, 23) Source(22, 32) + SourceIndex(1)
5 >Emitted(76, 36) Source(22, 45) + SourceIndex(1)
6 >Emitted(76, 37) Source(22, 85) + SourceIndex(1)
---
>>> (function (internalOther) {
1 >^^^^
2 > ^^^^^^^^^^^
3 > ^^^^^^^^^^^^^
1 >
2 > export namespace
3 > internalOther
1 >Emitted(77, 5) Source(22, 15) + SourceIndex(1)
2 >Emitted(77, 16) Source(22, 32) + SourceIndex(1)
3 >Emitted(77, 29) Source(22, 45) + SourceIndex(1)
---
>>> var something;
1 >^^^^^^^^
2 > ^^^^
3 > ^^^^^^^^^
4 > ^
5 > ^^^^^^^^^^->
1 >.
2 >
3 > something
4 > { export class someClass {} }
1 >Emitted(78, 9) Source(22, 46) + SourceIndex(1)
2 >Emitted(78, 13) Source(22, 46) + SourceIndex(1)
3 >Emitted(78, 22) Source(22, 55) + SourceIndex(1)
4 >Emitted(78, 23) Source(22, 85) + SourceIndex(1)
---
>>> (function (something) {
1->^^^^^^^^
2 > ^^^^^^^^^^^
3 > ^^^^^^^^^
4 > ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^->
1->
2 >
3 > something
1->Emitted(79, 9) Source(22, 46) + SourceIndex(1)
2 >Emitted(79, 20) Source(22, 46) + SourceIndex(1)
3 >Emitted(79, 29) Source(22, 55) + SourceIndex(1)
---
>>> var someClass = /** @class */ (function () {
1->^^^^^^^^^^^^
2 > ^^^^^^^^^^^^^^^^^^^^^^^^^^^->
1-> {
1->Emitted(80, 13) Source(22, 58) + SourceIndex(1)
---
>>> function someClass() {
1->^^^^^^^^^^^^^^^^
2 > ^^->
1->
1->Emitted(81, 17) Source(22, 58) + SourceIndex(1)
---
>>> }
1->^^^^^^^^^^^^^^^^
2 > ^
3 > ^^^^^^^^^^^^^^^^^->
1->export class someClass {
2 > }
1->Emitted(82, 17) Source(22, 82) + SourceIndex(1)
2 >Emitted(82, 18) Source(22, 83) + SourceIndex(1)
---
>>> return someClass;
1->^^^^^^^^^^^^^^^^
2 > ^^^^^^^^^^^^^^^^
1->
2 > }
1->Emitted(83, 17) Source(22, 82) + SourceIndex(1)
2 >Emitted(83, 33) Source(22, 83) + SourceIndex(1)
---
>>> }());
1 >^^^^^^^^^^^^
2 > ^
3 >
4 > ^^^^
5 > ^^^^^^^^^^^^^^^^^^^^^^^^^^^^->
1 >
2 > }
3 >
4 > export class someClass {}
1 >Emitted(84, 13) Source(22, 82) + SourceIndex(1)
2 >Emitted(84, 14) Source(22, 83) + SourceIndex(1)
3 >Emitted(84, 14) Source(22, 58) + SourceIndex(1)
4 >Emitted(84, 18) Source(22, 83) + SourceIndex(1)
---
>>> something.someClass = someClass;
1->^^^^^^^^^^^^
2 > ^^^^^^^^^^^^^^^^^^^
3 > ^^^^^^^^^^^^
4 > ^
5 > ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^->
1->
2 > someClass
3 > {}
4 >
1->Emitted(85, 13) Source(22, 71) + SourceIndex(1)
2 >Emitted(85, 32) Source(22, 80) + SourceIndex(1)
3 >Emitted(85, 44) Source(22, 83) + SourceIndex(1)
4 >Emitted(85, 45) Source(22, 83) + SourceIndex(1)
---
>>> })(something = internalOther.something || (internalOther.something = {}));
1->^^^^^^^^
2 > ^
3 > ^^
4 > ^^^^^^^^^
5 > ^^^
6 > ^^^^^^^^^^^^^^^^^^^^^^^
7 > ^^^^^
8 > ^^^^^^^^^^^^^^^^^^^^^^^
9 > ^^^^^^^^
1->
2 > }
3 >
4 > something
5 >
6 > something
7 >
8 > something
9 > { export class someClass {} }
1->Emitted(86, 9) Source(22, 84) + SourceIndex(1)
2 >Emitted(86, 10) Source(22, 85) + SourceIndex(1)
3 >Emitted(86, 12) Source(22, 46) + SourceIndex(1)
4 >Emitted(86, 21) Source(22, 55) + SourceIndex(1)
5 >Emitted(86, 24) Source(22, 46) + SourceIndex(1)
6 >Emitted(86, 47) Source(22, 55) + SourceIndex(1)
7 >Emitted(86, 52) Source(22, 46) + SourceIndex(1)
8 >Emitted(86, 75) Source(22, 55) + SourceIndex(1)
9 >Emitted(86, 83) Source(22, 85) + SourceIndex(1)
---
>>> })(internalOther = exports.internalOther || (exports.internalOther = {}));
1 >^^^^
2 > ^
3 > ^^
4 > ^^^^^^^^^^^^^
5 > ^^^
6 > ^^^^^^^^^^^^^^^^^^^^^
7 > ^^^^^
8 > ^^^^^^^^^^^^^^^^^^^^^
9 > ^^^^^^^^
1 >
2 > }
3 >
4 > internalOther
5 >
6 > internalOther
7 >
8 > internalOther
9 > .something { export class someClass {} }
1 >Emitted(87, 5) Source(22, 84) + SourceIndex(1)
2 >Emitted(87, 6) Source(22, 85) + SourceIndex(1)
3 >Emitted(87, 8) Source(22, 32) + SourceIndex(1)
4 >Emitted(87, 21) Source(22, 45) + SourceIndex(1)
5 >Emitted(87, 24) Source(22, 32) + SourceIndex(1)
6 >Emitted(87, 45) Source(22, 45) + SourceIndex(1)
7 >Emitted(87, 50) Source(22, 32) + SourceIndex(1)
8 >Emitted(87, 71) Source(22, 45) + SourceIndex(1)
9 >Emitted(87, 79) Source(22, 85) + SourceIndex(1)
---
>>> /*@internal*/ exports.internalImport = internalNamespace.someClass;
1 >^^^^
2 > ^^^^^^^^^^^^^
3 > ^
4 > ^^^^^^^^
5 > ^^^^^^^^^^^^^^
6 > ^^^
7 > ^^^^^^^^^^^^^^^^^
8 > ^
9 > ^^^^^^^^^
10> ^
1 >
>
2 > /*@internal*/
3 > export import
4 >
5 > internalImport
6 > =
7 > internalNamespace
8 > .
9 > someClass
10> ;
1 >Emitted(88, 5) Source(23, 1) + SourceIndex(1)
2 >Emitted(88, 18) Source(23, 14) + SourceIndex(1)
3 >Emitted(88, 19) Source(23, 29) + SourceIndex(1)
4 >Emitted(88, 27) Source(23, 29) + SourceIndex(1)
5 >Emitted(88, 41) Source(23, 43) + SourceIndex(1)
6 >Emitted(88, 44) Source(23, 46) + SourceIndex(1)
7 >Emitted(88, 61) Source(23, 63) + SourceIndex(1)
8 >Emitted(88, 62) Source(23, 64) + SourceIndex(1)
9 >Emitted(88, 71) Source(23, 73) + SourceIndex(1)
10>Emitted(88, 72) Source(23, 74) + SourceIndex(1)
---
>>> /*@internal*/ exports.internalConst = 10;
1 >^^^^
2 > ^^^^^^^^^^^^^
3 > ^
4 > ^^^^^^^^
5 > ^^^^^^^^^^^^^
6 > ^^^
7 > ^^
8 > ^
1 >
>/*@internal*/ export type internalType = internalC;
>
2 > /*@internal*/
3 > export const
4 >
5 > internalConst
6 > =
7 > 10
8 > ;
1 >Emitted(89, 5) Source(25, 1) + SourceIndex(1)
2 >Emitted(89, 18) Source(25, 14) + SourceIndex(1)
3 >Emitted(89, 19) Source(25, 28) + SourceIndex(1)
4 >Emitted(89, 27) Source(25, 28) + SourceIndex(1)
5 >Emitted(89, 40) Source(25, 41) + SourceIndex(1)
6 >Emitted(89, 43) Source(25, 44) + SourceIndex(1)
7 >Emitted(89, 45) Source(25, 46) + SourceIndex(1)
8 >Emitted(89, 46) Source(25, 47) + SourceIndex(1)
---
>>> /*@internal*/ var internalEnum;
1 >^^^^
2 > ^^^^^^^^^^^^^
3 > ^
4 > ^^^^
5 > ^^^^^^^^^^^^
1 >
>
2 > /*@internal*/
3 >
4 > export enum
5 > internalEnum { a, b, c }
1 >Emitted(90, 5) Source(26, 1) + SourceIndex(1)
2 >Emitted(90, 18) Source(26, 14) + SourceIndex(1)
3 >Emitted(90, 19) Source(26, 15) + SourceIndex(1)
4 >Emitted(90, 23) Source(26, 27) + SourceIndex(1)
5 >Emitted(90, 35) Source(26, 51) + SourceIndex(1)
---
>>> (function (internalEnum) {
1 >^^^^
2 > ^^^^^^^^^^^
3 > ^^^^^^^^^^^^
4 > ^^^^^^^^^^^^^^^^^^^^^^^^->
1 >
2 > export enum
3 > internalEnum
1 >Emitted(91, 5) Source(26, 15) + SourceIndex(1)
2 >Emitted(91, 16) Source(26, 27) + SourceIndex(1)
3 >Emitted(91, 28) Source(26, 39) + SourceIndex(1)
---
>>> internalEnum[internalEnum["a"] = 0] = "a";
1->^^^^^^^^
2 > ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
3 > ^
4 > ^->
1-> {
2 > a
3 >
1->Emitted(92, 9) Source(26, 42) + SourceIndex(1)
2 >Emitted(92, 50) Source(26, 43) + SourceIndex(1)
3 >Emitted(92, 51) Source(26, 43) + SourceIndex(1)
---
>>> internalEnum[internalEnum["b"] = 1] = "b";
1->^^^^^^^^
2 > ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
3 > ^
4 > ^->
1->,
2 > b
3 >
1->Emitted(93, 9) Source(26, 45) + SourceIndex(1)
2 >Emitted(93, 50) Source(26, 46) + SourceIndex(1)
3 >Emitted(93, 51) Source(26, 46) + SourceIndex(1)
---
>>> internalEnum[internalEnum["c"] = 2] = "c";
1->^^^^^^^^
2 > ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
3 > ^
4 > ^^^^^^^^^^^^^^^^^^^^^^^^^^->
1->,
2 > c
3 >
1->Emitted(94, 9) Source(26, 48) + SourceIndex(1)
2 >Emitted(94, 50) Source(26, 49) + SourceIndex(1)
3 >Emitted(94, 51) Source(26, 49) + SourceIndex(1)
---
>>> })(internalEnum = exports.internalEnum || (exports.internalEnum = {}));
1->^^^^
2 > ^
3 > ^^
4 > ^^^^^^^^^^^^
5 > ^^^
6 > ^^^^^^^^^^^^^^^^^^^^
7 > ^^^^^
8 > ^^^^^^^^^^^^^^^^^^^^
9 > ^^^^^^^^
1->
2 > }
3 >
4 > internalEnum
5 >
6 > internalEnum
7 >
8 > internalEnum
9 > { a, b, c }
1->Emitted(95, 5) Source(26, 50) + SourceIndex(1)
2 >Emitted(95, 6) Source(26, 51) + SourceIndex(1)
3 >Emitted(95, 8) Source(26, 27) + SourceIndex(1)
4 >Emitted(95, 20) Source(26, 39) + SourceIndex(1)
5 >Emitted(95, 23) Source(26, 27) + SourceIndex(1)
6 >Emitted(95, 43) Source(26, 39) + SourceIndex(1)
7 >Emitted(95, 48) Source(26, 27) + SourceIndex(1)
8 >Emitted(95, 68) Source(26, 39) + SourceIndex(1)
9 >Emitted(95, 76) Source(26, 51) + SourceIndex(1)
---
-------------------------------------------------------------------
emittedFile:/src/app/module.js
sourceFile:../lib/file2.ts
-------------------------------------------------------------------
>>>});
>>>define("file2", ["require", "exports"], function (require, exports) {
>>> "use strict";
>>> Object.defineProperty(exports, "__esModule", { value: true });
>>> exports.y = 20;
1 >^^^^
2 > ^^^^^^^^
3 > ^
4 > ^^^
5 > ^^
6 > ^
1 >export const
2 >
3 > y
4 > =
5 > 20
6 > ;
1 >Emitted(100, 5) Source(1, 14) + SourceIndex(2)
2 >Emitted(100, 13) Source(1, 14) + SourceIndex(2)
3 >Emitted(100, 14) Source(1, 15) + SourceIndex(2)
4 >Emitted(100, 17) Source(1, 18) + SourceIndex(2)
5 >Emitted(100, 19) Source(1, 20) + SourceIndex(2)
6 >Emitted(100, 20) Source(1, 21) + SourceIndex(2)
---
-------------------------------------------------------------------
emittedFile:/src/app/module.js
sourceFile:../lib/global.ts
-------------------------------------------------------------------
>>>});
>>>var globalConst = 10;
1 >
2 >^^^^
3 > ^^^^^^^^^^^
4 > ^^^
5 > ^^
6 > ^
7 > ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^->
1 >
2 >const
3 > globalConst
4 > =
5 > 10
6 > ;
1 >Emitted(102, 1) Source(1, 1) + SourceIndex(3)
2 >Emitted(102, 5) Source(1, 7) + SourceIndex(3)
3 >Emitted(102, 16) Source(1, 18) + SourceIndex(3)
4 >Emitted(102, 19) Source(1, 21) + SourceIndex(3)
5 >Emitted(102, 21) Source(1, 23) + SourceIndex(3)
6 >Emitted(102, 22) Source(1, 24) + SourceIndex(3)
---
-------------------------------------------------------------------
emittedFile:/src/app/module.js
sourceFile:file3.ts
-------------------------------------------------------------------
>>>define("file3", ["require", "exports"], function (require, exports) {
>>> "use strict";
>>> Object.defineProperty(exports, "__esModule", { value: true });
>>> exports.z = 30;
1->^^^^
2 > ^^^^^^^^
3 > ^
4 > ^^^
5 > ^^
6 > ^
1->export const
2 >
3 > z
4 > =
5 > 30
6 > ;
1->Emitted(106, 5) Source(1, 14) + SourceIndex(4)
2 >Emitted(106, 13) Source(1, 14) + SourceIndex(4)
3 >Emitted(106, 14) Source(1, 15) + SourceIndex(4)
4 >Emitted(106, 17) Source(1, 18) + SourceIndex(4)
5 >Emitted(106, 19) Source(1, 20) + SourceIndex(4)
6 >Emitted(106, 20) Source(1, 21) + SourceIndex(4)
---
-------------------------------------------------------------------
emittedFile:/src/app/module.js
sourceFile:file4.ts
-------------------------------------------------------------------
>>>});
>>>var myVar = 30;
1 >
2 >^^^^
3 > ^^^^^
4 > ^^^
5 > ^^
6 > ^
7 > ^^^^^^^^^^^^^^^^^^->
1 >
2 >const
3 > myVar
4 > =
5 > 30
6 > ;
1 >Emitted(108, 1) Source(1, 1) + SourceIndex(5)
2 >Emitted(108, 5) Source(1, 7) + SourceIndex(5)
3 >Emitted(108, 10) Source(1, 12) + SourceIndex(5)
4 >Emitted(108, 13) Source(1, 15) + SourceIndex(5)
5 >Emitted(108, 15) Source(1, 17) + SourceIndex(5)
6 >Emitted(108, 16) Source(1, 18) + SourceIndex(5)
---
>>>//# sourceMappingURL=module.js.map
//// [/src/app/module.tsbuildinfo]
{
"bundle": {
"commonSourceDirectory": "./",
"sourceFiles": [
"./file3.ts",
"./file4.ts"
],
"js": {
"sections": [
{
"pos": 0,
"end": 4130,
"kind": "prepend",
"data": "../lib/module.js",
"texts": [
{
"pos": 0,
"end": 4130,
"kind": "text"
}
]
},
{
"pos": 4130,
"end": 4331,
"kind": "text"
}
]
},
"dts": {
"sections": [
{
"pos": 0,
"end": 217,
"kind": "prepend",
"data": "../lib/module.d.ts",
"texts": [
{
"pos": 0,
"end": 217,
"kind": "text"
}
]
},
{
"pos": 217,
"end": 299,
"kind": "text"
}
]
}
},
"version": "FakeTSVersion"
}
//// [/src/app/module.tsbuildinfo.baseline.txt]
======================================================================
File:: /src/app/module.js
----------------------------------------------------------------------
prepend: (0-4130):: ../lib/module.js texts:: 1
>>--------------------------------------------------------------------
text: (0-4130)
/*@internal*/ var myGlob = 20;
define("file1", ["require", "exports"], function (require, exports) {
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.x = 10;
var normalC = /** @class */ (function () {
/*@internal*/ function normalC() {
}
/*@internal*/ normalC.prototype.method = function () { };
Object.defineProperty(normalC.prototype, "c", {
/*@internal*/ get: function () { return 10; },
/*@internal*/ set: function (val) { },
enumerable: false,
configurable: true
});
return normalC;
}());
exports.normalC = normalC;
var normalN;
(function (normalN) {
/*@internal*/ var C = /** @class */ (function () {
function C() {
}
return C;
}());
normalN.C = C;
/*@internal*/ function foo() { }
normalN.foo = foo;
/*@internal*/ var someNamespace;
(function (someNamespace) {
var C = /** @class */ (function () {
function C() {
}
return C;
}());
someNamespace.C = C;
})(someNamespace = normalN.someNamespace || (normalN.someNamespace = {}));
/*@internal*/ var someOther;
(function (someOther) {
var something;
(function (something) {
var someClass = /** @class */ (function () {
function someClass() {
}
return someClass;
}());
something.someClass = someClass;
})(something = someOther.something || (someOther.something = {}));
})(someOther = normalN.someOther || (normalN.someOther = {}));
/*@internal*/ normalN.someImport = someNamespace.C;
/*@internal*/ normalN.internalConst = 10;
/*@internal*/ var internalEnum;
(function (internalEnum) {
internalEnum[internalEnum["a"] = 0] = "a";
internalEnum[internalEnum["b"] = 1] = "b";
internalEnum[internalEnum["c"] = 2] = "c";
})(internalEnum = normalN.internalEnum || (normalN.internalEnum = {}));
})(normalN = exports.normalN || (exports.normalN = {}));
/*@internal*/ var internalC = /** @class */ (function () {
function internalC() {
}
return internalC;
}());
exports.internalC = internalC;
/*@internal*/ function internalfoo() { }
exports.internalfoo = internalfoo;
/*@internal*/ var internalNamespace;
(function (internalNamespace) {
var someClass = /** @class */ (function () {
function someClass() {
}
return someClass;
}());
internalNamespace.someClass = someClass;
})(internalNamespace = exports.internalNamespace || (exports.internalNamespace = {}));
/*@internal*/ var internalOther;
(function (internalOther) {
var something;
(function (something) {
var someClass = /** @class */ (function () {
function someClass() {
}
return someClass;
}());
something.someClass = someClass;
})(something = internalOther.something || (internalOther.something = {}));
})(internalOther = exports.internalOther || (exports.internalOther = {}));
/*@internal*/ exports.internalImport = internalNamespace.someClass;
/*@internal*/ exports.internalConst = 10;
/*@internal*/ var internalEnum;
(function (internalEnum) {
internalEnum[internalEnum["a"] = 0] = "a";
internalEnum[internalEnum["b"] = 1] = "b";
internalEnum[internalEnum["c"] = 2] = "c";
})(internalEnum = exports.internalEnum || (exports.internalEnum = {}));
});
define("file2", ["require", "exports"], function (require, exports) {
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.y = 20;
});
var globalConst = 10;
----------------------------------------------------------------------
text: (4130-4331)
define("file3", ["require", "exports"], function (require, exports) {
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.z = 30;
});
var myVar = 30;
======================================================================
======================================================================
File:: /src/app/module.d.ts
----------------------------------------------------------------------
prepend: (0-217):: ../lib/module.d.ts texts:: 1
>>--------------------------------------------------------------------
text: (0-217)
declare module "file1" {
export const x = 10;
export class normalC {
}
export namespace normalN {
}
}
declare module "file2" {
export const y = 20;
}
declare const globalConst = 10;
----------------------------------------------------------------------
text: (217-299)
declare module "file3" {
export const z = 30;
}
declare const myVar = 30;
======================================================================
//// [/src/app/tsconfig.json]
{
"compilerOptions": {
"target": "es5",
"module": "amd",
"composite": true,
"stripInternal": true,
"strict": false,
"sourceMap": true,
"declarationMap": true,
"outFile": "module.js"
},
"exclude": ["module.d.ts"],
"references": [
{ "path": "../lib", "prepend": true }
]
}
//// [/src/lib/file0.ts]
/*@internal*/ const myGlob = 20;
//// [/src/lib/file1.ts]
export const x = 10;
export class normalC {
/*@internal*/ constructor() { }
/*@internal*/ prop: string;
/*@internal*/ method() { }
/*@internal*/ get c() { return 10; }
/*@internal*/ set c(val: number) { }
}
export namespace normalN {
/*@internal*/ export class C { }
/*@internal*/ export function foo() {}
/*@internal*/ export namespace someNamespace { export class C {} }
/*@internal*/ export namespace someOther.something { export class someClass {} }
/*@internal*/ export import someImport = someNamespace.C;
/*@internal*/ export type internalType = internalC;
/*@internal*/ export const internalConst = 10;
/*@internal*/ export enum internalEnum { a, b, c }
}
/*@internal*/ export class internalC {}
/*@internal*/ export function internalfoo() {}
/*@internal*/ export namespace internalNamespace { export class someClass {} }
/*@internal*/ export namespace internalOther.something { export class someClass {} }
/*@internal*/ export import internalImport = internalNamespace.someClass;
/*@internal*/ export type internalType = internalC;
/*@internal*/ export const internalConst = 10;
/*@internal*/ export enum internalEnum { a, b, c }
//// [/src/lib/module.d.ts]
declare const myGlob = 20;
declare module "file1" {
export const x = 10;
export class normalC {
constructor();
prop: string;
method(): void;
get c(): number;
set c(val: number);
}
export namespace normalN {
class C {
}
function foo(): void;
namespace someNamespace {
class C {
}
}
namespace someOther.something {
class someClass {
}
}
export import someImport = someNamespace.C;
type internalType = internalC;
const internalConst = 10;
enum internalEnum {
a = 0,
b = 1,
c = 2
}
}
export class internalC {
}
export function internalfoo(): void;
export namespace internalNamespace {
class someClass {
}
}
export namespace internalOther.something {
class someClass {
}
}
export import internalImport = internalNamespace.someClass;
export type internalType = internalC;
export const internalConst = 10;
export enum internalEnum {
a = 0,
b = 1,
c = 2
}
}
declare module "file2" {
export const y = 20;
}
declare const globalConst = 10;
//# sourceMappingURL=module.d.ts.map
//// [/src/lib/module.d.ts.map]
{"version":3,"file":"module.d.ts","sourceRoot":"","sources":["file0.ts","file1.ts","file2.ts","global.ts"],"names":[],"mappings":"AAAc,QAAA,MAAM,MAAM,KAAK,CAAC;;ICAhC,MAAM,CAAC,MAAM,CAAC,KAAK,CAAC;IACpB,MAAM,OAAO,OAAO;;QAEF,IAAI,EAAE,MAAM,CAAC;QACb,MAAM;QACN,IAAI,CAAC,IACM,MAAM,CADK;QACtB,IAAI,CAAC,CAAC,KAAK,MAAM,EAAK;KACvC;IACD,MAAM,WAAW,OAAO,CAAC;QACP,MAAa,CAAC;SAAI;QAClB,SAAgB,GAAG,SAAK;QACxB,UAAiB,aAAa,CAAC;YAAE,MAAa,CAAC;aAAG;SAAE;QACpD,UAAiB,SAAS,CAAC,SAAS,CAAC;YAAE,MAAa,SAAS;aAAG;SAAE;QAClE,MAAM,QAAQ,UAAU,GAAG,aAAa,CAAC,CAAC,CAAC;QAC3C,KAAY,YAAY,GAAG,SAAS,CAAC;QAC9B,MAAM,aAAa,KAAK,CAAC;QAChC,KAAY,YAAY;YAAG,CAAC,IAAA;YAAE,CAAC,IAAA;YAAE,CAAC,IAAA;SAAE;KACrD;IACa,MAAM,OAAO,SAAS;KAAG;IACzB,MAAM,UAAU,WAAW,SAAK;IAChC,MAAM,WAAW,iBAAiB,CAAC;QAAE,MAAa,SAAS;SAAG;KAAE;IAChE,MAAM,WAAW,aAAa,CAAC,SAAS,CAAC;QAAE,MAAa,SAAS;SAAG;KAAE;IACtE,MAAM,QAAQ,cAAc,GAAG,iBAAiB,CAAC,SAAS,CAAC;IAC3D,MAAM,MAAM,YAAY,GAAG,SAAS,CAAC;IACrC,MAAM,CAAC,MAAM,aAAa,KAAK,CAAC;IAChC,MAAM,MAAM,YAAY;QAAG,CAAC,IAAA;QAAE,CAAC,IAAA;QAAE,CAAC,IAAA;KAAE;;;ICzBlD,MAAM,CAAC,MAAM,CAAC,KAAK,CAAC;;ACApB,QAAA,MAAM,WAAW,KAAK,CAAC"}
//// [/src/lib/module.d.ts.map.baseline.txt]
===================================================================
JsFile: module.d.ts
mapUrl: module.d.ts.map
sourceRoot:
sources: file0.ts,file1.ts,file2.ts,global.ts
===================================================================
-------------------------------------------------------------------
emittedFile:/src/lib/module.d.ts
sourceFile:file0.ts
-------------------------------------------------------------------
>>>declare const myGlob = 20;
1 >
2 >^^^^^^^^
3 > ^^^^^^
4 > ^^^^^^
5 > ^^^^^
6 > ^
1 >/*@internal*/
2 >
3 > const
4 > myGlob
5 > = 20
6 > ;
1 >Emitted(1, 1) Source(1, 15) + SourceIndex(0)
2 >Emitted(1, 9) Source(1, 15) + SourceIndex(0)
3 >Emitted(1, 15) Source(1, 21) + SourceIndex(0)
4 >Emitted(1, 21) Source(1, 27) + SourceIndex(0)
5 >Emitted(1, 26) Source(1, 32) + SourceIndex(0)
6 >Emitted(1, 27) Source(1, 33) + SourceIndex(0)
---
-------------------------------------------------------------------
emittedFile:/src/lib/module.d.ts
sourceFile:file1.ts
-------------------------------------------------------------------
>>>declare module "file1" {
>>> export const x = 10;
1 >^^^^
2 > ^^^^^^
3 > ^
4 > ^^^^^^
5 > ^
6 > ^^^^^
7 > ^
8 > ^^^->
1 >
2 > export
3 >
4 > const
5 > x
6 > = 10
7 > ;
1 >Emitted(3, 5) Source(1, 1) + SourceIndex(1)
2 >Emitted(3, 11) Source(1, 7) + SourceIndex(1)
3 >Emitted(3, 12) Source(1, 8) + SourceIndex(1)
4 >Emitted(3, 18) Source(1, 14) + SourceIndex(1)
5 >Emitted(3, 19) Source(1, 15) + SourceIndex(1)
6 >Emitted(3, 24) Source(1, 20) + SourceIndex(1)
7 >Emitted(3, 25) Source(1, 21) + SourceIndex(1)
---
>>> export class normalC {
1->^^^^
2 > ^^^^^^
3 > ^^^^^^^
4 > ^^^^^^^
1->
>
2 > export
3 > class
4 > normalC
1->Emitted(4, 5) Source(2, 1) + SourceIndex(1)
2 >Emitted(4, 11) Source(2, 7) + SourceIndex(1)
3 >Emitted(4, 18) Source(2, 14) + SourceIndex(1)
4 >Emitted(4, 25) Source(2, 21) + SourceIndex(1)
---
>>> constructor();
>>> prop: string;
1 >^^^^^^^^
2 > ^^^^
3 > ^^
4 > ^^^^^^
5 > ^
6 > ^^^->
1 > {
> /*@internal*/ constructor() { }
> /*@internal*/
2 > prop
3 > :
4 > string
5 > ;
1 >Emitted(6, 9) Source(4, 19) + SourceIndex(1)
2 >Emitted(6, 13) Source(4, 23) + SourceIndex(1)
3 >Emitted(6, 15) Source(4, 25) + SourceIndex(1)
4 >Emitted(6, 21) Source(4, 31) + SourceIndex(1)
5 >Emitted(6, 22) Source(4, 32) + SourceIndex(1)
---
>>> method(): void;
1->^^^^^^^^
2 > ^^^^^^
3 > ^^^^^^^^^^^->
1->
> /*@internal*/
2 > method
1->Emitted(7, 9) Source(5, 19) + SourceIndex(1)
2 >Emitted(7, 15) Source(5, 25) + SourceIndex(1)
---
>>> get c(): number;
1->^^^^^^^^
2 > ^^^^
3 > ^
4 > ^^^^
5 > ^^^^^^
6 > ^
7 > ^^^^->
1->() { }
> /*@internal*/
2 > get
3 > c
4 > () { return 10; }
> /*@internal*/ set c(val:
5 > number
6 >
1->Emitted(8, 9) Source(6, 19) + SourceIndex(1)
2 >Emitted(8, 13) Source(6, 23) + SourceIndex(1)
3 >Emitted(8, 14) Source(6, 24) + SourceIndex(1)
4 >Emitted(8, 18) Source(7, 30) + SourceIndex(1)
5 >Emitted(8, 24) Source(7, 36) + SourceIndex(1)
6 >Emitted(8, 25) Source(6, 41) + SourceIndex(1)
---
>>> set c(val: number);
1->^^^^^^^^
2 > ^^^^
3 > ^
4 > ^
5 > ^^^^^
6 > ^^^^^^
7 > ^^
1->
> /*@internal*/
2 > set
3 > c
4 > (
5 > val:
6 > number
7 > ) { }
1->Emitted(9, 9) Source(7, 19) + SourceIndex(1)
2 >Emitted(9, 13) Source(7, 23) + SourceIndex(1)
3 >Emitted(9, 14) Source(7, 24) + SourceIndex(1)
4 >Emitted(9, 15) Source(7, 25) + SourceIndex(1)
5 >Emitted(9, 20) Source(7, 30) + SourceIndex(1)
6 >Emitted(9, 26) Source(7, 36) + SourceIndex(1)
7 >Emitted(9, 28) Source(7, 41) + SourceIndex(1)
---
>>> }
1 >^^^^^
2 > ^^^^^^^^^^^^^^^^^^^^^^^^^^->
1 >
>}
1 >Emitted(10, 6) Source(8, 2) + SourceIndex(1)
---
>>> export namespace normalN {
1->^^^^
2 > ^^^^^^
3 > ^^^^^^^^^^^
4 > ^^^^^^^
5 > ^
1->
>
2 > export
3 > namespace
4 > normalN
5 >
1->Emitted(11, 5) Source(9, 1) + SourceIndex(1)
2 >Emitted(11, 11) Source(9, 7) + SourceIndex(1)
3 >Emitted(11, 22) Source(9, 18) + SourceIndex(1)
4 >Emitted(11, 29) Source(9, 25) + SourceIndex(1)
5 >Emitted(11, 30) Source(9, 26) + SourceIndex(1)
---
>>> class C {
1 >^^^^^^^^
2 > ^^^^^^
3 > ^
1 >{
> /*@internal*/
2 > export class
3 > C
1 >Emitted(12, 9) Source(10, 19) + SourceIndex(1)
2 >Emitted(12, 15) Source(10, 32) + SourceIndex(1)
3 >Emitted(12, 16) Source(10, 33) + SourceIndex(1)
---
>>> }
1 >^^^^^^^^^
2 > ^^^^^^^^^^^^^^^^^^^^^->
1 > { }
1 >Emitted(13, 10) Source(10, 37) + SourceIndex(1)
---
>>> function foo(): void;
1->^^^^^^^^
2 > ^^^^^^^^^
3 > ^^^
4 > ^^^^^^^^^
5 > ^^^^^->
1->
> /*@internal*/
2 > export function
3 > foo
4 > () {}
1->Emitted(14, 9) Source(11, 19) + SourceIndex(1)
2 >Emitted(14, 18) Source(11, 35) + SourceIndex(1)
3 >Emitted(14, 21) Source(11, 38) + SourceIndex(1)
4 >Emitted(14, 30) Source(11, 43) + SourceIndex(1)
---
>>> namespace someNamespace {
1->^^^^^^^^
2 > ^^^^^^^^^^
3 > ^^^^^^^^^^^^^
4 > ^
1->
> /*@internal*/
2 > export namespace
3 > someNamespace
4 >
1->Emitted(15, 9) Source(12, 19) + SourceIndex(1)
2 >Emitted(15, 19) Source(12, 36) + SourceIndex(1)
3 >Emitted(15, 32) Source(12, 49) + SourceIndex(1)
4 >Emitted(15, 33) Source(12, 50) + SourceIndex(1)
---
>>> class C {
1 >^^^^^^^^^^^^
2 > ^^^^^^
3 > ^
1 >{
2 > export class
3 > C
1 >Emitted(16, 13) Source(12, 52) + SourceIndex(1)
2 >Emitted(16, 19) Source(12, 65) + SourceIndex(1)
3 >Emitted(16, 20) Source(12, 66) + SourceIndex(1)
---
>>> }
1 >^^^^^^^^^^^^^
1 > {}
1 >Emitted(17, 14) Source(12, 69) + SourceIndex(1)
---
>>> }
1 >^^^^^^^^^
2 > ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^->
1 > }
1 >Emitted(18, 10) Source(12, 71) + SourceIndex(1)
---
>>> namespace someOther.something {
1->^^^^^^^^
2 > ^^^^^^^^^^
3 > ^^^^^^^^^
4 > ^
5 > ^^^^^^^^^
6 > ^
1->
> /*@internal*/
2 > export namespace
3 > someOther
4 > .
5 > something
6 >
1->Emitted(19, 9) Source(13, 19) + SourceIndex(1)
2 >Emitted(19, 19) Source(13, 36) + SourceIndex(1)
3 >Emitted(19, 28) Source(13, 45) + SourceIndex(1)
4 >Emitted(19, 29) Source(13, 46) + SourceIndex(1)
5 >Emitted(19, 38) Source(13, 55) + SourceIndex(1)
6 >Emitted(19, 39) Source(13, 56) + SourceIndex(1)
---
>>> class someClass {
1 >^^^^^^^^^^^^
2 > ^^^^^^
3 > ^^^^^^^^^
1 >{
2 > export class
3 > someClass
1 >Emitted(20, 13) Source(13, 58) + SourceIndex(1)
2 >Emitted(20, 19) Source(13, 71) + SourceIndex(1)
3 >Emitted(20, 28) Source(13, 80) + SourceIndex(1)
---
>>> }
1 >^^^^^^^^^^^^^
1 > {}
1 >Emitted(21, 14) Source(13, 83) + SourceIndex(1)
---
>>> }
1 >^^^^^^^^^
2 > ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^->
1 > }
1 >Emitted(22, 10) Source(13, 85) + SourceIndex(1)
---
>>> export import someImport = someNamespace.C;
1->^^^^^^^^
2 > ^^^^^^
3 > ^^^^^^^^
4 > ^^^^^^^^^^
5 > ^^^
6 > ^^^^^^^^^^^^^
7 > ^
8 > ^
9 > ^
1->
> /*@internal*/
2 > export
3 > import
4 > someImport
5 > =
6 > someNamespace
7 > .
8 > C
9 > ;
1->Emitted(23, 9) Source(14, 19) + SourceIndex(1)
2 >Emitted(23, 15) Source(14, 25) + SourceIndex(1)
3 >Emitted(23, 23) Source(14, 33) + SourceIndex(1)
4 >Emitted(23, 33) Source(14, 43) + SourceIndex(1)
5 >Emitted(23, 36) Source(14, 46) + SourceIndex(1)
6 >Emitted(23, 49) Source(14, 59) + SourceIndex(1)
7 >Emitted(23, 50) Source(14, 60) + SourceIndex(1)
8 >Emitted(23, 51) Source(14, 61) + SourceIndex(1)
9 >Emitted(23, 52) Source(14, 62) + SourceIndex(1)
---
>>> type internalType = internalC;
1 >^^^^^^^^
2 > ^^^^^
3 > ^^^^^^^^^^^^
4 > ^^^
5 > ^^^^^^^^^
6 > ^
1 >
> /*@internal*/
2 > export type
3 > internalType
4 > =
5 > internalC
6 > ;
1 >Emitted(24, 9) Source(15, 19) + SourceIndex(1)
2 >Emitted(24, 14) Source(15, 31) + SourceIndex(1)
3 >Emitted(24, 26) Source(15, 43) + SourceIndex(1)
4 >Emitted(24, 29) Source(15, 46) + SourceIndex(1)
5 >Emitted(24, 38) Source(15, 55) + SourceIndex(1)
6 >Emitted(24, 39) Source(15, 56) + SourceIndex(1)
---
>>> const internalConst = 10;
1 >^^^^^^^^
2 > ^^^^^^
3 > ^^^^^^^^^^^^^
4 > ^^^^^
5 > ^
1 >
> /*@internal*/ export
2 > const
3 > internalConst
4 > = 10
5 > ;
1 >Emitted(25, 9) Source(16, 26) + SourceIndex(1)
2 >Emitted(25, 15) Source(16, 32) + SourceIndex(1)
3 >Emitted(25, 28) Source(16, 45) + SourceIndex(1)
4 >Emitted(25, 33) Source(16, 50) + SourceIndex(1)
5 >Emitted(25, 34) Source(16, 51) + SourceIndex(1)
---
>>> enum internalEnum {
1 >^^^^^^^^
2 > ^^^^^
3 > ^^^^^^^^^^^^
1 >
> /*@internal*/
2 > export enum
3 > internalEnum
1 >Emitted(26, 9) Source(17, 19) + SourceIndex(1)
2 >Emitted(26, 14) Source(17, 31) + SourceIndex(1)
3 >Emitted(26, 26) Source(17, 43) + SourceIndex(1)
---
>>> a = 0,
1 >^^^^^^^^^^^^
2 > ^
3 > ^^^^
4 > ^^->
1 > {
2 > a
3 >
1 >Emitted(27, 13) Source(17, 46) + SourceIndex(1)
2 >Emitted(27, 14) Source(17, 47) + SourceIndex(1)
3 >Emitted(27, 18) Source(17, 47) + SourceIndex(1)
---
>>> b = 1,
1->^^^^^^^^^^^^
2 > ^
3 > ^^^^
4 > ^->
1->,
2 > b
3 >
1->Emitted(28, 13) Source(17, 49) + SourceIndex(1)
2 >Emitted(28, 14) Source(17, 50) + SourceIndex(1)
3 >Emitted(28, 18) Source(17, 50) + SourceIndex(1)
---
>>> c = 2
1->^^^^^^^^^^^^
2 > ^
3 > ^^^^
1->,
2 > c
3 >
1->Emitted(29, 13) Source(17, 52) + SourceIndex(1)
2 >Emitted(29, 14) Source(17, 53) + SourceIndex(1)
3 >Emitted(29, 18) Source(17, 53) + SourceIndex(1)
---
>>> }
1 >^^^^^^^^^
1 > }
1 >Emitted(30, 10) Source(17, 55) + SourceIndex(1)
---
>>> }
1 >^^^^^
2 > ^^^^^^^^^^^^^^^^^^^^^^^^->
1 >
>}
1 >Emitted(31, 6) Source(18, 2) + SourceIndex(1)
---
>>> export class internalC {
1->^^^^
2 > ^^^^^^
3 > ^^^^^^^
4 > ^^^^^^^^^
1->
>/*@internal*/
2 > export
3 > class
4 > internalC
1->Emitted(32, 5) Source(19, 15) + SourceIndex(1)
2 >Emitted(32, 11) Source(19, 21) + SourceIndex(1)
3 >Emitted(32, 18) Source(19, 28) + SourceIndex(1)
4 >Emitted(32, 27) Source(19, 37) + SourceIndex(1)
---
>>> }
1 >^^^^^
2 > ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^->
1 > {}
1 >Emitted(33, 6) Source(19, 40) + SourceIndex(1)
---
>>> export function internalfoo(): void;
1->^^^^
2 > ^^^^^^
3 > ^^^^^^^^^^
4 > ^^^^^^^^^^^
5 > ^^^^^^^^^
6 > ^->
1->
>/*@internal*/
2 > export
3 > function
4 > internalfoo
5 > () {}
1->Emitted(34, 5) Source(20, 15) + SourceIndex(1)
2 >Emitted(34, 11) Source(20, 21) + SourceIndex(1)
3 >Emitted(34, 21) Source(20, 31) + SourceIndex(1)
4 >Emitted(34, 32) Source(20, 42) + SourceIndex(1)
5 >Emitted(34, 41) Source(20, 47) + SourceIndex(1)
---
>>> export namespace internalNamespace {
1->^^^^
2 > ^^^^^^
3 > ^^^^^^^^^^^
4 > ^^^^^^^^^^^^^^^^^
5 > ^
1->
>/*@internal*/
2 > export
3 > namespace
4 > internalNamespace
5 >
1->Emitted(35, 5) Source(21, 15) + SourceIndex(1)
2 >Emitted(35, 11) Source(21, 21) + SourceIndex(1)
3 >Emitted(35, 22) Source(21, 32) + SourceIndex(1)
4 >Emitted(35, 39) Source(21, 49) + SourceIndex(1)
5 >Emitted(35, 40) Source(21, 50) + SourceIndex(1)
---
>>> class someClass {
1 >^^^^^^^^
2 > ^^^^^^
3 > ^^^^^^^^^
1 >{
2 > export class
3 > someClass
1 >Emitted(36, 9) Source(21, 52) + SourceIndex(1)
2 >Emitted(36, 15) Source(21, 65) + SourceIndex(1)
3 >Emitted(36, 24) Source(21, 74) + SourceIndex(1)
---
>>> }
1 >^^^^^^^^^
1 > {}
1 >Emitted(37, 10) Source(21, 77) + SourceIndex(1)
---
>>> }
1 >^^^^^
2 > ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^->
1 > }
1 >Emitted(38, 6) Source(21, 79) + SourceIndex(1)
---
>>> export namespace internalOther.something {
1->^^^^
2 > ^^^^^^
3 > ^^^^^^^^^^^
4 > ^^^^^^^^^^^^^
5 > ^
6 > ^^^^^^^^^
7 > ^
1->
>/*@internal*/
2 > export
3 > namespace
4 > internalOther
5 > .
6 > something
7 >
1->Emitted(39, 5) Source(22, 15) + SourceIndex(1)
2 >Emitted(39, 11) Source(22, 21) + SourceIndex(1)
3 >Emitted(39, 22) Source(22, 32) + SourceIndex(1)
4 >Emitted(39, 35) Source(22, 45) + SourceIndex(1)
5 >Emitted(39, 36) Source(22, 46) + SourceIndex(1)
6 >Emitted(39, 45) Source(22, 55) + SourceIndex(1)
7 >Emitted(39, 46) Source(22, 56) + SourceIndex(1)
---
>>> class someClass {
1 >^^^^^^^^
2 > ^^^^^^
3 > ^^^^^^^^^
1 >{
2 > export class
3 > someClass
1 >Emitted(40, 9) Source(22, 58) + SourceIndex(1)
2 >Emitted(40, 15) Source(22, 71) + SourceIndex(1)
3 >Emitted(40, 24) Source(22, 80) + SourceIndex(1)
---
>>> }
1 >^^^^^^^^^
1 > {}
1 >Emitted(41, 10) Source(22, 83) + SourceIndex(1)
---
>>> }
1 >^^^^^
2 > ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^->
1 > }
1 >Emitted(42, 6) Source(22, 85) + SourceIndex(1)
---
>>> export import internalImport = internalNamespace.someClass;
1->^^^^
2 > ^^^^^^
3 > ^^^^^^^^
4 > ^^^^^^^^^^^^^^
5 > ^^^
6 > ^^^^^^^^^^^^^^^^^
7 > ^
8 > ^^^^^^^^^
9 > ^
1->
>/*@internal*/
2 > export
3 > import
4 > internalImport
5 > =
6 > internalNamespace
7 > .
8 > someClass
9 > ;
1->Emitted(43, 5) Source(23, 15) + SourceIndex(1)
2 >Emitted(43, 11) Source(23, 21) + SourceIndex(1)
3 >Emitted(43, 19) Source(23, 29) + SourceIndex(1)
4 >Emitted(43, 33) Source(23, 43) + SourceIndex(1)
5 >Emitted(43, 36) Source(23, 46) + SourceIndex(1)
6 >Emitted(43, 53) Source(23, 63) + SourceIndex(1)
7 >Emitted(43, 54) Source(23, 64) + SourceIndex(1)
8 >Emitted(43, 63) Source(23, 73) + SourceIndex(1)
9 >Emitted(43, 64) Source(23, 74) + SourceIndex(1)
---
>>> export type internalType = internalC;
1 >^^^^
2 > ^^^^^^
3 > ^^^^^^
4 > ^^^^^^^^^^^^
5 > ^^^
6 > ^^^^^^^^^
7 > ^
1 >
>/*@internal*/
2 > export
3 > type
4 > internalType
5 > =
6 > internalC
7 > ;
1 >Emitted(44, 5) Source(24, 15) + SourceIndex(1)
2 >Emitted(44, 11) Source(24, 21) + SourceIndex(1)
3 >Emitted(44, 17) Source(24, 27) + SourceIndex(1)
4 >Emitted(44, 29) Source(24, 39) + SourceIndex(1)
5 >Emitted(44, 32) Source(24, 42) + SourceIndex(1)
6 >Emitted(44, 41) Source(24, 51) + SourceIndex(1)
7 >Emitted(44, 42) Source(24, 52) + SourceIndex(1)
---
>>> export const internalConst = 10;
1 >^^^^
2 > ^^^^^^
3 > ^
4 > ^^^^^^
5 > ^^^^^^^^^^^^^
6 > ^^^^^
7 > ^
1 >
>/*@internal*/
2 > export
3 >
4 > const
5 > internalConst
6 > = 10
7 > ;
1 >Emitted(45, 5) Source(25, 15) + SourceIndex(1)
2 >Emitted(45, 11) Source(25, 21) + SourceIndex(1)
3 >Emitted(45, 12) Source(25, 22) + SourceIndex(1)
4 >Emitted(45, 18) Source(25, 28) + SourceIndex(1)
5 >Emitted(45, 31) Source(25, 41) + SourceIndex(1)
6 >Emitted(45, 36) Source(25, 46) + SourceIndex(1)
7 >Emitted(45, 37) Source(25, 47) + SourceIndex(1)
---
>>> export enum internalEnum {
1 >^^^^
2 > ^^^^^^
3 > ^^^^^^
4 > ^^^^^^^^^^^^
1 >
>/*@internal*/
2 > export
3 > enum
4 > internalEnum
1 >Emitted(46, 5) Source(26, 15) + SourceIndex(1)
2 >Emitted(46, 11) Source(26, 21) + SourceIndex(1)
3 >Emitted(46, 17) Source(26, 27) + SourceIndex(1)
4 >Emitted(46, 29) Source(26, 39) + SourceIndex(1)
---
>>> a = 0,
1 >^^^^^^^^
2 > ^
3 > ^^^^
4 > ^^->
1 > {
2 > a
3 >
1 >Emitted(47, 9) Source(26, 42) + SourceIndex(1)
2 >Emitted(47, 10) Source(26, 43) + SourceIndex(1)
3 >Emitted(47, 14) Source(26, 43) + SourceIndex(1)
---
>>> b = 1,
1->^^^^^^^^
2 > ^
3 > ^^^^
4 > ^->
1->,
2 > b
3 >
1->Emitted(48, 9) Source(26, 45) + SourceIndex(1)
2 >Emitted(48, 10) Source(26, 46) + SourceIndex(1)
3 >Emitted(48, 14) Source(26, 46) + SourceIndex(1)
---
>>> c = 2
1->^^^^^^^^
2 > ^
3 > ^^^^
1->,
2 > c
3 >
1->Emitted(49, 9) Source(26, 48) + SourceIndex(1)
2 >Emitted(49, 10) Source(26, 49) + SourceIndex(1)
3 >Emitted(49, 14) Source(26, 49) + SourceIndex(1)
---
>>> }
1 >^^^^^
1 > }
1 >Emitted(50, 6) Source(26, 51) + SourceIndex(1)
---
-------------------------------------------------------------------
emittedFile:/src/lib/module.d.ts
sourceFile:file2.ts
-------------------------------------------------------------------
>>>}
>>>declare module "file2" {
>>> export const y = 20;
1 >^^^^
2 > ^^^^^^
3 > ^
4 > ^^^^^^
5 > ^
6 > ^^^^^
7 > ^
1 >
2 > export
3 >
4 > const
5 > y
6 > = 20
7 > ;
1 >Emitted(53, 5) Source(1, 1) + SourceIndex(2)
2 >Emitted(53, 11) Source(1, 7) + SourceIndex(2)
3 >Emitted(53, 12) Source(1, 8) + SourceIndex(2)
4 >Emitted(53, 18) Source(1, 14) + SourceIndex(2)
5 >Emitted(53, 19) Source(1, 15) + SourceIndex(2)
6 >Emitted(53, 24) Source(1, 20) + SourceIndex(2)
7 >Emitted(53, 25) Source(1, 21) + SourceIndex(2)
---
-------------------------------------------------------------------
emittedFile:/src/lib/module.d.ts
sourceFile:global.ts
-------------------------------------------------------------------
>>>}
>>>declare const globalConst = 10;
1 >
2 >^^^^^^^^
3 > ^^^^^^
4 > ^^^^^^^^^^^
5 > ^^^^^
6 > ^
7 > ^^^^->
1 >
2 >
3 > const
4 > globalConst
5 > = 10
6 > ;
1 >Emitted(55, 1) Source(1, 1) + SourceIndex(3)
2 >Emitted(55, 9) Source(1, 1) + SourceIndex(3)
3 >Emitted(55, 15) Source(1, 7) + SourceIndex(3)
4 >Emitted(55, 26) Source(1, 18) + SourceIndex(3)
5 >Emitted(55, 31) Source(1, 23) + SourceIndex(3)
6 >Emitted(55, 32) Source(1, 24) + SourceIndex(3)
---
>>>//# sourceMappingURL=module.d.ts.map
//// [/src/lib/module.js]
/*@internal*/ var myGlob = 20;
define("file1", ["require", "exports"], function (require, exports) {
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.x = 10;
var normalC = /** @class */ (function () {
/*@internal*/ function normalC() {
}
/*@internal*/ normalC.prototype.method = function () { };
Object.defineProperty(normalC.prototype, "c", {
/*@internal*/ get: function () { return 10; },
/*@internal*/ set: function (val) { },
enumerable: false,
configurable: true
});
return normalC;
}());
exports.normalC = normalC;
var normalN;
(function (normalN) {
/*@internal*/ var C = /** @class */ (function () {
function C() {
}
return C;
}());
normalN.C = C;
/*@internal*/ function foo() { }
normalN.foo = foo;
/*@internal*/ var someNamespace;
(function (someNamespace) {
var C = /** @class */ (function () {
function C() {
}
return C;
}());
someNamespace.C = C;
})(someNamespace = normalN.someNamespace || (normalN.someNamespace = {}));
/*@internal*/ var someOther;
(function (someOther) {
var something;
(function (something) {
var someClass = /** @class */ (function () {
function someClass() {
}
return someClass;
}());
something.someClass = someClass;
})(something = someOther.something || (someOther.something = {}));
})(someOther = normalN.someOther || (normalN.someOther = {}));
/*@internal*/ normalN.someImport = someNamespace.C;
/*@internal*/ normalN.internalConst = 10;
/*@internal*/ var internalEnum;
(function (internalEnum) {
internalEnum[internalEnum["a"] = 0] = "a";
internalEnum[internalEnum["b"] = 1] = "b";
internalEnum[internalEnum["c"] = 2] = "c";
})(internalEnum = normalN.internalEnum || (normalN.internalEnum = {}));
})(normalN = exports.normalN || (exports.normalN = {}));
/*@internal*/ var internalC = /** @class */ (function () {
function internalC() {
}
return internalC;
}());
exports.internalC = internalC;
/*@internal*/ function internalfoo() { }
exports.internalfoo = internalfoo;
/*@internal*/ var internalNamespace;
(function (internalNamespace) {
var someClass = /** @class */ (function () {
function someClass() {
}
return someClass;
}());
internalNamespace.someClass = someClass;
})(internalNamespace = exports.internalNamespace || (exports.internalNamespace = {}));
/*@internal*/ var internalOther;
(function (internalOther) {
var something;
(function (something) {
var someClass = /** @class */ (function () {
function someClass() {
}
return someClass;
}());
something.someClass = someClass;
})(something = internalOther.something || (internalOther.something = {}));
})(internalOther = exports.internalOther || (exports.internalOther = {}));
/*@internal*/ exports.internalImport = internalNamespace.someClass;
/*@internal*/ exports.internalConst = 10;
/*@internal*/ var internalEnum;
(function (internalEnum) {
internalEnum[internalEnum["a"] = 0] = "a";
internalEnum[internalEnum["b"] = 1] = "b";
internalEnum[internalEnum["c"] = 2] = "c";
})(internalEnum = exports.internalEnum || (exports.internalEnum = {}));
});
define("file2", ["require", "exports"], function (require, exports) {
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.y = 20;
});
var globalConst = 10;
//# sourceMappingURL=module.js.map
//// [/src/lib/module.js.map]
{"version":3,"file":"module.js","sourceRoot":"","sources":["file0.ts","file1.ts","file2.ts","global.ts"],"names":[],"mappings":"AAAA,aAAa,CAAC,IAAM,MAAM,GAAG,EAAE,CAAC;;;;ICAnB,QAAA,CAAC,GAAG,EAAE,CAAC;IACpB;QACI,aAAa,CAAC;QAAgB,CAAC;QAE/B,aAAa,CAAC,wBAAM,GAAN,cAAW,CAAC;QACZ,sBAAI,sBAAC;YAAnB,aAAa,MAAC,cAAU,OAAO,EAAE,CAAC,CAAC,CAAC;YACpC,aAAa,MAAC,UAAM,GAAW,IAAI,CAAC;;;WADA;QAExC,cAAC;IAAD,CAAC,AAND,IAMC;IANY,0BAAO;IAOpB,IAAiB,OAAO,CASvB;IATD,WAAiB,OAAO;QACpB,aAAa,CAAC;YAAA;YAAiB,CAAC;YAAD,QAAC;QAAD,CAAC,AAAlB,IAAkB;QAAL,SAAC,IAAI,CAAA;QAChC,aAAa,CAAC,SAAgB,GAAG,KAAI,CAAC;QAAR,WAAG,MAAK,CAAA;QACtC,aAAa,CAAC,IAAiB,aAAa,CAAsB;QAApD,WAAiB,aAAa;YAAG;gBAAA;gBAAgB,CAAC;gBAAD,QAAC;YAAD,CAAC,AAAjB,IAAiB;YAAJ,eAAC,IAAG,CAAA;QAAC,CAAC,EAAnC,aAAa,GAAb,qBAAa,KAAb,qBAAa,QAAsB;QAClE,aAAa,CAAC,IAAiB,SAAS,CAAwC;QAAlE,WAAiB,SAAS;YAAC,IAAA,SAAS,CAA8B;YAAvC,WAAA,SAAS;gBAAG;oBAAA;oBAAwB,CAAC;oBAAD,gBAAC;gBAAD,CAAC,AAAzB,IAAyB;gBAAZ,mBAAS,YAAG,CAAA;YAAC,CAAC,EAAvC,SAAS,GAAT,mBAAS,KAAT,mBAAS,QAA8B;QAAD,CAAC,EAAjD,SAAS,GAAT,iBAAS,KAAT,iBAAS,QAAwC;QAChF,aAAa,CAAe,kBAAU,GAAG,aAAa,CAAC,CAAC,CAAC;QAEzD,aAAa,CAAc,qBAAa,GAAG,EAAE,CAAC;QAC9C,aAAa,CAAC,IAAY,YAAwB;QAApC,WAAY,YAAY;YAAG,yCAAC,CAAA;YAAE,yCAAC,CAAA;YAAE,yCAAC,CAAA;QAAC,CAAC,EAAxB,YAAY,GAAZ,oBAAY,KAAZ,oBAAY,QAAY;IACtD,CAAC,EATgB,OAAO,GAAP,eAAO,KAAP,eAAO,QASvB;IACD,aAAa,CAAC;QAAA;QAAwB,CAAC;QAAD,gBAAC;IAAD,CAAC,AAAzB,IAAyB;IAAZ,8BAAS;IACpC,aAAa,CAAC,SAAgB,WAAW,KAAI,CAAC;IAAhC,kCAAgC;IAC9C,aAAa,CAAC,IAAiB,iBAAiB,CAA8B;IAAhE,WAAiB,iBAAiB;QAAG;YAAA;YAAwB,CAAC;YAAD,gBAAC;QAAD,CAAC,AAAzB,IAAyB;QAAZ,2BAAS,YAAG,CAAA;IAAC,CAAC,EAA/C,iBAAiB,GAAjB,yBAAiB,KAAjB,yBAAiB,QAA8B;IAC9E,aAAa,CAAC,IAAiB,aAAa,CAAwC;IAAtE,WAAiB,aAAa;QAAC,IAAA,SAAS,CAA8B;QAAvC,WAAA,SAAS;YAAG;gBAAA;gBAAwB,CAAC;gBAAD,gBAAC;YAAD,CAAC,AAAzB,IAAyB;YAAZ,mBAAS,YAAG,CAAA;QAAC,CAAC,EAAvC,SAAS,GAAT,uBAAS,KAAT,uBAAS,QAA8B;IAAD,CAAC,EAArD,aAAa,GAAb,qBAAa,KAAb,qBAAa,QAAwC;IACpF,aAAa,CAAe,QAAA,cAAc,GAAG,iBAAiB,CAAC,SAAS,CAAC;IAEzE,aAAa,CAAc,QAAA,aAAa,GAAG,EAAE,CAAC;IAC9C,aAAa,CAAC,IAAY,YA
AwB;IAApC,WAAY,YAAY;QAAG,yCAAC,CAAA;QAAE,yCAAC,CAAA;QAAE,yCAAC,CAAA;IAAC,CAAC,EAAxB,YAAY,GAAZ,oBAAY,KAAZ,oBAAY,QAAY;;;;;ICzBrC,QAAA,CAAC,GAAG,EAAE,CAAC;;ACApB,IAAM,WAAW,GAAG,EAAE,CAAC"}
//// [/src/lib/module.js.map.baseline.txt]
===================================================================
JsFile: module.js
mapUrl: module.js.map
sourceRoot:
sources: file0.ts,file1.ts,file2.ts,global.ts
===================================================================
-------------------------------------------------------------------
emittedFile:/src/lib/module.js
sourceFile:file0.ts
-------------------------------------------------------------------
>>>/*@internal*/ var myGlob = 20;
1 >
2 >^^^^^^^^^^^^^
3 > ^
4 > ^^^^
5 > ^^^^^^
6 > ^^^
7 > ^^
8 > ^
9 > ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^->
1 >
2 >/*@internal*/
3 >
4 > const
5 > myGlob
6 > =
7 > 20
8 > ;
1 >Emitted(1, 1) Source(1, 1) + SourceIndex(0)
2 >Emitted(1, 14) Source(1, 14) + SourceIndex(0)
3 >Emitted(1, 15) Source(1, 15) + SourceIndex(0)
4 >Emitted(1, 19) Source(1, 21) + SourceIndex(0)
5 >Emitted(1, 25) Source(1, 27) + SourceIndex(0)
6 >Emitted(1, 28) Source(1, 30) + SourceIndex(0)
7 >Emitted(1, 30) Source(1, 32) + SourceIndex(0)
8 >Emitted(1, 31) Source(1, 33) + SourceIndex(0)
---
-------------------------------------------------------------------
emittedFile:/src/lib/module.js
sourceFile:file1.ts
-------------------------------------------------------------------
>>>define("file1", ["require", "exports"], function (require, exports) {
>>> "use strict";
>>> Object.defineProperty(exports, "__esModule", { value: true });
>>> exports.x = 10;
1->^^^^
2 > ^^^^^^^^
3 > ^
4 > ^^^
5 > ^^
6 > ^
7 > ^^^^^^^^^^^^^^^^^^^^^^^^^^^^->
1->export const
2 >
3 > x
4 > =
5 > 10
6 > ;
1->Emitted(5, 5) Source(1, 14) + SourceIndex(1)
2 >Emitted(5, 13) Source(1, 14) + SourceIndex(1)
3 >Emitted(5, 14) Source(1, 15) + SourceIndex(1)
4 >Emitted(5, 17) Source(1, 18) + SourceIndex(1)
5 >Emitted(5, 19) Source(1, 20) + SourceIndex(1)
6 >Emitted(5, 20) Source(1, 21) + SourceIndex(1)
---
>>> var normalC = /** @class */ (function () {
1->^^^^
2 > ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^->
1->
>
1->Emitted(6, 5) Source(2, 1) + SourceIndex(1)
---
>>> /*@internal*/ function normalC() {
1->^^^^^^^^
2 > ^^^^^^^^^^^^^
3 > ^
1->export class normalC {
>
2 > /*@internal*/
3 >
1->Emitted(7, 9) Source(3, 5) + SourceIndex(1)
2 >Emitted(7, 22) Source(3, 18) + SourceIndex(1)
3 >Emitted(7, 23) Source(3, 19) + SourceIndex(1)
---
>>> }
1 >^^^^^^^^
2 > ^
3 > ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^->
1 >constructor() {
2 > }
1 >Emitted(8, 9) Source(3, 35) + SourceIndex(1)
2 >Emitted(8, 10) Source(3, 36) + SourceIndex(1)
---
>>> /*@internal*/ normalC.prototype.method = function () { };
1->^^^^^^^^
2 > ^^^^^^^^^^^^^
3 > ^
4 > ^^^^^^^^^^^^^^^^^^^^^^^^
5 > ^^^
6 > ^^^^^^^^^^^^^^
7 > ^
1->
> /*@internal*/ prop: string;
>
2 > /*@internal*/
3 >
4 > method
5 >
6 > method() {
7 > }
1->Emitted(9, 9) Source(5, 5) + SourceIndex(1)
2 >Emitted(9, 22) Source(5, 18) + SourceIndex(1)
3 >Emitted(9, 23) Source(5, 19) + SourceIndex(1)
4 >Emitted(9, 47) Source(5, 25) + SourceIndex(1)
5 >Emitted(9, 50) Source(5, 19) + SourceIndex(1)
6 >Emitted(9, 64) Source(5, 30) + SourceIndex(1)
7 >Emitted(9, 65) Source(5, 31) + SourceIndex(1)
---
>>> Object.defineProperty(normalC.prototype, "c", {
1 >^^^^^^^^
2 > ^^^^^^^^^^^^^^^^^^^^^^
3 > ^^^^^^^^^^^^^^^^^^^^^^
4 > ^^^^^^^->
1 >
> /*@internal*/
2 > get
3 > c
1 >Emitted(10, 9) Source(6, 19) + SourceIndex(1)
2 >Emitted(10, 31) Source(6, 23) + SourceIndex(1)
3 >Emitted(10, 53) Source(6, 24) + SourceIndex(1)
---
>>> /*@internal*/ get: function () { return 10; },
1->^^^^^^^^^^^^
2 > ^^^^^^^^^^^^^
3 > ^^^^^^
4 > ^^^^^^^^^^^^^^
5 > ^^^^^^^
6 > ^^
7 > ^
8 > ^
9 > ^
1->
2 > /*@internal*/
3 >
4 > get c() {
5 > return
6 > 10
7 > ;
8 >
9 > }
1->Emitted(11, 13) Source(6, 5) + SourceIndex(1)
2 >Emitted(11, 26) Source(6, 18) + SourceIndex(1)
3 >Emitted(11, 32) Source(6, 19) + SourceIndex(1)
4 >Emitted(11, 46) Source(6, 29) + SourceIndex(1)
5 >Emitted(11, 53) Source(6, 36) + SourceIndex(1)
6 >Emitted(11, 55) Source(6, 38) + SourceIndex(1)
7 >Emitted(11, 56) Source(6, 39) + SourceIndex(1)
8 >Emitted(11, 57) Source(6, 40) + SourceIndex(1)
9 >Emitted(11, 58) Source(6, 41) + SourceIndex(1)
---
>>> /*@internal*/ set: function (val) { },
1 >^^^^^^^^^^^^
2 > ^^^^^^^^^^^^^
3 > ^^^^^^
4 > ^^^^^^^^^^
5 > ^^^
6 > ^^^^
7 > ^
1 >
>
2 > /*@internal*/
3 >
4 > set c(
5 > val: number
6 > ) {
7 > }
1 >Emitted(12, 13) Source(7, 5) + SourceIndex(1)
2 >Emitted(12, 26) Source(7, 18) + SourceIndex(1)
3 >Emitted(12, 32) Source(7, 19) + SourceIndex(1)
4 >Emitted(12, 42) Source(7, 25) + SourceIndex(1)
5 >Emitted(12, 45) Source(7, 36) + SourceIndex(1)
6 >Emitted(12, 49) Source(7, 40) + SourceIndex(1)
7 >Emitted(12, 50) Source(7, 41) + SourceIndex(1)
---
>>> enumerable: false,
>>> configurable: true
>>> });
1 >^^^^^^^^^^^
2 > ^^^^^^^^^^^^^->
1 >
1 >Emitted(15, 12) Source(6, 41) + SourceIndex(1)
---
>>> return normalC;
1->^^^^^^^^
2 > ^^^^^^^^^^^^^^
1->
> /*@internal*/ set c(val: number) { }
>
2 > }
1->Emitted(16, 9) Source(8, 1) + SourceIndex(1)
2 >Emitted(16, 23) Source(8, 2) + SourceIndex(1)
---
>>> }());
1 >^^^^
2 > ^
3 >
4 > ^^^^
5 > ^^^^^^^^^^^^^^^^^^^^^^->
1 >
2 > }
3 >
4 > export class normalC {
> /*@internal*/ constructor() { }
> /*@internal*/ prop: string;
> /*@internal*/ method() { }
> /*@internal*/ get c() { return 10; }
> /*@internal*/ set c(val: number) { }
> }
1 >Emitted(17, 5) Source(8, 1) + SourceIndex(1)
2 >Emitted(17, 6) Source(8, 2) + SourceIndex(1)
3 >Emitted(17, 6) Source(2, 1) + SourceIndex(1)
4 >Emitted(17, 10) Source(8, 2) + SourceIndex(1)
---
>>> exports.normalC = normalC;
1->^^^^
2 > ^^^^^^^^^^^^^^^^^^^^^^^^^^
1->
2 > normalC
1->Emitted(18, 5) Source(2, 14) + SourceIndex(1)
2 >Emitted(18, 31) Source(2, 21) + SourceIndex(1)
---
>>> var normalN;
1 >^^^^
2 > ^^^^
3 > ^^^^^^^
4 > ^
5 > ^^^^^^^^^^->
1 > {
> /*@internal*/ constructor() { }
> /*@internal*/ prop: string;
> /*@internal*/ method() { }
> /*@internal*/ get c() { return 10; }
> /*@internal*/ set c(val: number) { }
>}
>
2 > export namespace
3 > normalN
4 > {
> /*@internal*/ export class C { }
> /*@internal*/ export function foo() {}
> /*@internal*/ export namespace someNamespace { export class C {} }
> /*@internal*/ export namespace someOther.something { export class someClass {} }
> /*@internal*/ export import someImport = someNamespace.C;
> /*@internal*/ export type internalType = internalC;
> /*@internal*/ export const internalConst = 10;
> /*@internal*/ export enum internalEnum { a, b, c }
> }
1 >Emitted(19, 5) Source(9, 1) + SourceIndex(1)
2 >Emitted(19, 9) Source(9, 18) + SourceIndex(1)
3 >Emitted(19, 16) Source(9, 25) + SourceIndex(1)
4 >Emitted(19, 17) Source(18, 2) + SourceIndex(1)
---
>>> (function (normalN) {
1->^^^^
2 > ^^^^^^^^^^^
3 > ^^^^^^^
4 > ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^->
1->
2 > export namespace
3 > normalN
1->Emitted(20, 5) Source(9, 1) + SourceIndex(1)
2 >Emitted(20, 16) Source(9, 18) + SourceIndex(1)
3 >Emitted(20, 23) Source(9, 25) + SourceIndex(1)
---
>>> /*@internal*/ var C = /** @class */ (function () {
1->^^^^^^^^
2 > ^^^^^^^^^^^^^
3 > ^
4 > ^^^^^->
1-> {
>
2 > /*@internal*/
3 >
1->Emitted(21, 9) Source(10, 5) + SourceIndex(1)
2 >Emitted(21, 22) Source(10, 18) + SourceIndex(1)
3 >Emitted(21, 23) Source(10, 19) + SourceIndex(1)
---
>>> function C() {
1->^^^^^^^^^^^^
2 > ^^->
1->
1->Emitted(22, 13) Source(10, 19) + SourceIndex(1)
---
>>> }
1->^^^^^^^^^^^^
2 > ^
3 > ^^^^^^^^^->
1->export class C {
2 > }
1->Emitted(23, 13) Source(10, 36) + SourceIndex(1)
2 >Emitted(23, 14) Source(10, 37) + SourceIndex(1)
---
>>> return C;
1->^^^^^^^^^^^^
2 > ^^^^^^^^
1->
2 > }
1->Emitted(24, 13) Source(10, 36) + SourceIndex(1)
2 >Emitted(24, 21) Source(10, 37) + SourceIndex(1)
---
>>> }());
1 >^^^^^^^^
2 > ^
3 >
4 > ^^^^
5 > ^^^^^^^^^^->
1 >
2 > }
3 >
4 > export class C { }
1 >Emitted(25, 9) Source(10, 36) + SourceIndex(1)
2 >Emitted(25, 10) Source(10, 37) + SourceIndex(1)
3 >Emitted(25, 10) Source(10, 19) + SourceIndex(1)
4 >Emitted(25, 14) Source(10, 37) + SourceIndex(1)
---
>>> normalN.C = C;
1->^^^^^^^^
2 > ^^^^^^^^^
3 > ^^^^
4 > ^
5 > ^^^^^^^^^^^^^^^^^^^->
1->
2 > C
3 > { }
4 >
1->Emitted(26, 9) Source(10, 32) + SourceIndex(1)
2 >Emitted(26, 18) Source(10, 33) + SourceIndex(1)
3 >Emitted(26, 22) Source(10, 37) + SourceIndex(1)
4 >Emitted(26, 23) Source(10, 37) + SourceIndex(1)
---
>>> /*@internal*/ function foo() { }
1->^^^^^^^^
2 > ^^^^^^^^^^^^^
3 > ^
4 > ^^^^^^^^^
5 > ^^^
6 > ^^^^^
7 > ^
1->
>
2 > /*@internal*/
3 >
4 > export function
5 > foo
6 > () {
7 > }
1->Emitted(27, 9) Source(11, 5) + SourceIndex(1)
2 >Emitted(27, 22) Source(11, 18) + SourceIndex(1)
3 >Emitted(27, 23) Source(11, 19) + SourceIndex(1)
4 >Emitted(27, 32) Source(11, 35) + SourceIndex(1)
5 >Emitted(27, 35) Source(11, 38) + SourceIndex(1)
6 >Emitted(27, 40) Source(11, 42) + SourceIndex(1)
7 >Emitted(27, 41) Source(11, 43) + SourceIndex(1)
---
>>> normalN.foo = foo;
1 >^^^^^^^^
2 > ^^^^^^^^^^^
3 > ^^^^^^
4 > ^
5 > ^^^^^^^^^^^^^^^->
1 >
2 > foo
3 > () {}
4 >
1 >Emitted(28, 9) Source(11, 35) + SourceIndex(1)
2 >Emitted(28, 20) Source(11, 38) + SourceIndex(1)
3 >Emitted(28, 26) Source(11, 43) + SourceIndex(1)
4 >Emitted(28, 27) Source(11, 43) + SourceIndex(1)
---
>>> /*@internal*/ var someNamespace;
1->^^^^^^^^
2 > ^^^^^^^^^^^^^
3 > ^
4 > ^^^^
5 > ^^^^^^^^^^^^^
6 > ^
1->
>
2 > /*@internal*/
3 >
4 > export namespace
5 > someNamespace
6 > { export class C {} }
1->Emitted(29, 9) Source(12, 5) + SourceIndex(1)
2 >Emitted(29, 22) Source(12, 18) + SourceIndex(1)
3 >Emitted(29, 23) Source(12, 19) + SourceIndex(1)
4 >Emitted(29, 27) Source(12, 36) + SourceIndex(1)
5 >Emitted(29, 40) Source(12, 49) + SourceIndex(1)
6 >Emitted(29, 41) Source(12, 71) + SourceIndex(1)
---
>>> (function (someNamespace) {
1 >^^^^^^^^
2 > ^^^^^^^^^^^
3 > ^^^^^^^^^^^^^
4 > ^^^^^^^^^^^^^^^^^->
1 >
2 > export namespace
3 > someNamespace
1 >Emitted(30, 9) Source(12, 19) + SourceIndex(1)
2 >Emitted(30, 20) Source(12, 36) + SourceIndex(1)
3 >Emitted(30, 33) Source(12, 49) + SourceIndex(1)
---
>>> var C = /** @class */ (function () {
1->^^^^^^^^^^^^
2 > ^^^^^^^^^^^^^^^^^^^->
1-> {
1->Emitted(31, 13) Source(12, 52) + SourceIndex(1)
---
>>> function C() {
1->^^^^^^^^^^^^^^^^
2 > ^^->
1->
1->Emitted(32, 17) Source(12, 52) + SourceIndex(1)
---
>>> }
1->^^^^^^^^^^^^^^^^
2 > ^
3 > ^^^^^^^^^->
1->export class C {
2 > }
1->Emitted(33, 17) Source(12, 68) + SourceIndex(1)
2 >Emitted(33, 18) Source(12, 69) + SourceIndex(1)
---
>>> return C;
1->^^^^^^^^^^^^^^^^
2 > ^^^^^^^^
1->
2 > }
1->Emitted(34, 17) Source(12, 68) + SourceIndex(1)
2 >Emitted(34, 25) Source(12, 69) + SourceIndex(1)
---
>>> }());
1 >^^^^^^^^^^^^
2 > ^
3 >
4 > ^^^^
5 > ^^^^^^^^^^^^^^^^->
1 >
2 > }
3 >
4 > export class C {}
1 >Emitted(35, 13) Source(12, 68) + SourceIndex(1)
2 >Emitted(35, 14) Source(12, 69) + SourceIndex(1)
3 >Emitted(35, 14) Source(12, 52) + SourceIndex(1)
4 >Emitted(35, 18) Source(12, 69) + SourceIndex(1)
---
>>> someNamespace.C = C;
1->^^^^^^^^^^^^
2 > ^^^^^^^^^^^^^^^
3 > ^^^^
4 > ^
5 > ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^->
1->
2 > C
3 > {}
4 >
1->Emitted(36, 13) Source(12, 65) + SourceIndex(1)
2 >Emitted(36, 28) Source(12, 66) + SourceIndex(1)
3 >Emitted(36, 32) Source(12, 69) + SourceIndex(1)
4 >Emitted(36, 33) Source(12, 69) + SourceIndex(1)
---
>>> })(someNamespace = normalN.someNamespace || (normalN.someNamespace = {}));
1->^^^^^^^^
2 > ^
3 > ^^
4 > ^^^^^^^^^^^^^
5 > ^^^
6 > ^^^^^^^^^^^^^^^^^^^^^
7 > ^^^^^
8 > ^^^^^^^^^^^^^^^^^^^^^
9 > ^^^^^^^^
1->
2 > }
3 >
4 > someNamespace
5 >
6 > someNamespace
7 >
8 > someNamespace
9 > { export class C {} }
1->Emitted(37, 9) Source(12, 70) + SourceIndex(1)
2 >Emitted(37, 10) Source(12, 71) + SourceIndex(1)
3 >Emitted(37, 12) Source(12, 36) + SourceIndex(1)
4 >Emitted(37, 25) Source(12, 49) + SourceIndex(1)
5 >Emitted(37, 28) Source(12, 36) + SourceIndex(1)
6 >Emitted(37, 49) Source(12, 49) + SourceIndex(1)
7 >Emitted(37, 54) Source(12, 36) + SourceIndex(1)
8 >Emitted(37, 75) Source(12, 49) + SourceIndex(1)
9 >Emitted(37, 83) Source(12, 71) + SourceIndex(1)
---
>>> /*@internal*/ var someOther;
1 >^^^^^^^^
2 > ^^^^^^^^^^^^^
3 > ^
4 > ^^^^
5 > ^^^^^^^^^
6 > ^
1 >
>
2 > /*@internal*/
3 >
4 > export namespace
5 > someOther
6 > .something { export class someClass {} }
1 >Emitted(38, 9) Source(13, 5) + SourceIndex(1)
2 >Emitted(38, 22) Source(13, 18) + SourceIndex(1)
3 >Emitted(38, 23) Source(13, 19) + SourceIndex(1)
4 >Emitted(38, 27) Source(13, 36) + SourceIndex(1)
5 >Emitted(38, 36) Source(13, 45) + SourceIndex(1)
6 >Emitted(38, 37) Source(13, 85) + SourceIndex(1)
---
>>> (function (someOther) {
1 >^^^^^^^^
2 > ^^^^^^^^^^^
3 > ^^^^^^^^^
1 >
2 > export namespace
3 > someOther
1 >Emitted(39, 9) Source(13, 19) + SourceIndex(1)
2 >Emitted(39, 20) Source(13, 36) + SourceIndex(1)
3 >Emitted(39, 29) Source(13, 45) + SourceIndex(1)
---
>>> var something;
1 >^^^^^^^^^^^^
2 > ^^^^
3 > ^^^^^^^^^
4 > ^
5 > ^^^^^^^^^^->
1 >.
2 >
3 > something
4 > { export class someClass {} }
1 >Emitted(40, 13) Source(13, 46) + SourceIndex(1)
2 >Emitted(40, 17) Source(13, 46) + SourceIndex(1)
3 >Emitted(40, 26) Source(13, 55) + SourceIndex(1)
4 >Emitted(40, 27) Source(13, 85) + SourceIndex(1)
---
>>> (function (something) {
1->^^^^^^^^^^^^
2 > ^^^^^^^^^^^
3 > ^^^^^^^^^
4 > ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^->
1->
2 >
3 > something
1->Emitted(41, 13) Source(13, 46) + SourceIndex(1)
2 >Emitted(41, 24) Source(13, 46) + SourceIndex(1)
3 >Emitted(41, 33) Source(13, 55) + SourceIndex(1)
---
>>> var someClass = /** @class */ (function () {
1->^^^^^^^^^^^^^^^^
2 > ^^^^^^^^^^^^^^^^^^^^^^^^^^^->
1-> {
1->Emitted(42, 17) Source(13, 58) + SourceIndex(1)
---
>>> function someClass() {
1->^^^^^^^^^^^^^^^^^^^^
2 > ^^->
1->
1->Emitted(43, 21) Source(13, 58) + SourceIndex(1)
---
>>> }
1->^^^^^^^^^^^^^^^^^^^^
2 > ^
3 > ^^^^^^^^^^^^^^^^^->
1->export class someClass {
2 > }
1->Emitted(44, 21) Source(13, 82) + SourceIndex(1)
2 >Emitted(44, 22) Source(13, 83) + SourceIndex(1)
---
>>> return someClass;
1->^^^^^^^^^^^^^^^^^^^^
2 > ^^^^^^^^^^^^^^^^
1->
2 > }
1->Emitted(45, 21) Source(13, 82) + SourceIndex(1)
2 >Emitted(45, 37) Source(13, 83) + SourceIndex(1)
---
>>> }());
1 >^^^^^^^^^^^^^^^^
2 > ^
3 >
4 > ^^^^
5 > ^^^^^^^^^^^^^^^^^^^^^^^^^^^^->
1 >
2 > }
3 >
4 > export class someClass {}
1 >Emitted(46, 17) Source(13, 82) + SourceIndex(1)
2 >Emitted(46, 18) Source(13, 83) + SourceIndex(1)
3 >Emitted(46, 18) Source(13, 58) + SourceIndex(1)
4 >Emitted(46, 22) Source(13, 83) + SourceIndex(1)
---
>>> something.someClass = someClass;
1->^^^^^^^^^^^^^^^^
2 > ^^^^^^^^^^^^^^^^^^^
3 > ^^^^^^^^^^^^
4 > ^
5 > ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^->
1->
2 > someClass
3 > {}
4 >
1->Emitted(47, 17) Source(13, 71) + SourceIndex(1)
2 >Emitted(47, 36) Source(13, 80) + SourceIndex(1)
3 >Emitted(47, 48) Source(13, 83) + SourceIndex(1)
4 >Emitted(47, 49) Source(13, 83) + SourceIndex(1)
---
>>> })(something = someOther.something || (someOther.something = {}));
1->^^^^^^^^^^^^
2 > ^
3 > ^^
4 > ^^^^^^^^^
5 > ^^^
6 > ^^^^^^^^^^^^^^^^^^^
7 > ^^^^^
8 > ^^^^^^^^^^^^^^^^^^^
9 > ^^^^^^^^
1->
2 > }
3 >
4 > something
5 >
6 > something
7 >
8 > something
9 > { export class someClass {} }
1->Emitted(48, 13) Source(13, 84) + SourceIndex(1)
2 >Emitted(48, 14) Source(13, 85) + SourceIndex(1)
3 >Emitted(48, 16) Source(13, 46) + SourceIndex(1)
4 >Emitted(48, 25) Source(13, 55) + SourceIndex(1)
5 >Emitted(48, 28) Source(13, 46) + SourceIndex(1)
6 >Emitted(48, 47) Source(13, 55) + SourceIndex(1)
7 >Emitted(48, 52) Source(13, 46) + SourceIndex(1)
8 >Emitted(48, 71) Source(13, 55) + SourceIndex(1)
9 >Emitted(48, 79) Source(13, 85) + SourceIndex(1)
---
>>> })(someOther = normalN.someOther || (normalN.someOther = {}));
1 >^^^^^^^^
2 > ^
3 > ^^
4 > ^^^^^^^^^
5 > ^^^
6 > ^^^^^^^^^^^^^^^^^
7 > ^^^^^
8 > ^^^^^^^^^^^^^^^^^
9 > ^^^^^^^^
1 >
2 > }
3 >
4 > someOther
5 >
6 > someOther
7 >
8 > someOther
9 > .something { export class someClass {} }
1 >Emitted(49, 9) Source(13, 84) + SourceIndex(1)
2 >Emitted(49, 10) Source(13, 85) + SourceIndex(1)
3 >Emitted(49, 12) Source(13, 36) + SourceIndex(1)
4 >Emitted(49, 21) Source(13, 45) + SourceIndex(1)
5 >Emitted(49, 24) Source(13, 36) + SourceIndex(1)
6 >Emitted(49, 41) Source(13, 45) + SourceIndex(1)
7 >Emitted(49, 46) Source(13, 36) + SourceIndex(1)
8 >Emitted(49, 63) Source(13, 45) + SourceIndex(1)
9 >Emitted(49, 71) Source(13, 85) + SourceIndex(1)
---
>>> /*@internal*/ normalN.someImport = someNamespace.C;
1 >^^^^^^^^
2 > ^^^^^^^^^^^^^
3 > ^
4 > ^^^^^^^^^^^^^^^^^^
5 > ^^^
6 > ^^^^^^^^^^^^^
7 > ^
8 > ^
9 > ^
1 >
>
2 > /*@internal*/
3 > export import
4 > someImport
5 > =
6 > someNamespace
7 > .
8 > C
9 > ;
1 >Emitted(50, 9) Source(14, 5) + SourceIndex(1)
2 >Emitted(50, 22) Source(14, 18) + SourceIndex(1)
3 >Emitted(50, 23) Source(14, 33) + SourceIndex(1)
4 >Emitted(50, 41) Source(14, 43) + SourceIndex(1)
5 >Emitted(50, 44) Source(14, 46) + SourceIndex(1)
6 >Emitted(50, 57) Source(14, 59) + SourceIndex(1)
7 >Emitted(50, 58) Source(14, 60) + SourceIndex(1)
8 >Emitted(50, 59) Source(14, 61) + SourceIndex(1)
9 >Emitted(50, 60) Source(14, 62) + SourceIndex(1)
---
>>> /*@internal*/ normalN.internalConst = 10;
1 >^^^^^^^^
2 > ^^^^^^^^^^^^^
3 > ^
4 > ^^^^^^^^^^^^^^^^^^^^^
5 > ^^^
6 > ^^
7 > ^
1 >
> /*@internal*/ export type internalType = internalC;
>
2 > /*@internal*/
3 > export const
4 > internalConst
5 > =
6 > 10
7 > ;
1 >Emitted(51, 9) Source(16, 5) + SourceIndex(1)
2 >Emitted(51, 22) Source(16, 18) + SourceIndex(1)
3 >Emitted(51, 23) Source(16, 32) + SourceIndex(1)
4 >Emitted(51, 44) Source(16, 45) + SourceIndex(1)
5 >Emitted(51, 47) Source(16, 48) + SourceIndex(1)
6 >Emitted(51, 49) Source(16, 50) + SourceIndex(1)
7 >Emitted(51, 50) Source(16, 51) + SourceIndex(1)
---
>>> /*@internal*/ var internalEnum;
1 >^^^^^^^^
2 > ^^^^^^^^^^^^^
3 > ^
4 > ^^^^
5 > ^^^^^^^^^^^^
1 >
>
2 > /*@internal*/
3 >
4 > export enum
5 > internalEnum { a, b, c }
1 >Emitted(52, 9) Source(17, 5) + SourceIndex(1)
2 >Emitted(52, 22) Source(17, 18) + SourceIndex(1)
3 >Emitted(52, 23) Source(17, 19) + SourceIndex(1)
4 >Emitted(52, 27) Source(17, 31) + SourceIndex(1)
5 >Emitted(52, 39) Source(17, 55) + SourceIndex(1)
---
>>> (function (internalEnum) {
1 >^^^^^^^^
2 > ^^^^^^^^^^^
3 > ^^^^^^^^^^^^
4 > ^^^^^^^^^^^^^^^^^^^^^^^^->
1 >
2 > export enum
3 > internalEnum
1 >Emitted(53, 9) Source(17, 19) + SourceIndex(1)
2 >Emitted(53, 20) Source(17, 31) + SourceIndex(1)
3 >Emitted(53, 32) Source(17, 43) + SourceIndex(1)
---
>>> internalEnum[internalEnum["a"] = 0] = "a";
1->^^^^^^^^^^^^
2 > ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
3 > ^
4 > ^->
1-> {
2 > a
3 >
1->Emitted(54, 13) Source(17, 46) + SourceIndex(1)
2 >Emitted(54, 54) Source(17, 47) + SourceIndex(1)
3 >Emitted(54, 55) Source(17, 47) + SourceIndex(1)
---
>>> internalEnum[internalEnum["b"] = 1] = "b";
1->^^^^^^^^^^^^
2 > ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
3 > ^
4 > ^->
1->,
2 > b
3 >
1->Emitted(55, 13) Source(17, 49) + SourceIndex(1)
2 >Emitted(55, 54) Source(17, 50) + SourceIndex(1)
3 >Emitted(55, 55) Source(17, 50) + SourceIndex(1)
---
>>> internalEnum[internalEnum["c"] = 2] = "c";
1->^^^^^^^^^^^^
2 > ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
3 > ^
4 > ^^^^^^^^^^^^^^^^^^^^^^^^^^->
1->,
2 > c
3 >
1->Emitted(56, 13) Source(17, 52) + SourceIndex(1)
2 >Emitted(56, 54) Source(17, 53) + SourceIndex(1)
3 >Emitted(56, 55) Source(17, 53) + SourceIndex(1)
---
>>> })(internalEnum = normalN.internalEnum || (normalN.internalEnum = {}));
1->^^^^^^^^
2 > ^
3 > ^^
4 > ^^^^^^^^^^^^
5 > ^^^
6 > ^^^^^^^^^^^^^^^^^^^^
7 > ^^^^^
8 > ^^^^^^^^^^^^^^^^^^^^
9 > ^^^^^^^^
1->
2 > }
3 >
4 > internalEnum
5 >
6 > internalEnum
7 >
8 > internalEnum
9 > { a, b, c }
1->Emitted(57, 9) Source(17, 54) + SourceIndex(1)
2 >Emitted(57, 10) Source(17, 55) + SourceIndex(1)
3 >Emitted(57, 12) Source(17, 31) + SourceIndex(1)
4 >Emitted(57, 24) Source(17, 43) + SourceIndex(1)
5 >Emitted(57, 27) Source(17, 31) + SourceIndex(1)
6 >Emitted(57, 47) Source(17, 43) + SourceIndex(1)
7 >Emitted(57, 52) Source(17, 31) + SourceIndex(1)
8 >Emitted(57, 72) Source(17, 43) + SourceIndex(1)
9 >Emitted(57, 80) Source(17, 55) + SourceIndex(1)
---
>>> })(normalN = exports.normalN || (exports.normalN = {}));
1 >^^^^
2 > ^
3 > ^^
4 > ^^^^^^^
5 > ^^^
6 > ^^^^^^^^^^^^^^^
7 > ^^^^^
8 > ^^^^^^^^^^^^^^^
9 > ^^^^^^^^
10> ^^^->
1 >
>
2 > }
3 >
4 > normalN
5 >
6 > normalN
7 >
8 > normalN
9 > {
> /*@internal*/ export class C { }
> /*@internal*/ export function foo() {}
> /*@internal*/ export namespace someNamespace { export class C {} }
> /*@internal*/ export namespace someOther.something { export class someClass {} }
> /*@internal*/ export import someImport = someNamespace.C;
> /*@internal*/ export type internalType = internalC;
> /*@internal*/ export const internalConst = 10;
> /*@internal*/ export enum internalEnum { a, b, c }
> }
1 >Emitted(58, 5) Source(18, 1) + SourceIndex(1)
2 >Emitted(58, 6) Source(18, 2) + SourceIndex(1)
3 >Emitted(58, 8) Source(9, 18) + SourceIndex(1)
4 >Emitted(58, 15) Source(9, 25) + SourceIndex(1)
5 >Emitted(58, 18) Source(9, 18) + SourceIndex(1)
6 >Emitted(58, 33) Source(9, 25) + SourceIndex(1)
7 >Emitted(58, 38) Source(9, 18) + SourceIndex(1)
8 >Emitted(58, 53) Source(9, 25) + SourceIndex(1)
9 >Emitted(58, 61) Source(18, 2) + SourceIndex(1)
---
>>> /*@internal*/ var internalC = /** @class */ (function () {
1->^^^^
2 > ^^^^^^^^^^^^^
3 > ^
4 > ^^^^^^^^^^^^^->
1->
>
2 > /*@internal*/
3 >
1->Emitted(59, 5) Source(19, 1) + SourceIndex(1)
2 >Emitted(59, 18) Source(19, 14) + SourceIndex(1)
3 >Emitted(59, 19) Source(19, 15) + SourceIndex(1)
---
>>> function internalC() {
1->^^^^^^^^
2 > ^^->
1->
1->Emitted(60, 9) Source(19, 15) + SourceIndex(1)
---
>>> }
1->^^^^^^^^
2 > ^
3 > ^^^^^^^^^^^^^^^^^->
1->export class internalC {
2 > }
1->Emitted(61, 9) Source(19, 39) + SourceIndex(1)
2 >Emitted(61, 10) Source(19, 40) + SourceIndex(1)
---
>>> return internalC;
1->^^^^^^^^
2 > ^^^^^^^^^^^^^^^^
1->
2 > }
1->Emitted(62, 9) Source(19, 39) + SourceIndex(1)
2 >Emitted(62, 25) Source(19, 40) + SourceIndex(1)
---
>>> }());
1 >^^^^
2 > ^
3 >
4 > ^^^^
5 > ^^^^^^^^^^^^^^^^^^^^^^^^^^->
1 >
2 > }
3 >
4 > export class internalC {}
1 >Emitted(63, 5) Source(19, 39) + SourceIndex(1)
2 >Emitted(63, 6) Source(19, 40) + SourceIndex(1)
3 >Emitted(63, 6) Source(19, 15) + SourceIndex(1)
4 >Emitted(63, 10) Source(19, 40) + SourceIndex(1)
---
>>> exports.internalC = internalC;
1->^^^^
2 > ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
3 > ^^^^^^^^^^^->
1->
2 > internalC
1->Emitted(64, 5) Source(19, 28) + SourceIndex(1)
2 >Emitted(64, 35) Source(19, 37) + SourceIndex(1)
---
>>> /*@internal*/ function internalfoo() { }
1->^^^^
2 > ^^^^^^^^^^^^^
3 > ^
4 > ^^^^^^^^^
5 > ^^^^^^^^^^^
6 > ^^^^^
7 > ^
1-> {}
>
2 > /*@internal*/
3 >
4 > export function
5 > internalfoo
6 > () {
7 > }
1->Emitted(65, 5) Source(20, 1) + SourceIndex(1)
2 >Emitted(65, 18) Source(20, 14) + SourceIndex(1)
3 >Emitted(65, 19) Source(20, 15) + SourceIndex(1)
4 >Emitted(65, 28) Source(20, 31) + SourceIndex(1)
5 >Emitted(65, 39) Source(20, 42) + SourceIndex(1)
6 >Emitted(65, 44) Source(20, 46) + SourceIndex(1)
7 >Emitted(65, 45) Source(20, 47) + SourceIndex(1)
---
>>> exports.internalfoo = internalfoo;
1 >^^^^
2 > ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
3 > ^^^->
1 >
2 > export function internalfoo() {}
1 >Emitted(66, 5) Source(20, 15) + SourceIndex(1)
2 >Emitted(66, 39) Source(20, 47) + SourceIndex(1)
---
>>> /*@internal*/ var internalNamespace;
1->^^^^
2 > ^^^^^^^^^^^^^
3 > ^
4 > ^^^^
5 > ^^^^^^^^^^^^^^^^^
6 > ^
1->
>
2 > /*@internal*/
3 >
4 > export namespace
5 > internalNamespace
6 > { export class someClass {} }
1->Emitted(67, 5) Source(21, 1) + SourceIndex(1)
2 >Emitted(67, 18) Source(21, 14) + SourceIndex(1)
3 >Emitted(67, 19) Source(21, 15) + SourceIndex(1)
4 >Emitted(67, 23) Source(21, 32) + SourceIndex(1)
5 >Emitted(67, 40) Source(21, 49) + SourceIndex(1)
6 >Emitted(67, 41) Source(21, 79) + SourceIndex(1)
---
>>> (function (internalNamespace) {
1 >^^^^
2 > ^^^^^^^^^^^
3 > ^^^^^^^^^^^^^^^^^
4 > ^^^^^^^^^^^^^^^^^^^^^->
1 >
2 > export namespace
3 > internalNamespace
1 >Emitted(68, 5) Source(21, 15) + SourceIndex(1)
2 >Emitted(68, 16) Source(21, 32) + SourceIndex(1)
3 >Emitted(68, 33) Source(21, 49) + SourceIndex(1)
---
>>> var someClass = /** @class */ (function () {
1->^^^^^^^^
2 > ^^^^^^^^^^^^^^^^^^^^^^^^^^^->
1-> {
1->Emitted(69, 9) Source(21, 52) + SourceIndex(1)
---
>>> function someClass() {
1->^^^^^^^^^^^^
2 > ^^->
1->
1->Emitted(70, 13) Source(21, 52) + SourceIndex(1)
---
>>> }
1->^^^^^^^^^^^^
2 > ^
3 > ^^^^^^^^^^^^^^^^^->
1->export class someClass {
2 > }
1->Emitted(71, 13) Source(21, 76) + SourceIndex(1)
2 >Emitted(71, 14) Source(21, 77) + SourceIndex(1)
---
>>> return someClass;
1->^^^^^^^^^^^^
2 > ^^^^^^^^^^^^^^^^
1->
2 > }
1->Emitted(72, 13) Source(21, 76) + SourceIndex(1)
2 >Emitted(72, 29) Source(21, 77) + SourceIndex(1)
---
>>> }());
1 >^^^^^^^^
2 > ^
3 >
4 > ^^^^
5 > ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^->
1 >
2 > }
3 >
4 > export class someClass {}
1 >Emitted(73, 9) Source(21, 76) + SourceIndex(1)
2 >Emitted(73, 10) Source(21, 77) + SourceIndex(1)
3 >Emitted(73, 10) Source(21, 52) + SourceIndex(1)
4 >Emitted(73, 14) Source(21, 77) + SourceIndex(1)
---
>>> internalNamespace.someClass = someClass;
1->^^^^^^^^
2 > ^^^^^^^^^^^^^^^^^^^^^^^^^^^
3 > ^^^^^^^^^^^^
4 > ^
5 > ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^->
1->
2 > someClass
3 > {}
4 >
1->Emitted(74, 9) Source(21, 65) + SourceIndex(1)
2 >Emitted(74, 36) Source(21, 74) + SourceIndex(1)
3 >Emitted(74, 48) Source(21, 77) + SourceIndex(1)
4 >Emitted(74, 49) Source(21, 77) + SourceIndex(1)
---
>>> })(internalNamespace = exports.internalNamespace || (exports.internalNamespace = {}));
1->^^^^
2 > ^
3 > ^^
4 > ^^^^^^^^^^^^^^^^^
5 > ^^^
6 > ^^^^^^^^^^^^^^^^^^^^^^^^^
7 > ^^^^^
8 > ^^^^^^^^^^^^^^^^^^^^^^^^^
9 > ^^^^^^^^
1->
2 > }
3 >
4 > internalNamespace
5 >
6 > internalNamespace
7 >
8 > internalNamespace
9 > { export class someClass {} }
1->Emitted(75, 5) Source(21, 78) + SourceIndex(1)
2 >Emitted(75, 6) Source(21, 79) + SourceIndex(1)
3 >Emitted(75, 8) Source(21, 32) + SourceIndex(1)
4 >Emitted(75, 25) Source(21, 49) + SourceIndex(1)
5 >Emitted(75, 28) Source(21, 32) + SourceIndex(1)
6 >Emitted(75, 53) Source(21, 49) + SourceIndex(1)
7 >Emitted(75, 58) Source(21, 32) + SourceIndex(1)
8 >Emitted(75, 83) Source(21, 49) + SourceIndex(1)
9 >Emitted(75, 91) Source(21, 79) + SourceIndex(1)
---
>>> /*@internal*/ var internalOther;
1 >^^^^
2 > ^^^^^^^^^^^^^
3 > ^
4 > ^^^^
5 > ^^^^^^^^^^^^^
6 > ^
1 >
>
2 > /*@internal*/
3 >
4 > export namespace
5 > internalOther
6 > .something { export class someClass {} }
1 >Emitted(76, 5) Source(22, 1) + SourceIndex(1)
2 >Emitted(76, 18) Source(22, 14) + SourceIndex(1)
3 >Emitted(76, 19) Source(22, 15) + SourceIndex(1)
4 >Emitted(76, 23) Source(22, 32) + SourceIndex(1)
5 >Emitted(76, 36) Source(22, 45) + SourceIndex(1)
6 >Emitted(76, 37) Source(22, 85) + SourceIndex(1)
---
>>> (function (internalOther) {
1 >^^^^
2 > ^^^^^^^^^^^
3 > ^^^^^^^^^^^^^
1 >
2 > export namespace
3 > internalOther
1 >Emitted(77, 5) Source(22, 15) + SourceIndex(1)
2 >Emitted(77, 16) Source(22, 32) + SourceIndex(1)
3 >Emitted(77, 29) Source(22, 45) + SourceIndex(1)
---
>>> var something;
1 >^^^^^^^^
2 > ^^^^
3 > ^^^^^^^^^
4 > ^
5 > ^^^^^^^^^^->
1 >.
2 >
3 > something
4 > { export class someClass {} }
1 >Emitted(78, 9) Source(22, 46) + SourceIndex(1)
2 >Emitted(78, 13) Source(22, 46) + SourceIndex(1)
3 >Emitted(78, 22) Source(22, 55) + SourceIndex(1)
4 >Emitted(78, 23) Source(22, 85) + SourceIndex(1)
---
>>> (function (something) {
1->^^^^^^^^
2 > ^^^^^^^^^^^
3 > ^^^^^^^^^
4 > ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^->
1->
2 >
3 > something
1->Emitted(79, 9) Source(22, 46) + SourceIndex(1)
2 >Emitted(79, 20) Source(22, 46) + SourceIndex(1)
3 >Emitted(79, 29) Source(22, 55) + SourceIndex(1)
---
>>> var someClass = /** @class */ (function () {
1->^^^^^^^^^^^^
2 > ^^^^^^^^^^^^^^^^^^^^^^^^^^^->
1-> {
1->Emitted(80, 13) Source(22, 58) + SourceIndex(1)
---
>>> function someClass() {
1->^^^^^^^^^^^^^^^^
2 > ^^->
1->
1->Emitted(81, 17) Source(22, 58) + SourceIndex(1)
---
>>> }
1->^^^^^^^^^^^^^^^^
2 > ^
3 > ^^^^^^^^^^^^^^^^^->
1->export class someClass {
2 > }
1->Emitted(82, 17) Source(22, 82) + SourceIndex(1)
2 >Emitted(82, 18) Source(22, 83) + SourceIndex(1)
---
>>> return someClass;
1->^^^^^^^^^^^^^^^^
2 > ^^^^^^^^^^^^^^^^
1->
2 > }
1->Emitted(83, 17) Source(22, 82) + SourceIndex(1)
2 >Emitted(83, 33) Source(22, 83) + SourceIndex(1)
---
>>> }());
1 >^^^^^^^^^^^^
2 > ^
3 >
4 > ^^^^
5 > ^^^^^^^^^^^^^^^^^^^^^^^^^^^^->
1 >
2 > }
3 >
4 > export class someClass {}
1 >Emitted(84, 13) Source(22, 82) + SourceIndex(1)
2 >Emitted(84, 14) Source(22, 83) + SourceIndex(1)
3 >Emitted(84, 14) Source(22, 58) + SourceIndex(1)
4 >Emitted(84, 18) Source(22, 83) + SourceIndex(1)
---
>>> something.someClass = someClass;
1->^^^^^^^^^^^^
2 > ^^^^^^^^^^^^^^^^^^^
3 > ^^^^^^^^^^^^
4 > ^
5 > ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^->
1->
2 > someClass
3 > {}
4 >
1->Emitted(85, 13) Source(22, 71) + SourceIndex(1)
2 >Emitted(85, 32) Source(22, 80) + SourceIndex(1)
3 >Emitted(85, 44) Source(22, 83) + SourceIndex(1)
4 >Emitted(85, 45) Source(22, 83) + SourceIndex(1)
---
>>> })(something = internalOther.something || (internalOther.something = {}));
1->^^^^^^^^
2 > ^
3 > ^^
4 > ^^^^^^^^^
5 > ^^^
6 > ^^^^^^^^^^^^^^^^^^^^^^^
7 > ^^^^^
8 > ^^^^^^^^^^^^^^^^^^^^^^^
9 > ^^^^^^^^
1->
2 > }
3 >
4 > something
5 >
6 > something
7 >
8 > something
9 > { export class someClass {} }
1->Emitted(86, 9) Source(22, 84) + SourceIndex(1)
2 >Emitted(86, 10) Source(22, 85) + SourceIndex(1)
3 >Emitted(86, 12) Source(22, 46) + SourceIndex(1)
4 >Emitted(86, 21) Source(22, 55) + SourceIndex(1)
5 >Emitted(86, 24) Source(22, 46) + SourceIndex(1)
6 >Emitted(86, 47) Source(22, 55) + SourceIndex(1)
7 >Emitted(86, 52) Source(22, 46) + SourceIndex(1)
8 >Emitted(86, 75) Source(22, 55) + SourceIndex(1)
9 >Emitted(86, 83) Source(22, 85) + SourceIndex(1)
---
>>> })(internalOther = exports.internalOther || (exports.internalOther = {}));
1 >^^^^
2 > ^
3 > ^^
4 > ^^^^^^^^^^^^^
5 > ^^^
6 > ^^^^^^^^^^^^^^^^^^^^^
7 > ^^^^^
8 > ^^^^^^^^^^^^^^^^^^^^^
9 > ^^^^^^^^
1 >
2 > }
3 >
4 > internalOther
5 >
6 > internalOther
7 >
8 > internalOther
9 > .something { export class someClass {} }
1 >Emitted(87, 5) Source(22, 84) + SourceIndex(1)
2 >Emitted(87, 6) Source(22, 85) + SourceIndex(1)
3 >Emitted(87, 8) Source(22, 32) + SourceIndex(1)
4 >Emitted(87, 21) Source(22, 45) + SourceIndex(1)
5 >Emitted(87, 24) Source(22, 32) + SourceIndex(1)
6 >Emitted(87, 45) Source(22, 45) + SourceIndex(1)
7 >Emitted(87, 50) Source(22, 32) + SourceIndex(1)
8 >Emitted(87, 71) Source(22, 45) + SourceIndex(1)
9 >Emitted(87, 79) Source(22, 85) + SourceIndex(1)
---
>>> /*@internal*/ exports.internalImport = internalNamespace.someClass;
1 >^^^^
2 > ^^^^^^^^^^^^^
3 > ^
4 > ^^^^^^^^
5 > ^^^^^^^^^^^^^^
6 > ^^^
7 > ^^^^^^^^^^^^^^^^^
8 > ^
9 > ^^^^^^^^^
10> ^
1 >
>
2 > /*@internal*/
3 > export import
4 >
5 > internalImport
6 > =
7 > internalNamespace
8 > .
9 > someClass
10> ;
1 >Emitted(88, 5) Source(23, 1) + SourceIndex(1)
2 >Emitted(88, 18) Source(23, 14) + SourceIndex(1)
3 >Emitted(88, 19) Source(23, 29) + SourceIndex(1)
4 >Emitted(88, 27) Source(23, 29) + SourceIndex(1)
5 >Emitted(88, 41) Source(23, 43) + SourceIndex(1)
6 >Emitted(88, 44) Source(23, 46) + SourceIndex(1)
7 >Emitted(88, 61) Source(23, 63) + SourceIndex(1)
8 >Emitted(88, 62) Source(23, 64) + SourceIndex(1)
9 >Emitted(88, 71) Source(23, 73) + SourceIndex(1)
10>Emitted(88, 72) Source(23, 74) + SourceIndex(1)
---
>>> /*@internal*/ exports.internalConst = 10;
1 >^^^^
2 > ^^^^^^^^^^^^^
3 > ^
4 > ^^^^^^^^
5 > ^^^^^^^^^^^^^
6 > ^^^
7 > ^^
8 > ^
1 >
>/*@internal*/ export type internalType = internalC;
>
2 > /*@internal*/
3 > export const
4 >
5 > internalConst
6 > =
7 > 10
8 > ;
1 >Emitted(89, 5) Source(25, 1) + SourceIndex(1)
2 >Emitted(89, 18) Source(25, 14) + SourceIndex(1)
3 >Emitted(89, 19) Source(25, 28) + SourceIndex(1)
4 >Emitted(89, 27) Source(25, 28) + SourceIndex(1)
5 >Emitted(89, 40) Source(25, 41) + SourceIndex(1)
6 >Emitted(89, 43) Source(25, 44) + SourceIndex(1)
7 >Emitted(89, 45) Source(25, 46) + SourceIndex(1)
8 >Emitted(89, 46) Source(25, 47) + SourceIndex(1)
---
>>> /*@internal*/ var internalEnum;
1 >^^^^
2 > ^^^^^^^^^^^^^
3 > ^
4 > ^^^^
5 > ^^^^^^^^^^^^
1 >
>
2 > /*@internal*/
3 >
4 > export enum
5 > internalEnum { a, b, c }
1 >Emitted(90, 5) Source(26, 1) + SourceIndex(1)
2 >Emitted(90, 18) Source(26, 14) + SourceIndex(1)
3 >Emitted(90, 19) Source(26, 15) + SourceIndex(1)
4 >Emitted(90, 23) Source(26, 27) + SourceIndex(1)
5 >Emitted(90, 35) Source(26, 51) + SourceIndex(1)
---
>>> (function (internalEnum) {
1 >^^^^
2 > ^^^^^^^^^^^
3 > ^^^^^^^^^^^^
4 > ^^^^^^^^^^^^^^^^^^^^^^^^->
1 >
2 > export enum
3 > internalEnum
1 >Emitted(91, 5) Source(26, 15) + SourceIndex(1)
2 >Emitted(91, 16) Source(26, 27) + SourceIndex(1)
3 >Emitted(91, 28) Source(26, 39) + SourceIndex(1)
---
>>> internalEnum[internalEnum["a"] = 0] = "a";
1->^^^^^^^^
2 > ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
3 > ^
4 > ^->
1-> {
2 > a
3 >
1->Emitted(92, 9) Source(26, 42) + SourceIndex(1)
2 >Emitted(92, 50) Source(26, 43) + SourceIndex(1)
3 >Emitted(92, 51) Source(26, 43) + SourceIndex(1)
---
>>> internalEnum[internalEnum["b"] = 1] = "b";
1->^^^^^^^^
2 > ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
3 > ^
4 > ^->
1->,
2 > b
3 >
1->Emitted(93, 9) Source(26, 45) + SourceIndex(1)
2 >Emitted(93, 50) Source(26, 46) + SourceIndex(1)
3 >Emitted(93, 51) Source(26, 46) + SourceIndex(1)
---
>>> internalEnum[internalEnum["c"] = 2] = "c";
1->^^^^^^^^
2 > ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
3 > ^
4 > ^^^^^^^^^^^^^^^^^^^^^^^^^^->
1->,
2 > c
3 >
1->Emitted(94, 9) Source(26, 48) + SourceIndex(1)
2 >Emitted(94, 50) Source(26, 49) + SourceIndex(1)
3 >Emitted(94, 51) Source(26, 49) + SourceIndex(1)
---
>>> })(internalEnum = exports.internalEnum || (exports.internalEnum = {}));
1->^^^^
2 > ^
3 > ^^
4 > ^^^^^^^^^^^^
5 > ^^^
6 > ^^^^^^^^^^^^^^^^^^^^
7 > ^^^^^
8 > ^^^^^^^^^^^^^^^^^^^^
9 > ^^^^^^^^
1->
2 > }
3 >
4 > internalEnum
5 >
6 > internalEnum
7 >
8 > internalEnum
9 > { a, b, c }
1->Emitted(95, 5) Source(26, 50) + SourceIndex(1)
2 >Emitted(95, 6) Source(26, 51) + SourceIndex(1)
3 >Emitted(95, 8) Source(26, 27) + SourceIndex(1)
4 >Emitted(95, 20) Source(26, 39) + SourceIndex(1)
5 >Emitted(95, 23) Source(26, 27) + SourceIndex(1)
6 >Emitted(95, 43) Source(26, 39) + SourceIndex(1)
7 >Emitted(95, 48) Source(26, 27) + SourceIndex(1)
8 >Emitted(95, 68) Source(26, 39) + SourceIndex(1)
9 >Emitted(95, 76) Source(26, 51) + SourceIndex(1)
---
-------------------------------------------------------------------
emittedFile:/src/lib/module.js
sourceFile:file2.ts
-------------------------------------------------------------------
>>>});
>>>define("file2", ["require", "exports"], function (require, exports) {
>>> "use strict";
>>> Object.defineProperty(exports, "__esModule", { value: true });
>>> exports.y = 20;
1 >^^^^
2 > ^^^^^^^^
3 > ^
4 > ^^^
5 > ^^
6 > ^
1 >export const
2 >
3 > y
4 > =
5 > 20
6 > ;
1 >Emitted(100, 5) Source(1, 14) + SourceIndex(2)
2 >Emitted(100, 13) Source(1, 14) + SourceIndex(2)
3 >Emitted(100, 14) Source(1, 15) + SourceIndex(2)
4 >Emitted(100, 17) Source(1, 18) + SourceIndex(2)
5 >Emitted(100, 19) Source(1, 20) + SourceIndex(2)
6 >Emitted(100, 20) Source(1, 21) + SourceIndex(2)
---
-------------------------------------------------------------------
emittedFile:/src/lib/module.js
sourceFile:global.ts
-------------------------------------------------------------------
>>>});
>>>var globalConst = 10;
1 >
2 >^^^^
3 > ^^^^^^^^^^^
4 > ^^^
5 > ^^
6 > ^
7 > ^^^^^^^^^^^^->
1 >
2 >const
3 > globalConst
4 > =
5 > 10
6 > ;
1 >Emitted(102, 1) Source(1, 1) + SourceIndex(3)
2 >Emitted(102, 5) Source(1, 7) + SourceIndex(3)
3 >Emitted(102, 16) Source(1, 18) + SourceIndex(3)
4 >Emitted(102, 19) Source(1, 21) + SourceIndex(3)
5 >Emitted(102, 21) Source(1, 23) + SourceIndex(3)
6 >Emitted(102, 22) Source(1, 24) + SourceIndex(3)
---
>>>//# sourceMappingURL=module.js.map
//// [/src/lib/module.tsbuildinfo]
{
"bundle": {
"commonSourceDirectory": "./",
"sourceFiles": [
"./file0.ts",
"./file1.ts",
"./file2.ts",
"./global.ts"
],
"js": {
"sections": [
{
"pos": 0,
"end": 4130,
"kind": "text"
}
]
},
"dts": {
"sections": [
{
"pos": 0,
"end": 26,
"kind": "internal"
},
{
"pos": 28,
"end": 108,
"kind": "text"
},
{
"pos": 108,
"end": 233,
"kind": "internal"
},
{
"pos": 235,
"end": 274,
"kind": "text"
},
{
"pos": 274,
"end": 742,
"kind": "internal"
},
{
"pos": 744,
"end": 751,
"kind": "text"
},
{
"pos": 751,
"end": 1240,
"kind": "internal"
},
{
"pos": 1242,
"end": 1333,
"kind": "text"
}
]
}
},
"version": "FakeTSVersion"
}
//// [/src/lib/module.tsbuildinfo.baseline.txt]
======================================================================
File:: /src/lib/module.js
----------------------------------------------------------------------
text: (0-4130)
/*@internal*/ var myGlob = 20;
define("file1", ["require", "exports"], function (require, exports) {
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.x = 10;
var normalC = /** @class */ (function () {
/*@internal*/ function normalC() {
}
/*@internal*/ normalC.prototype.method = function () { };
Object.defineProperty(normalC.prototype, "c", {
/*@internal*/ get: function () { return 10; },
/*@internal*/ set: function (val) { },
enumerable: false,
configurable: true
});
return normalC;
}());
exports.normalC = normalC;
var normalN;
(function (normalN) {
/*@internal*/ var C = /** @class */ (function () {
function C() {
}
return C;
}());
normalN.C = C;
/*@internal*/ function foo() { }
normalN.foo = foo;
/*@internal*/ var someNamespace;
(function (someNamespace) {
var C = /** @class */ (function () {
function C() {
}
return C;
}());
someNamespace.C = C;
})(someNamespace = normalN.someNamespace || (normalN.someNamespace = {}));
/*@internal*/ var someOther;
(function (someOther) {
var something;
(function (something) {
var someClass = /** @class */ (function () {
function someClass() {
}
return someClass;
}());
something.someClass = someClass;
})(something = someOther.something || (someOther.something = {}));
})(someOther = normalN.someOther || (normalN.someOther = {}));
/*@internal*/ normalN.someImport = someNamespace.C;
/*@internal*/ normalN.internalConst = 10;
/*@internal*/ var internalEnum;
(function (internalEnum) {
internalEnum[internalEnum["a"] = 0] = "a";
internalEnum[internalEnum["b"] = 1] = "b";
internalEnum[internalEnum["c"] = 2] = "c";
})(internalEnum = normalN.internalEnum || (normalN.internalEnum = {}));
})(normalN = exports.normalN || (exports.normalN = {}));
/*@internal*/ var internalC = /** @class */ (function () {
function internalC() {
}
return internalC;
}());
exports.internalC = internalC;
/*@internal*/ function internalfoo() { }
exports.internalfoo = internalfoo;
/*@internal*/ var internalNamespace;
(function (internalNamespace) {
var someClass = /** @class */ (function () {
function someClass() {
}
return someClass;
}());
internalNamespace.someClass = someClass;
})(internalNamespace = exports.internalNamespace || (exports.internalNamespace = {}));
/*@internal*/ var internalOther;
(function (internalOther) {
var something;
(function (something) {
var someClass = /** @class */ (function () {
function someClass() {
}
return someClass;
}());
something.someClass = someClass;
})(something = internalOther.something || (internalOther.something = {}));
})(internalOther = exports.internalOther || (exports.internalOther = {}));
/*@internal*/ exports.internalImport = internalNamespace.someClass;
/*@internal*/ exports.internalConst = 10;
/*@internal*/ var internalEnum;
(function (internalEnum) {
internalEnum[internalEnum["a"] = 0] = "a";
internalEnum[internalEnum["b"] = 1] = "b";
internalEnum[internalEnum["c"] = 2] = "c";
})(internalEnum = exports.internalEnum || (exports.internalEnum = {}));
});
define("file2", ["require", "exports"], function (require, exports) {
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.y = 20;
});
var globalConst = 10;
======================================================================
======================================================================
File:: /src/lib/module.d.ts
----------------------------------------------------------------------
internal: (0-26)
declare const myGlob = 20;
----------------------------------------------------------------------
text: (28-108)
declare module "file1" {
export const x = 10;
export class normalC {
----------------------------------------------------------------------
internal: (108-233)
constructor();
prop: string;
method(): void;
get c(): number;
set c(val: number);
----------------------------------------------------------------------
text: (235-274)
}
export namespace normalN {
----------------------------------------------------------------------
internal: (274-742)
class C {
}
function foo(): void;
namespace someNamespace {
class C {
}
}
namespace someOther.something {
class someClass {
}
}
export import someImport = someNamespace.C;
type internalType = internalC;
const internalConst = 10;
enum internalEnum {
a = 0,
b = 1,
c = 2
}
----------------------------------------------------------------------
text: (744-751)
}
----------------------------------------------------------------------
internal: (751-1240)
export class internalC {
}
export function internalfoo(): void;
export namespace internalNamespace {
class someClass {
}
}
export namespace internalOther.something {
class someClass {
}
}
export import internalImport = internalNamespace.someClass;
export type internalType = internalC;
export const internalConst = 10;
export enum internalEnum {
a = 0,
b = 1,
c = 2
}
----------------------------------------------------------------------
text: (1242-1333)
}
declare module "file2" {
export const y = 20;
}
declare const globalConst = 10;
======================================================================
|
# This python file is used to reproduce our link prediction experiment
# Author: Hongming ZHANG, HKUST KnowComp Group
from sklearn.metrics import roc_auc_score
import math
import subprocess
import BaselineMethods.MNE.Node2Vec_LayerSelect
import argparse
from BaselineMethods.MNE.MNE import *
def parse_args():
    """Parse the node2vec command-line arguments.

    Returns the argparse.Namespace of hyperparameters.  Help strings are
    kept in sync with the actual defaults (they previously advertised
    stale values inherited from the upstream node2vec script).
    """
    parser = argparse.ArgumentParser(description="Run node2vec.")
    parser.add_argument('--input', nargs='?', default='graph/karate.edgelist',
                        help='Input graph path')
    parser.add_argument('--output', nargs='?', default='emb/karate.emb',
                        help='Embeddings path')
    parser.add_argument('--dimensions', type=int, default=200,
                        help='Number of dimensions. Default is 200.')
    parser.add_argument('--walk-length', type=int, default=10,
                        help='Length of walk per source. Default is 10.')
    parser.add_argument('--num-walks', type=int, default=20,
                        help='Number of walks per source. Default is 20.')
    parser.add_argument('--window-size', type=int, default=10,
                        help='Context size for optimization. Default is 10.')
    parser.add_argument('--iter', type=int, default=10,
                        help='Number of epochs in SGD')
    parser.add_argument('--workers', type=int, default=8,
                        help='Number of parallel workers. Default is 8.')
    parser.add_argument('--p', type=float, default=1,
                        help='Return hyperparameter. Default is 1.')
    parser.add_argument('--q', type=float, default=1,
                        help='Inout hyperparameter. Default is 1.')
    parser.add_argument('--weighted', dest='weighted', action='store_true',
                        help='Boolean specifying (un)weighted. Default is unweighted.')
    # NOTE(review): dest='unweighted' means passing --unweighted does NOT
    # clear args.weighted; it sets a separate attribute.  Kept as-is for
    # backward compatibility with callers that may read args.unweighted.
    parser.add_argument('--unweighted', dest='unweighted', action='store_false')
    parser.set_defaults(weighted=False)
    parser.add_argument('--directed', dest='directed', action='store_true',
                        help='Graph is (un)directed. Default is undirected.')
    # Same dest quirk as --unweighted above.
    parser.add_argument('--undirected', dest='undirected', action='store_false')
    parser.set_defaults(directed=False)
    return parser.parse_args()
# randomly divide data into few parts for the purpose of cross-validation
def divide_data(input_list, group_number):
    """Shuffle input_list in place and split it into group_number roughly
    equal consecutive chunks (cross-validation folds)."""
    chunk = len(input_list) / float(group_number)
    random.shuffle(input_list)
    folds = []
    for i in range(group_number):
        start = int(round(chunk * i))
        stop = int(round(chunk * (i + 1)))
        folds.append(input_list[start:stop])
    return folds
def randomly_choose_false_edges(nodes, true_edges):
    """Return, in random order, every ordered node pair that is not a
    self-pair and does not appear in true_edges in either direction."""
    candidate_pairs = [(i, j)
                       for i in range(len(nodes))
                       for j in range(len(nodes))]
    random.shuffle(candidate_pairs)
    false_edges = []
    for i, j in candidate_pairs:
        if i == j:
            continue
        u, v = nodes[i], nodes[j]
        # Exclude pairs that are real edges in either orientation.
        if (u, v) not in true_edges and (v, u) not in true_edges:
            false_edges.append((u, v))
    return false_edges
def get_dict_neighbourhood_score(local_model, node1, node2):
    """Cosine similarity between the embeddings of node1 and node2 taken
    from a dict-like embedding model.

    If either node is missing (or the similarity cannot be computed), a
    sentinel value in (2, 3) is returned so callers can recognize unseen
    pairs — real cosine scores lie in [-1, 1].
    """
    try:
        vector1 = local_model[node1]
        vector2 = local_model[node2]
        return np.dot(vector1, vector2) / (np.linalg.norm(vector1) * np.linalg.norm(vector2))
    except Exception:
        # Was a bare `except:`, which also swallowed KeyboardInterrupt
        # and SystemExit; Exception keeps the best-effort fallback while
        # letting process-control exceptions propagate.
        return 2 + random.random()
def get_dict_AUC(model, true_edges, false_edges):
    """ROC-AUC of dict-embedding similarity scores: true_edges are the
    positive class, false_edges the negative class.

    Scores above 2 are the "unseen pair" sentinel produced by
    get_dict_neighbourhood_score; they are mapped to a hard +1/-1 label
    (an even pseudo-random split, since the sentinel is 2 + U(0, 1)).
    """
    def clip_score(score):
        # Sentinel in (2, 3): turn the unseen pair into a coin-flip label.
        if score > 2:
            return 1 if score > 2.5 else -1
        return score

    true_list = list()
    prediction_list = list()
    for edge in true_edges:
        tmp_score = get_dict_neighbourhood_score(model, str(edge[0]), str(edge[1]))
        true_list.append(1)
        prediction_list.append(clip_score(tmp_score))
    for edge in false_edges:
        tmp_score = get_dict_neighbourhood_score(model, str(edge[0]), str(edge[1]))
        true_list.append(0)
        prediction_list.append(clip_score(tmp_score))
    y_true = np.array(true_list)
    y_scores = np.array(prediction_list)
    return roc_auc_score(y_true, y_scores)
def get_neighbourhood_score(local_model, node1, node2):
    """Cosine similarity between node1 and node2 using a gensim-style
    model (looked up through model.wv.syn0 / model.wv.index2word).

    Returns a sentinel in (2, 3) for pairs that cannot be scored (node
    missing from the vocabulary), so callers can detect unseen pairs —
    real cosine scores lie in [-1, 1].
    """
    try:
        vector1 = local_model.wv.syn0[local_model.wv.index2word.index(node1)]
        vector2 = local_model.wv.syn0[local_model.wv.index2word.index(node2)]
        return np.dot(vector1, vector2) / (np.linalg.norm(vector1) * np.linalg.norm(vector2))
    except Exception:
        # Was a bare `except:`, which also swallowed KeyboardInterrupt
        # and SystemExit; Exception keeps the best-effort fallback while
        # letting process-control exceptions propagate.
        return 2 + random.random()
def get_AUC(model, true_edges, false_edges):
    """ROC-AUC of gensim-model similarity scores: true_edges are the
    positive class, false_edges the negative class.

    Scores above 2 are the "unseen pair" sentinel produced by
    get_neighbourhood_score; they are mapped to a hard +1/-1 label
    (an even pseudo-random split, since the sentinel is 2 + U(0, 1)).
    """
    def clip_score(score):
        # Sentinel in (2, 3): turn the unseen pair into a coin-flip label.
        if score > 2:
            return 1 if score > 2.5 else -1
        return score

    true_list = list()
    prediction_list = list()
    for edge in true_edges:
        tmp_score = get_neighbourhood_score(model, str(edge[0]), str(edge[1]))
        true_list.append(1)
        prediction_list.append(clip_score(tmp_score))
    for edge in false_edges:
        tmp_score = get_neighbourhood_score(model, str(edge[0]), str(edge[1]))
        true_list.append(0)
        prediction_list.append(clip_score(tmp_score))
    y_true = np.array(true_list)
    y_scores = np.array(prediction_list)
    return roc_auc_score(y_true, y_scores)
def get_common_neighbor_score(networks, target_A, target_B):
    """Common-neighbour link-prediction score: the number of nodes
    adjacent (in either direction) to both target_A and target_B."""
    neighbors_of_a = []
    neighbors_of_b = []
    for edge in networks:
        if edge[0] == target_A:
            neighbors_of_a.append(edge[1])
        if edge[1] == target_A:
            neighbors_of_a.append(edge[0])
        if edge[0] == target_B:
            neighbors_of_b.append(edge[1])
        if edge[1] == target_B:
            neighbors_of_b.append(edge[0])
    # Count every occurrence in A's list that also appears in B's list.
    return sum(1 for node in neighbors_of_a if node in neighbors_of_b)
def get_Jaccard_score(networks, target_A, target_B):
    """Jaccard similarity of the neighbor lists of target_A and target_B.

    networks: iterable of (u, v) edges; the graph is treated as undirected.
    Returns |common| / (|N(A)| + |N(B)| - |common|), or 1 when both nodes
    are isolated (the degenerate 0/0 case).
    """
    A_neighbors = list()
    B_neighbors = list()
    for edge in networks:
        if edge[0] == target_A:
            A_neighbors.append(edge[1])
        if edge[1] == target_A:
            A_neighbors.append(edge[0])
        if edge[0] == target_B:
            B_neighbors.append(edge[1])
        if edge[1] == target_B:
            B_neighbors.append(edge[0])
    # Set membership turns the intersection count into O(|A| + |B|)
    # instead of the original O(|A| * |B|) list scans.
    B_set = set(B_neighbors)
    common_neighbor_counter = sum(1 for neighbor in A_neighbors if neighbor in B_set)
    if len(A_neighbors) == 0 and len(B_neighbors) == 0:
        # Both nodes isolated: define similarity as 1 rather than divide by 0.
        return 1
    return common_neighbor_counter / (len(A_neighbors) + len(B_neighbors) - common_neighbor_counter)
def get_frequency_dict(networks):
    """Return a dict mapping every node in the edge list to its degree.

    networks: iterable of (u, v) edges; both endpoints of each edge are
    counted, so a self-loop contributes 2 to its node's count.
    """
    degree_by_node = dict()
    for edge in networks:
        degree_by_node[edge[0]] = degree_by_node.get(edge[0], 0) + 1
        degree_by_node[edge[1]] = degree_by_node.get(edge[1], 0) + 1
    return degree_by_node
def get_AA_score(networks, target_A, target_B, frequency_dict):
    """Adamic-Adar index of the pair (target_A, target_B).

    Sums 1 / log(degree(z)) over every common neighbor z, using the degree
    counts in frequency_dict (see get_frequency_dict). Degree-1 neighbors
    are skipped, since log(1) = 0 would divide by zero.
    """
    AA_score = 0
    A_neighbors = list()
    B_neighbors = list()
    for edge in networks:
        if edge[0] == target_A:
            A_neighbors.append(edge[1])
        if edge[1] == target_A:
            A_neighbors.append(edge[0])
        if edge[0] == target_B:
            B_neighbors.append(edge[1])
        if edge[1] == target_B:
            B_neighbors.append(edge[0])
    # Set membership makes the common-neighbor scan O(|A| + |B|) rather
    # than the original O(|A| * |B|).
    B_set = set(B_neighbors)
    for neighbor in A_neighbors:
        if neighbor in B_set and frequency_dict[neighbor] > 1:
            AA_score += 1 / (math.log(frequency_dict[neighbor]))
    return AA_score
def read_LINE_vectors(file_name):
    """Parse a LINE embedding file into a {node: np.ndarray} dict.

    The first line is a header (node count and dimension) and is skipped;
    every other line is "<node> <v1> <v2> ...".
    """
    tmp_embedding = dict()
    # `with` guarantees the handle is closed even if a parse error is raised
    # (the original open()/close() pair leaked the handle on exceptions).
    with open(file_name, 'r') as file:
        for line in file.readlines()[1:]:
            # split() tolerates any trailing whitespace; the previous
            # `line[:-2]` slicing assumed every line ends exactly " \n".
            numbers = line.split()
            if not numbers:
                continue  # skip blank lines defensively
            tmp_embedding[numbers[0]] = np.asarray([float(n) for n in numbers[1:]])
    return tmp_embedding
def train_LINE_model(edges, epoch_num=1, dimension=100, negative=5):
    """Train LINE embeddings by shelling out to the compiled LINE binary.

    Writes the edge list (unit weights) to LINE_tmp_edges.txt, runs LINE
    for first- and second-order proximity at dimension/2 each, and returns
    a {node: vector} dict with the two halves concatenated.

    epoch_num is currently unused but kept for interface compatibility.
    """
    preparation_command = 'LD_LIBRARY_PATH=/usr/local/lib\nexport LD_LIBRARY_PATH'
    file_name = 'LINE_tmp_edges.txt'
    # `with` ensures the edge file is flushed/closed before LINE reads it.
    with open(file_name, 'w') as file:
        for edge in edges:
            file.write(edge[0] + ' ' + edge[1] + ' 1\n')
    # Integer division: str(dimension / 2) would put "50.0" in the command
    # line under Python 3, which LINE cannot parse.
    half_dimension = dimension // 2
    # Fixed "base-negative" typo: both orders now pass -negative, matching
    # the second-order command.
    command1 = 'C++/LINE/linux/line -train LINE_tmp_edges.txt -output LINE_tmp_embedding1.txt -order 1 -negative ' + str(
        negative) + ' -dimension ' + str(half_dimension)
    command2 = 'C++/LINE/linux/line -train LINE_tmp_edges.txt -output LINE_tmp_embedding2.txt -order 2 -negative ' + str(
        negative) + ' -dimension ' + str(half_dimension)
    subprocess.call(preparation_command + '\n' + command1 + '\n' + command2, shell=True)
    print('finish training')
    first_order_embedding = read_LINE_vectors('LINE_tmp_embedding1.txt')
    second_order_embedding = read_LINE_vectors('LINE_tmp_embedding2.txt')
    final_embedding = dict()
    for node in first_order_embedding:
        final_embedding[node] = np.append(first_order_embedding[node], second_order_embedding[node])
    return final_embedding
def _basic_method_auc(score_fn, test_edges, false_edges):
    """ROC-AUC of a pairwise scoring function over true/false edge sets.

    score_fn: callable (node_a, node_b) -> numeric score.
    """
    true_list = list()
    prediction_list = list()
    for edge in test_edges:
        true_list.append(1)
        prediction_list.append(score_fn(edge[0], edge[1]))
    for edge in false_edges:
        true_list.append(0)
        prediction_list.append(score_fn(edge[0], edge[1]))
    return roc_auc_score(np.array(true_list), np.array(prediction_list))


def Evaluate_basic_methods(input_network):
    """Evaluate common-neighbor, Jaccard and Adamic-Adar link prediction.

    input_network: dict with keys 'training', 'test_true', 'test_false',
    each mapping edge_type -> list of (u, v) edges. All layers are merged
    into a single undirected network before scoring.
    Returns (common_neighbor_auc, jaccard_auc, aa_auc).
    """
    print('Start to analyze the base methods')
    training_network = input_network['training']
    test_network = input_network['test_true']
    false_network = input_network['test_false']
    all_network = list()
    all_test_network = list()
    all_false_network = list()
    # Flatten every layer into single edge lists. (The original also built
    # an `all_nodes` list here that was never used; it has been removed.)
    for edge_type in training_network:
        all_network.extend(training_network[edge_type])
        all_test_network.extend(test_network[edge_type])
        all_false_network.extend(false_network[edge_type])
    all_network = set(all_network)
    print('We are analyzing the common neighbor method')
    common_neighbor_performance = _basic_method_auc(
        lambda a, b: get_common_neighbor_score(all_network, a, b),
        all_test_network, all_false_network)
    print('Performance of common neighbor:', common_neighbor_performance)
    print('We are analyzing the Jaccard method')
    Jaccard_performance = _basic_method_auc(
        lambda a, b: get_Jaccard_score(all_network, a, b),
        all_test_network, all_false_network)
    print('Performance of Jaccard:', Jaccard_performance)
    print('We are analyzing the AA method')
    frequency_dict = get_frequency_dict(all_network)
    AA_performance = _basic_method_auc(
        lambda a, b: get_AA_score(all_network, a, b, frequency_dict),
        all_test_network, all_false_network)
    print('Performance of AA:', AA_performance)
    return common_neighbor_performance, Jaccard_performance, AA_performance
def merge_PMNE_models(input_all_models, all_nodes):
    """Concatenate, per node, the embedding vectors from every model.

    For each node in all_nodes and each model in input_all_models (in
    order), appends the node's vector — or a zero vector of size
    args.dimensions when the node is absent from that model's vocabulary —
    producing one long vector per node.
    """
    final_model = dict()
    for tmp_model in input_all_models:
        for node in all_nodes:
            # Look up this node's vector in the current layer's model,
            # padding with zeros when the layer never saw the node.
            if node in tmp_model.wv.index2word:
                node_vector = tmp_model.wv.syn0[tmp_model.wv.index2word.index(node)]
            else:
                node_vector = np.zeros([args.dimensions])
            if node in final_model:
                final_model[node] = np.concatenate((final_model[node], node_vector), axis=0)
            else:
                final_model[node] = node_vector
    return final_model
def Evaluate_PMNE_methods(input_network):
    """Evaluate the three PMNE multiplex-network-embedding variants.

    input_network: dict with keys 'training', 'test_true', 'test_false',
    each mapping edge_type -> list of (u, v) edges.
    Returns (method_one_auc, method_two_auc, method_three_auc).
    """
    # we need to write codes to implement the co-analysis method of PMNE
    print('Start to analyze the PMNE method')
    training_network = input_network['training']
    test_network = input_network['test_true']
    false_network = input_network['test_false']
    all_network = list()
    all_test_network = list()
    all_false_network = list()
    all_nodes = list()
    seen_nodes = set()  # fast membership companion to the ordered all_nodes list
    for edge_type in training_network:
        for edge in training_network[edge_type]:
            all_network.append(edge)
            for endpoint in (edge[0], edge[1]):
                if endpoint not in seen_nodes:
                    seen_nodes.add(endpoint)
                    all_nodes.append(endpoint)
        all_test_network.extend(test_network[edge_type])
        all_false_network.extend(false_network[edge_type])
    # Method one: merge every layer into a single network and embed it.
    all_network = set(all_network)
    merged_graph = Random_walk.RWGraph(get_G_from_edges(all_network), args.directed, args.p, args.q)
    merged_graph.preprocess_transition_probs()
    merged_walks = merged_graph.simulate_walks(args.num_walks, args.walk_length)
    model_one = train_deepwalk_embedding(merged_walks)
    method_one_performance = get_AUC(model_one, all_test_network, all_false_network)
    print('Performance of PMNE method one:', method_one_performance)
    # Method two: embed each layer independently, then concatenate vectors.
    all_models = list()
    for edge_type in training_network:
        layer_graph = Random_walk.RWGraph(get_G_from_edges(training_network[edge_type]), args.directed, args.p, args.q)
        layer_graph.preprocess_transition_probs()
        layer_walks = layer_graph.simulate_walks(args.num_walks, args.walk_length)
        all_models.append(train_deepwalk_embedding(layer_walks))
    model_two = merge_PMNE_models(all_models, all_nodes)
    method_two_performance = get_dict_AUC(model_two, all_test_network, all_false_network)
    print('Performance of PMNE method two:', method_two_performance)
    # Method three: co-analysis walks that may switch layers (0.5 = switch prob).
    layer_graphs = [get_G_from_edges(training_network[edge_type]) for edge_type in training_network]
    MK_G = Node2Vec_LayerSelect.Graph(layer_graphs, args.p, args.q, 0.5)
    MK_G.preprocess_transition_probs()
    MK_walks = MK_G.simulate_walks(args.num_walks, args.walk_length)
    model_three = train_deepwalk_embedding(MK_walks)
    method_three_performance = get_AUC(model_three, all_test_network, all_false_network)
    print('Performance of PMNE method three:', method_three_performance)
    return method_one_performance, method_two_performance, method_three_performance
# args = parse_args()
# # logging.basicConfig(format='%(asctime)s : %(levelname)s : %(message)s', level=logging.INFO)
# file_name = ''
# # file_name = 'data/Vickers-Chan-7thGraders_multiplex.edges'
# edge_data_by_type, _, all_nodes = load_network_data(file_name)
# # model = train_model(edge_data_by_type)
#
# # In our experiment, we use 5-fold cross-validation, but you can change that
# number_of_groups = 5
# edge_data_by_type_by_group = dict()
# for edge_type in edge_data_by_type:
# all_data = edge_data_by_type[edge_type]
# separated_data = divide_data(all_data, number_of_groups)
# edge_data_by_type_by_group[edge_type] = separated_data
#
# overall_MNE_performance = list()
# overall_node2Vec_performance = list()
# overall_LINE_performance = list()
# overall_Deepwalk_performance = list()
# overall_common_neighbor_performance = list()
# overall_Jaccard_performance = list()
# overall_AA_performance = list()
# overall_PMNE_1_performance = list()
# overall_PMNE_2_performance = list()
# overall_PMNE_3_performance = list()
#
# for i in range(number_of_groups):
# training_data_by_type = dict()
# evaluation_data_by_type = dict()
# for edge_type in edge_data_by_type_by_group:
# training_data_by_type[edge_type] = list()
# evaluation_data_by_type[edge_type] = list()
# for j in range(number_of_groups):
# if j == i:
# for tmp_edge in edge_data_by_type_by_group[edge_type][j]:
# evaluation_data_by_type[edge_type].append((tmp_edge[0], tmp_edge[1]))
# else:
# for tmp_edge in edge_data_by_type_by_group[edge_type][j]:
# training_data_by_type[edge_type].append((tmp_edge[0], tmp_edge[1]))
# base_edges = list()
# training_nodes = list()
# for edge_type in training_data_by_type:
# for edge in training_data_by_type[edge_type]:
# base_edges.append(edge)
# training_nodes.append(edge[0])
# training_nodes.append(edge[1])
# training_nodes = list(set(training_nodes))
# training_data_by_type['Base'] = base_edges
# MNE_model = train_model(training_data_by_type)
#
# tmp_MNE_performance = 0
# tmp_node2Vec_performance = 0
# tmp_LINE_performance = 0
# tmp_Deepwalk_performance = 0
# merged_networks = dict()
# merged_networks['training'] = dict()
# merged_networks['test_true'] = dict()
# merged_networks['test_false'] = dict()
# for edge_type in training_data_by_type:
# if edge_type == 'Base':
# continue
# print('We are working on edge:', edge_type)
# selected_true_edges = list()
# tmp_training_nodes = list()
# for edge in training_data_by_type[edge_type]:
# tmp_training_nodes.append(edge[0])
# tmp_training_nodes.append(edge[1])
# tmp_training_nodes = set(tmp_training_nodes)
# for edge in evaluation_data_by_type[edge_type]:
# if edge[0] in tmp_training_nodes and edge[1] in tmp_training_nodes:
# if edge[0] == edge[1]:
# continue
# selected_true_edges.append(edge)
# if len(selected_true_edges) == 0:
# continue
# selected_false_edges = randomly_choose_false_edges(training_nodes, edge_data_by_type[edge_type])
# print('number of info network edges:', len(training_data_by_type[edge_type]))
# print('number of evaluation edges:', len(selected_true_edges))
# merged_networks['training'][edge_type] = set(training_data_by_type[edge_type])
# merged_networks['test_true'][edge_type] = selected_true_edges
# merged_networks['test_false'][edge_type] = selected_false_edges
#
# local_model = dict()
# for pos in range(len(MNE_model['index2word'])):
# # 0.5 is the weight parameter mentioned in the paper, which is used to show how important each relation type is and can be tuned based on the network.
# local_model[MNE_model['index2word'][pos]] = MNE_model['base'][pos] + 0.5*np.dot(MNE_model['addition'][edge_type][pos], MNE_model['tran'][edge_type])
# tmp_MNE_score = get_dict_AUC(local_model, selected_true_edges, selected_false_edges)
# # tmp_MNE_score = get_AUC(MNE_model['addition'][edge_type], selected_true_edges, selected_false_edges)
# print('MNE score:', tmp_MNE_score)
# node2vec_G = Random_walk.RWGraph(get_G_from_edges(training_data_by_type[edge_type]), args.directed, 2, 0.5)
# node2vec_G.preprocess_transition_probs()
# node2vec_walks = node2vec_G.simulate_walks(20, 10)
# node2vec_model = train_deepwalk_embedding(node2vec_walks)
# tmp_node2vec_score = get_AUC(node2vec_model, selected_true_edges, selected_false_edges)
# print('node2vec score:', tmp_node2vec_score)
# Deepwalk_G = Random_walk.RWGraph(get_G_from_edges(training_data_by_type[edge_type]), args.directed, 1, 1)
# Deepwalk_G.preprocess_transition_probs()
# Deepwalk_walks = Deepwalk_G.simulate_walks(args.num_walks, 10)
# Deepwalk_model = train_deepwalk_embedding(Deepwalk_walks)
# tmp_Deepwalk_score = get_AUC(Deepwalk_model, selected_true_edges, selected_false_edges)
# print('Deepwalk score:', tmp_Deepwalk_score)
# LINE_model = train_LINE_model(training_data_by_type[edge_type])
# tmp_LINE_score = get_dict_AUC(LINE_model, selected_true_edges, selected_false_edges)
# print('LINE score:', tmp_LINE_score)
# tmp_MNE_performance += tmp_MNE_score
# tmp_node2Vec_performance += tmp_node2vec_score
# tmp_LINE_performance += tmp_LINE_score
# tmp_Deepwalk_performance += tmp_Deepwalk_score
#
# print('MNE performance:', tmp_MNE_performance / (len(training_data_by_type)-1))
# print('node2vec performance:', tmp_node2Vec_performance / (len(training_data_by_type)-1))
# print('LINE performance:', tmp_LINE_performance / (len(training_data_by_type)-1))
# print('Deepwalk performance:', tmp_Deepwalk_performance / (len(training_data_by_type)-1))
# overall_MNE_performance.append(tmp_MNE_performance / (len(training_data_by_type)-1))
# overall_node2Vec_performance.append(tmp_node2Vec_performance / (len(training_data_by_type)-1))
# overall_LINE_performance.append(tmp_LINE_performance / (len(training_data_by_type)-1))
# overall_Deepwalk_performance.append(tmp_Deepwalk_performance / (len(training_data_by_type)-1))
# common_neighbor_performance, Jaccard_performance, AA_performance = Evaluate_basic_methods(merged_networks)
# performance_1, performance_2, performance_3 = Evaluate_PMNE_methods(merged_networks)
# overall_common_neighbor_performance.append(common_neighbor_performance)
# overall_Jaccard_performance.append(Jaccard_performance)
# overall_AA_performance.append(AA_performance)
# overall_PMNE_1_performance.append(performance_1)
# overall_PMNE_2_performance.append(performance_2)
# overall_PMNE_3_performance.append(performance_3)
#
# overall_MNE_performance = np.asarray(overall_MNE_performance)
# overall_node2Vec_performance = np.asarray(overall_node2Vec_performance)
# overall_LINE_performance = np.asarray(overall_LINE_performance)
# overall_Deepwalk_performance = np.asarray(overall_Deepwalk_performance)
# overall_common_neighbor_performance = np.asarray(overall_common_neighbor_performance)
# overall_Jaccard_performance = np.asarray(overall_Jaccard_performance)
# overall_AA_performance = np.asarray(overall_AA_performance)
# overall_PMNE_1_performance = np.asarray(overall_PMNE_1_performance)
# overall_PMNE_2_performance = np.asarray(overall_PMNE_2_performance)
# overall_PMNE_3_performance = np.asarray(overall_PMNE_3_performance)
#
# print('Overall MRNE AUC:', overall_MNE_performance)
# print('Overall node2Vec AUC:', overall_node2Vec_performance)
# print('Overall LINE AUC:', overall_LINE_performance)
# print('Overall Deepwalk AUC:', overall_Deepwalk_performance)
# print('Overall Common neighbor AUC:', overall_common_neighbor_performance)
# print('Overall Jaccard AUC:', overall_Jaccard_performance)
# print('Overall AA AUC:', overall_AA_performance)
# print('Overall PMNE 1 AUC:', overall_PMNE_1_performance)
# print('Overall PMNE 2 AUC:', overall_PMNE_2_performance)
# print('Overall PMNE 3 AUC:', overall_PMNE_3_performance)
#
# print('')
# print('')
# print('')
#
# print('Overall MRNE AUC:', np.mean(overall_MNE_performance))
# print('Overall node2Vec AUC:', np.mean(overall_node2Vec_performance))
# print('Overall LINE AUC:', np.mean(overall_LINE_performance))
# print('Overall Deepwalk AUC:', np.mean(overall_Deepwalk_performance))
# print('Overall Common neighbor AUC:', np.mean(overall_common_neighbor_performance))
# print('Overall Jaccard AUC:', np.mean(overall_Jaccard_performance))
# print('Overall AA AUC:', np.mean(overall_AA_performance))
# print('Overall PMNE 1 AUC:', np.mean(overall_PMNE_1_performance))
# print('Overall PMNE 2 AUC:', np.mean(overall_PMNE_2_performance))
# print('Overall PMNE 3 AUC:', np.mean(overall_PMNE_3_performance))
#
# print('')
# print('')
# print('')
#
# print('Overall MRNE std:', np.std(overall_MNE_performance))
# print('Overall node2Vec std:', np.std(overall_node2Vec_performance))
# print('Overall LINE std:', np.std(overall_LINE_performance))
# print('Overall Deepwalk std:', np.std(overall_Deepwalk_performance))
# print('Overall Common neighbor std:', np.std(overall_common_neighbor_performance))
# print('Overall Jaccard std:', np.std(overall_Jaccard_performance))
# print('Overall AA std:', np.std(overall_AA_performance))
# print('Overall PMNE 1 std:', np.std(overall_PMNE_1_performance))
# print('Overall PMNE 2 std:', np.std(overall_PMNE_2_performance))
# print('Overall PMNE 3 std:', np.std(overall_PMNE_3_performance))
#
# print('end')
|
import tqdm
import csv
import logging
import fire
from pathlib import Path
import subprocess
def filter_urls(filename, needed_language):
    """Yield project URLs from a GHTorrent-style projects CSV.

    Keeps rows whose language column matches needed_language and whose
    forked_from column is the CSV NULL marker '\\N' (i.e. not a fork).
    Malformed rows are logged at DEBUG level and skipped.
    """
    with open(filename) as csv_file:
        for row in tqdm.tqdm(csv.reader(csv_file)):
            try:
                url, language, forked_from = row[1], row[5], row[7]
            except (IndexError, ValueError):
                logging.debug(f"Ignoring {row}")
                continue
            if language == needed_language and forked_from == '\\N':
                yield url
def load_stars(filename):
    """Yield each data row of a star-count CSV as a tuple, skipping the header."""
    with open(filename) as csv_file:
        rows = csv.reader(csv_file)
        next(rows)  # drop the header line
        for row in tqdm.tqdm(rows):
            yield tuple(row)
def load_urls_in_language(filename):
    """Yield raw lines (repo identifiers, trailing newline included) from filename."""
    with open(filename) as repo_file:
        yield from tqdm.tqdm(repo_file)
def main(action: str,
         language: str = "C",
         projects_filename: str = 'dump/projects.csv',
         min_star: int = 1000):
    """Repo-collection pipeline driver.

    action:
      'language' — write repos/repos_in_<language>.txt from the projects CSV
      'star'     — keep only repos with more than min_star stars
      'clone'    — git-clone the selected repos via xargs
      'move'     — sort cloned repos into repos/<language>/<owner>/<repo>

    Raises ValueError for an unknown action (the previous `assert` would be
    silently stripped under `python -O`).
    """
    if action not in ('language', 'star', 'clone', 'move'):
        raise ValueError(f'unknown action: {action!r}')
    repo_list_file = f'repos/repos_in_{language}.txt'
    if action == 'language':
        with open(repo_list_file, 'w') as f:
            for url in filter_urls(projects_filename, needed_language=language):
                try:
                    _, _, _, _, owner, repo = url.strip().split('/')
                except ValueError:
                    logging.debug(f"{url} parse failed")
                    continue
                f.write(f'{owner}/{repo}\n')
    elif action == 'star':
        star_count = dict(load_stars('data/repositories_small.csv'))
        with open(f'repos/repos_in_{language}_{min_star}_stars.txt', 'w') as f:
            for repo_id in load_urls_in_language(repo_list_file):
                repo_id = repo_id.strip()
                if repo_id not in star_count:
                    logging.debug(f"{repo_id} not found in stars counting")
                    continue
                if int(star_count[repo_id]) > min_star:
                    f.write(repo_id + '\n')
    elif action == 'clone':
        # parents=True so a missing top-level 'repos' directory is created too.
        Path(f'repos/{language}').mkdir(parents=True, exist_ok=True)
        subprocess.run(f'xargs --replace -P10 git clone https://github.com/{{}}.git'
                       f' repos/{language}/{{}} < repos/repos_in_{language}_{min_star}_stars.txt',
                       shell=True)
    else:  # action == 'move' — guaranteed by the validation at the top
        with open(f'repos/repos_in_{language}_{min_star}_stars.txt') as f:
            for repo_id in f:
                owner, repo = repo_id.strip().split('/')
                Path(f'repos/{language}/{owner}').mkdir(parents=True, exist_ok=True)
                try:
                    Path(f'repos/{repo}').rename(f'repos/{language}/{owner}/{repo}')
                except FileNotFoundError:
                    pass  # repo was never cloned or already moved
# CLI entry point: python-fire exposes main()'s signature as command-line flags.
if __name__ == '__main__':
    fire.Fire(main)
|
import { DatePicker } from '../../../src/components/datepicker/datepicker';
import { TimePicker } from '../../../src/components/timepicker/timepicker'; //eslint-disable-line
import { Locale } from '../../../src/components/locale/locale';
import { cleanup } from '../../helpers/func-utils';
require('../../../src/components/locale/cultures/en-US.js');
require('../../../src/components/locale/cultures/ar-EG.js');
require('../../../src/components/locale/cultures/ar-SA.js');
require('../../../src/components/locale/cultures/en-US.js');
require('../../../src/components/locale/cultures/ja-JP.js');
require('../../../src/components/locale/cultures/sv-SE.js');
require('../../../src/components/locale/cultures/en-GB.js');
require('../../../src/components/locale/cultures/da-DK.js');
const datepickerHTML = require('../../../app/views/components/datepicker/example-index.html');
const svg = require('../../../src/components/icons/theme-new-svg.html');
let datepickerEl;
let datepickerTimeEl;
let datepickerAPI;
let datepickerTimeAPI;
describe('DatePicker API', () => {
beforeEach(() => {
datepickerEl = null;
datepickerAPI = null;
document.body.insertAdjacentHTML('afterbegin', svg);
document.body.insertAdjacentHTML('afterbegin', datepickerHTML);
datepickerEl = document.getElementById('date-field-normal');
datepickerTimeEl = document.getElementById('start-time');
Locale.addCulture('ar-EG', Soho.Locale.cultures['ar-EG'], Soho.Locale.languages['ar']); //eslint-disable-line
Locale.addCulture('ar-SA', Soho.Locale.cultures['ar-SA'], Soho.Locale.languages['ar']); //eslint-disable-line
Locale.addCulture('en-US', Soho.Locale.cultures['en-US'], Soho.Locale.languages['en']); //eslint-disable-line
Locale.addCulture('ja-JP', Soho.Locale.cultures['ja-JP'], Soho.Locale.languages['ja']); //eslint-disable-line
Locale.addCulture('sv-SE', Soho.Locale.cultures['sv-SE'], Soho.Locale.languages['sv']); //eslint-disable-line
Locale.addCulture('en-GB', Soho.Locale.cultures['en-GB'], Soho.Locale.languages['en']); //eslint-disable-line
Locale.addCulture('da-DK', Soho.Locale.cultures['da-DK'], Soho.Locale.languages['da']); //eslint-disable-line
Locale.set('en-US');
datepickerAPI = new DatePicker(datepickerEl);
datepickerTimeAPI = new DatePicker(datepickerTimeEl);
});
afterEach(() => {
datepickerAPI.destroy();
cleanup();
});
it('Should be defined on jQuery object', () => {
expect(datepickerAPI).toEqual(jasmine.any(Object));
});
it('Should open datepicker', (done) => {
datepickerAPI.openCalendar();
setTimeout(() => {
expect(datepickerAPI.isOpen()).toBeTruthy();
expect(document.body.querySelector('#monthview-popup')).toBeVisible();
done();
}, 100);
});
it('Should destroy datepicker', () => {
datepickerAPI.destroy();
expect(datepickerAPI.isOpen()).toBeFalsy();
expect(document.body.querySelector('#monthview-popup')).toBeFalsy();
});
it('Should disable datepicker', () => {
datepickerAPI.disable();
expect(document.body.querySelector('.field.is-disabled .datepicker')).toBeTruthy();
expect(datepickerAPI.isDisabled()).toBeTruthy();
});
it('Should enable datepicker', () => {
datepickerAPI.enable();
expect(document.body.querySelector('.field.is-disabled .datepicker')).toBeFalsy();
expect(datepickerAPI.isDisabled()).toBeFalsy();
});
it('Should have accessible text', () => {
// Label
expect(datepickerAPI.label.length).toBeTruthy();
// Trigger button audible span
expect(datepickerAPI.trigger[0].querySelector('.audible').textContent.length).toBeTruthy();
});
it('Should render datepicker readonly', () => {
datepickerAPI.readonly();
expect(document.body.querySelector('.datepicker[readonly]')).toBeTruthy();
expect(datepickerAPI.isDisabled()).toBeFalsy();
});
it('Should be able to call setToday and getCurrentDate', () => {
datepickerAPI.destroy();
datepickerAPI = new DatePicker(datepickerEl);
datepickerAPI.setToday();
const todayDate = datepickerAPI.getCurrentDate();
const testDate = new Date();
testDate.setHours(0, 0, 0, 0);
expect(todayDate.toString()).toEqual(testDate.toString());
});
it('Should be able to call setToday when there is a value', () => {
datepickerEl.value = '8/1/2018';
datepickerAPI.setToday();
const todayDate = datepickerAPI.getCurrentDate();
const testDate = new Date();
testDate.setHours(0, 0, 0, 0);
expect(todayDate.toString()).toEqual(testDate.toString());
});
it('Should be able to call setToday with time set to noon', () => {
datepickerTimeAPI.destroy();
datepickerTimeAPI = new DatePicker(datepickerEl, { useCurrentTime: false });
datepickerTimeAPI.setToday();
const todayDate = datepickerTimeAPI.getCurrentDate();
const testDate = new Date();
testDate.setHours(0, 0, 0, 0);
expect(todayDate.toString()).toEqual(testDate.toString());
});
// Will fix on a future PR
it('Should be able to call setToday and getCurrentDate in Umalqura with time set to noon', () => {
datepickerAPI.destroy();
Locale.set('ar-SA');
datepickerAPI = new DatePicker(datepickerEl, { useCurrentTime: false, showTime: true });
datepickerAPI.setToday();
const todayDate = datepickerAPI.getCurrentDate();
const testDate = new Date();
testDate.setHours(0, 0, 0, 0);
expect(todayDate.toString()).toEqual(testDate.toString());
const converted = Locale.gregorianToUmalqura(testDate);
expect(datepickerEl.value).toEqual(`${converted[0]}/${(`${converted[1] + 1}`).padStart(2, '0')}/${(`${converted[2]}`).padStart(2, '0')} 12:00 ص`);
});
it('Should be able to set time using current time', () => {
datepickerTimeAPI.destroy();
datepickerTimeAPI = new DatePicker(datepickerEl, { useCurrentTime: true, showTime: true });
datepickerTimeAPI.setToday();
const todayDate = datepickerTimeAPI.getCurrentDate();
const testDate = new Date();
expect(todayDate.toString()).toEqual(testDate.toString());
});
// Will fix on a future PR
it('Should be able to set time using current time in Umalqura', () => {
datepickerAPI.destroy();
Locale.set('ar-SA');
datepickerAPI = new DatePicker(datepickerEl, { useCurrentTime: true, showTime: true });
datepickerAPI.setToday();
const todayDate = datepickerAPI.getCurrentDate();
const testDate = new Date();
expect(todayDate.toString()).toEqual(testDate.toString());
const converted = Locale.gregorianToUmalqura(testDate);
let hours = testDate.getHours();
let minutes = testDate.getMinutes();
let amPm = 'ص';
if (hours > 12) {
hours -= hours > 12 ? 12 : 0;
amPm = 'م';
}
if (hours === 12) {
return;
}
if (minutes.toString().length === 1) {
minutes = `0${minutes}`;
}
expect(datepickerEl.value).toEqual(`${converted[0]}/${(`${converted[1] + 1}`).padStart(2, '0')}/${(`${converted[2]}`).padStart(2, '0')} ${hours}:${minutes} ${amPm}`);
});
it('Should set internal format', () => {
datepickerAPI.setFormat();
expect(datepickerAPI.pattern).toEqual('M/d/yyyy');
expect(datepickerAPI.show24Hours).toEqual(false);
expect(datepickerAPI.isSeconds).toEqual(false);
datepickerAPI.destroy();
datepickerAPI = new DatePicker(datepickerEl, { dateFormat: 'yyyy-MM-DD' });
datepickerAPI.setFormat();
expect(datepickerAPI.pattern).toEqual('yyyy-MM-DD');
expect(datepickerAPI.show24Hours).toEqual(false);
expect(datepickerAPI.isSeconds).toEqual(false);
datepickerAPI.destroy();
datepickerAPI = new DatePicker(datepickerEl, { dateFormat: 'yyyy-MM-DD hh:mm:ss' });
datepickerAPI.setFormat();
expect(datepickerAPI.pattern).toEqual('yyyy-MM-DD hh:mm:ss');
expect(datepickerAPI.show24Hours).toEqual(false);
expect(datepickerAPI.isSeconds).toEqual(true);
datepickerAPI.destroy();
datepickerAPI = new DatePicker(datepickerEl, { dateFormat: 'yyyy-MM-DD HH:mm' });
datepickerAPI.setFormat();
expect(datepickerAPI.pattern).toEqual('yyyy-MM-DD HH:mm');
expect(datepickerAPI.show24Hours).toEqual(true);
expect(datepickerAPI.isSeconds).toEqual(false);
});
it('Should be able to set placeholder', () => {
expect(datepickerEl.getAttribute('placeholder')).toBeFalsy();
datepickerAPI.destroy();
datepickerAPI = new DatePicker(datepickerEl, { dateFormat: 'yyyy-MM-DD', placeholder: true });
expect(datepickerEl.getAttribute('placeholder')).toEqual('yyyy-MM-DD');
datepickerAPI.destroy();
datepickerAPI = new DatePicker(datepickerEl, { placeholder: true, range: { useRange: true, start: '2/5/2018', end: '2/28/2018' } });
expect(datepickerEl.getAttribute('placeholder')).toEqual('M/d/yyyy - M/d/yyyy');
});
it('Should be able to restrict months', (done) => {
datepickerAPI.destroy();
datepickerAPI = new DatePicker(datepickerEl, {
dateFormat: 'MM/dd/yyyy',
disable: {
restrictMonths: true,
minDate: '04/01/2018',
maxDate: '06/30/2018'
}
});
datepickerAPI.setValue('04/15/2018');
datepickerAPI.openCalendar();
setTimeout(() => {
const prevButton = document.body.querySelector('.btn-icon.prev');
const nextButton = document.body.querySelector('.btn-icon.next');
const monthSpan = document.body.querySelector('span.month');
expect(monthSpan.innerHTML).toEqual('April');
expect(prevButton.disabled).toEqual(true);
expect(nextButton.disabled).toEqual(false);
nextButton.click();
expect(monthSpan.innerHTML.trim()).toEqual('May');
nextButton.click();
expect(monthSpan.innerHTML.trim()).toEqual('June');
expect(prevButton.disabled).toEqual(false);
expect(nextButton.disabled).toEqual(true);
done();
}, 100);
});
it('Should be able to disable days and weeks', (done) => {
datepickerAPI.destroy();
datepickerAPI = new DatePicker(datepickerEl, {
dateFormat: 'MM/dd/yyyy',
disable: {
dayOfWeek: [0, 6],
dates: ['04/30/2018', '04/01/2018']
}
});
datepickerAPI.setValue('04/15/2018');
datepickerAPI.openCalendar();
setTimeout(() => {
const tds = document.body.querySelectorAll('.monthview-table td');
expect(tds[0].classList.contains('is-disabled')).toEqual(true);
expect(tds[6].classList.contains('is-disabled')).toEqual(true);
expect(tds[7].classList.contains('is-disabled')).toEqual(true);
expect(tds[13].classList.contains('is-disabled')).toEqual(true);
expect(tds[14].classList.contains('is-disabled')).toEqual(true);
expect(tds[20].classList.contains('is-disabled')).toEqual(true);
expect(tds[21].classList.contains('is-disabled')).toEqual(true);
expect(tds[27].classList.contains('is-disabled')).toEqual(true);
expect(tds[28].classList.contains('is-disabled')).toEqual(true);
expect(tds[29].classList.contains('is-disabled')).toEqual(true);
expect(tds[34].classList.contains('is-disabled')).toEqual(true);
done();
}, 100);
});
// Legend: labels render in declaration order and matching day cells are
// tinted with the legend color at 0.3 alpha (e.g. #76B051 -> rgba(118, 176, 81, 0.3)).
it('Should be able to render a legend', (done) => {
  datepickerAPI.destroy();
  datepickerAPI = new DatePicker(datepickerEl, {
    showLegend: true,
    dateFormat: 'yyyy-MM-dd',
    legend: [
      { name: 'Public Holiday', color: '#76B051', dates: ['1/1/2017', '1/12/2017'] },
      { name: 'Weekends', color: '#EFA836', dayOfWeek: [0, 6] },
      { name: 'Other', color: '#B94E4E', dates: ['1/18/2017', '1/19/2017'] },
      { name: 'Half Days', color: '#9279A6', dates: ['1/21/2017', '1/22/2017'] },
      { name: 'Full Days', color: '#2578A9', dates: ['1/24/2017', '1/25/2017'] }
    ]
  });
  datepickerAPI.setValue('2017-01-03');
  datepickerAPI.openCalendar();
  // Calendar popup renders asynchronously; assert after it settles.
  setTimeout(() => {
    const legendItems = document.body.querySelectorAll('.monthview-legend-item');
    expect(legendItems[0].textContent.trim()).toEqual('Public Holiday');
    expect(legendItems[1].textContent.trim()).toEqual('Weekends');
    expect(legendItems[2].textContent.trim()).toEqual('Other');
    expect(legendItems[3].textContent.trim()).toEqual('Half Days');
    expect(legendItems[4].textContent.trim()).toEqual('Full Days');
    const tds = document.body.querySelectorAll('.monthview-table td');
    // tds[0] is Jan 1 2017 (Public Holiday), tds[6] a Saturday (Weekends).
    expect(tds[0].style.backgroundColor).toEqual('rgba(118, 176, 81, 0.3)');
    expect(tds[6].style.backgroundColor).toEqual('rgba(239, 168, 54, 0.3)');
    done();
  }, 100);
});
// Navigating the open calendar to another month updates the header spans
// and the count of in-month (non-alternate) day cells.
it('Should be able to change months', (done) => {
  datepickerAPI.setValue(new Date(2018, 4, 15));
  datepickerAPI.openCalendar();
  setTimeout(() => {
    datepickerAPI.calendarAPI.showMonth('6', '2018');
    const headerSpans = document.body.querySelectorAll('.monthview-header span');
    const dayCells = document.body.querySelectorAll('td:not(.alternate)');
    expect(headerSpans[0].textContent).toEqual('July ');
    expect(headerSpans[1].textContent).toEqual(' 2018');
    expect(dayCells.length).toEqual(31);
    done();
  }, 100);
});
// ja-JP orders the calendar header year-first (2018年 / 6月).
it('Should render year first in ja-JP ', (done) => {
  datepickerAPI.destroy();
  Locale.set('ja-JP');
  datepickerAPI = new DatePicker(datepickerEl);
  datepickerAPI.setValue(new Date(2018, 5, 15));
  datepickerAPI.openCalendar();
  setTimeout(() => {
    // First header span is the year, second the month (reverse of en-US).
    expect(document.body.querySelectorAll('.monthview-header span')[0].textContent).toEqual('2018年 ');
    expect(document.body.querySelectorAll('.monthview-header span')[1].textContent).toEqual('6月');
    done();
  }, 100);
});
// sv-SE starts the week on Monday, so the first two column headers are M, T.
it('Should render first day of week as Monday in sv-SE', (done) => {
  datepickerAPI.destroy();
  Locale.set('sv-SE');
  datepickerAPI = new DatePicker(datepickerEl);
  datepickerAPI.setValue(new Date(2018, 5, 15));
  datepickerAPI.openCalendar();
  setTimeout(() => {
    expect(document.body.querySelectorAll('.monthview-table thead th')[0].textContent).toEqual('M');
    expect(document.body.querySelectorAll('.monthview-table thead th')[1].textContent).toEqual('T');
    done();
  }, 100);
});
// en-GB starts the week on Monday, so the first two column headers are M, T.
it('Should render first day of week as Monday in en-GB', (done) => {
  datepickerAPI.destroy();
  Locale.set('en-GB');
  datepickerAPI = new DatePicker(datepickerEl);
  datepickerAPI.setValue(new Date(2018, 5, 15));
  datepickerAPI.openCalendar();
  setTimeout(() => {
    expect(document.body.querySelectorAll('.monthview-table thead th')[0].textContent).toEqual('M');
    // Fixed: the expected value was GREEK CAPITAL LETTER TAU (U+03A4 'Τ'),
    // not LATIN CAPITAL LETTER T. en-GB day headers render Latin letters,
    // consistent with the sv-SE expectation.
    expect(document.body.querySelectorAll('.monthview-table thead th')[1].textContent).toEqual('T');
    done();
  }, 100);
});
// With showMonthYearPicker the header collapses into a single toggle button
// and a picker pane is rendered.
it('Should render Month Year Picker', (done) => {
  datepickerAPI.destroy();
  datepickerAPI = new DatePicker(datepickerEl, { showMonthYearPicker: true });
  datepickerAPI.setValue(new Date(2018, 5, 15));
  datepickerAPI.openCalendar();
  setTimeout(() => {
    // innerText concatenates the month and year nodes without a separator.
    expect(document.querySelector('button.btn-monthyear-pane').innerText).toEqual('June2018');
    expect(document.querySelectorAll('.monthview-monthyear-pane').length).toEqual(1);
    done();
  }, 100);
});
// When showMonthYearPicker is off, neither the toggle button nor the
// picker pane should be present in the DOM.
it('Should be able to disable Month Year Picker', (done) => {
  datepickerAPI.destroy();
  datepickerAPI = new DatePicker(datepickerEl, { showMonthYearPicker: false });
  datepickerAPI.setValue(new Date(2018, 5, 15));
  datepickerAPI.openCalendar();
  setTimeout(() => {
    ['button.btn-monthyear-pane', '.monthview-monthyear-pane'].forEach((selector) => {
      expect(document.querySelectorAll(selector).length).toEqual(0);
    });
    done();
  }, 100);
});
// Range mode: a preset "start - end" input value highlights every day in
// the range (Feb 7-22 2018 inclusive = 16 cells).
it('Should render range', (done) => {
  datepickerAPI.destroy();
  datepickerEl.value = '2/7/2018 - 2/22/2018';
  datepickerAPI = new DatePicker(datepickerEl, { range: { useRange: true } });
  datepickerAPI.openCalendar();
  setTimeout(() => {
    expect(document.body.querySelectorAll('.range-selection').length).toEqual(16);
    // Reset the shared input so later specs start clean.
    datepickerEl.value = '';
    done();
  }, 100);
});
// Time mode: the hour/minute/second/period selects are pre-populated from
// the parsed input value.
it('Should render time', (done) => {
  datepickerAPI.destroy();
  datepickerEl.value = '2018-10-12 12:25:10 AM';
  datepickerAPI = new DatePicker(datepickerEl, { showTime: true, dateFormat: 'yyyy-MM-dd', timeFormat: 'h:mm:ss a' });
  datepickerAPI.openCalendar();
  setTimeout(() => {
    const selects = document.body.querySelectorAll('.monthview select');
    // Expected values for hour, minute, second and day period, in order.
    ['12', '25', '10', 'AM'].forEach((expected, i) => {
      expect(selects[i].value).toEqual(expected);
    });
    datepickerEl.value = '';
    done();
  }, 100);
});
// A hidden input should also hide its generated trigger button.
it('Should hide icon if input is hidden', () => {
  datepickerAPI.destroy();
  datepickerEl.classList.add('hidden');
  datepickerAPI = new DatePicker(datepickerEl);
  expect($(datepickerEl).siblings('button.trigger').css('visibility')).toEqual('hidden');
});
// A per-instance `locale` option (da-DK) localizes the month name
// (lowercase 'marts') independently of the global Locale setting.
it('Should be able to render a different locale', (done) => {
  datepickerAPI.destroy();
  datepickerAPI = new DatePicker(datepickerEl, { locale: 'da-DK' });
  datepickerAPI.setValue(new Date(2019, 2, 15));
  datepickerAPI.openCalendar();
  setTimeout(() => {
    datepickerAPI.calendarAPI.showMonth('2', '2019');
    expect(document.body.querySelectorAll('.monthview-header span')[0].textContent).toEqual('marts ');
    expect(document.body.querySelectorAll('.monthview-header span')[1].textContent).toEqual(' 2019');
    // Hmm: March has 31 days; 30 here presumably reflects how alternate
    // cells are classed in this locale's grid — verify against monthview impl.
    expect(document.body.querySelectorAll('td:not(.alternate)').length).toEqual(30);
    done();
  }, 100);
});
// With useMask disabled no mask plugin data should be attached to the input.
it('Should be able to not use mask on input', () => {
  datepickerAPI.destroy();
  datepickerAPI = new DatePicker(datepickerEl, { useMask: false });
  datepickerAPI.setToday();
  expect($(datepickerEl).data('mask')).toBeFalsy();
});
});
|
/**
* @copyright 2010-2017, The Titon Project
* @license http://opensource.org/licenses/BSD-3-Clause
* @link http://titon.io
*/
import React, { Children, PropTypes } from 'react';
import { default as InputSelect } from '../Input/Select';
import Menu from './Menu';
import bind from '../../decorators/bind';
import childrenOf from '../../prop-types/childrenOf';
import formatInputName from '../../utility/formatInputName';
import invariant from '../../utility/invariant';
import isOutsideElement from '../../utility/isOutsideElement';
import generateUID from '../../utility/generateUID';
import CONTEXT_TYPES from './contextTypes';
import MODULE from './module';
import { TOUCH } from '../../flags';
/**
 * Custom select drop-down. Extends the base input `Select` with a
 * toggleable menu (rendered from `children`), single/multiple selection
 * labels, and an optional native-control mode (default on touch devices).
 * Menu state and selection callbacks are shared with descendants through
 * React context under `MODULE.contextKey`.
 */
export default class Select extends InputSelect {
    static module = MODULE;
    static childContextTypes = CONTEXT_TYPES;
    static defaultProps = {
        ...InputSelect.defaultProps,
        arrow: <span className="caret-down" />,
        countMessage: '{count} of {total} selected',
        defaultLabel: 'Select an Option',
        listLimit: 3,
        multipleFormat: 'list',
        native: TOUCH,
    };
    static propTypes = {
        ...InputSelect.propTypes,
        arrow: PropTypes.node,
        children: childrenOf(Menu),
        countMessage: PropTypes.string,
        defaultLabel: PropTypes.string,
        listLimit: PropTypes.number,
        multipleFormat: PropTypes.oneOf(['count', 'list']),
        native: PropTypes.bool,
    };
    // Unique ID used to namespace generated element IDs in context.
    uid = generateUID();
    /**
     * Validate the multiple/native combination and seed state with the
     * mapped options and the normalized default value(s).
     */
    constructor(props) {
        super(props);
        // Multi-select requires the custom menu; native multi-select is
        // only permitted on touch devices.
        if (props.multiple) {
            invariant(!props.native && !TOUCH,
                'Selects using `multiple` cannot use `native` controls on non-touch devices.');
        }
        this.state = {
            ...this.state,
            expanded: false,
            options: this.extractOptions(props.options),
            value: this.extractValues(props.defaultValue, props.multiple),
        };
    }
    /**
     * Expose menu state and control callbacks to descendant menu/option
     * components via legacy context.
     */
    getChildContext() {
        const { name, options, multiple } = this.props;
        const { expanded, value } = this.state;
        return {
            [MODULE.contextKey]: {
                expanded,
                hideMenu: this.hideMenu,
                inputID: formatInputName(name),
                inputName: name,
                mappedOptions: this.state.options,
                multiple,
                options,
                selectValue: this.selectValue,
                // Always pass an array so consumers can treat it uniformly.
                selectedValues: this.extractValues(value, true),
                showMenu: this.showMenu,
                toggleMenu: this.toggleMenu,
                uid: this.uid,
            },
        };
    }
    // Global click listener used to close the menu on outside clicks.
    // NOTE(review): registered in componentWillMount, which would run on the
    // server where `window` is undefined — confirm this is browser-only.
    componentWillMount() {
        window.addEventListener('click', this.handleOnClickOut);
    }
    // With a custom menu we must re-render on every state change (expanded,
    // value); otherwise defer to the parent's logic.
    shouldComponentUpdate(nextProps, nextState) {
        return this.hasMenu() ? true : super.shouldComponentUpdate(nextProps, nextState);
    }
    // Only forward value changes to the parent lifecycle hooks.
    componentWillUpdate(nextProps, nextState) {
        if (nextState.value !== this.state.value) {
            super.componentWillUpdate(nextProps, nextState);
        }
    }
    componentDidUpdate(prevProps, prevState) {
        if (prevState.value !== this.state.value) {
            super.componentDidUpdate(prevProps, prevState);
        }
    }
    componentWillUnmount() {
        window.removeEventListener('click', this.handleOnClickOut);
    }
    /**
     * Flatten the `options` prop (which may contain optgroup entries with
     * a nested `options` array) into a map keyed by option value.
     */
    extractOptions(options) {
        const map = {};
        options.forEach((option) => {
            // Optgroup
            if (option.options) {
                option.options.forEach((child) => {
                    map[child.value] = child;
                });
            // Option
            } else {
                map[option.value] = option;
            }
        });
        return map;
    }
    /**
     * Normalize a value: an array when `multiple`, otherwise the first
     * value (or '' when absent).
     */
    extractValues(value, multiple) {
        const values = Array.isArray(value) ? value : [value];
        if (!multiple) {
            return values[0] || '';
        }
        return values;
    }
    /**
     * Build the toggle label from the current selection: the default label
     * when nothing is selected, a "{count} of {total}" message in `count`
     * format, or a comma list capped at `listLimit` entries otherwise.
     */
    getSelectedLabel() {
        const { options } = this.state;
        const { listLimit, defaultLabel, multipleFormat, countMessage } = this.props;
        const label = [];
        let count = 0;
        let message = '';
        let { value } = this.state;
        // Works for both '' (single) and [] (multiple).
        if (!value.length) {
            return defaultLabel;
        }
        if (!Array.isArray(value)) {
            value = [value];
        }
        value.forEach((val) => {
            const option = options[val];
            if (option) {
                label.push(option.selectedLabel || option.label);
                count += 1;
            }
        });
        switch (multipleFormat) {
            case 'count':
                return countMessage
                    .replace('{count}', count)
                    .replace('{total}', Object.keys(options).length);
            default:
                message = label.slice(0, listLimit).join(', ');
                if (listLimit < count) {
                    message += ' ...';
                }
                return message;
        }
    }
    // Whether a custom drop-down menu was supplied as children.
    hasMenu() {
        return (Children.count(this.props.children) > 0);
    }
    @bind
    hideMenu() {
        this.setState({
            expanded: false,
        });
    }
    // Context callback: update the selection from a menu option.
    @bind
    selectValue(value) {
        this.setState({
            value: this.extractValues(value, this.props.multiple),
        });
    }
    @bind
    showMenu() {
        this.setState({
            expanded: true,
        });
    }
    // Toggle the menu unless disabled or no custom menu exists.
    @bind
    toggleMenu() {
        if (this.props.disabled || !this.hasMenu()) {
            return;
        }
        if (this.state.expanded) {
            this.hideMenu();
        } else {
            this.showMenu();
        }
    }
    @bind
    handleOnBlur() {
        if (!this.props.disabled && this.state.expanded && this.hasMenu()) {
            this.hideMenu();
        }
    }
    @bind
    handleOnClickLabel(e) {
        e.preventDefault();
        this.toggleMenu();
    }
    // Close the menu when a click lands outside the component's container.
    @bind
    handleOnClickOut(e) {
        if (
            !this.props.disabled && this.state.expanded &&
            this.hasMenu() && isOutsideElement(this.container, e.target)
        ) {
            this.hideMenu();
        }
    }
    @bind
    handleOnFocus() {
        if (!this.props.disabled && !this.state.expanded && this.hasMenu()) {
            this.showMenu();
        }
    }
    /**
     * Render the hidden native <select>, the styled toggle link (label +
     * arrow), and — in non-native mode — the custom menu children.
     */
    render() {
        const { children, native, disabled, options, arrow } = this.props;
        const { expanded } = this.state;
        const inputProps = this.gatherProps(false);
        const stateClasses = this.gatherStateClasses();
        // Add another state class
        stateClasses['is-native'] = native;
        return (
            <div
                ref={(ref) => { this.container = ref; }}
                id={this.formatID('select', inputProps.id)}
                className={this.formatClass(stateClasses)}
                aria-disabled={disabled}
            >
                <select
                    {...inputProps}
                    onFocus={this.handleOnFocus}
                    onBlur={this.handleOnBlur}
                >
                    {this.renderOptions(options)}
                </select>
                <a
                    href=""
                    role="button"
                    className={this.formatChildClass('toggle', stateClasses)}
                    onClick={this.handleOnClickLabel}
                    aria-controls={native ? null : this.formatID('select-toggle', inputProps.id)}
                    aria-haspopup={native ? null : true}
                    aria-expanded={native ? null : expanded}
                >
                    <span className={this.formatChildClass('label')}>
                        {this.getSelectedLabel()}
                    </span>
                    <span className={this.formatChildClass('arrow')}>
                        {arrow}
                    </span>
                </a>
                {native ? null : children}
            </div>
        );
    }
}
|
/*
* linux/kernel/time/clocksource.c
*
* This file contains the functions which manage clocksource drivers.
*
* Copyright (C) 2004, 2005 IBM, John Stultz (johnstul@us.ibm.com)
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
*
* TODO WishList:
* o Allow clocksource drivers to be unregistered
*/
#define pr_fmt(fmt) KBUILD_MODNAME ": " fmt
#include <linux/device.h>
#include <linux/clocksource.h>
#include <linux/init.h>
#include <linux/module.h>
#include <linux/sched.h> /* for spin_unlock_irq() using preempt_count() m68k */
#include <linux/tick.h>
#include <linux/kthread.h>
#include "tick-internal.h"
#include "timekeeping_internal.h"
/**
* clocks_calc_mult_shift - calculate mult/shift factors for scaled math of clocks
* @mult: pointer to mult variable
* @shift: pointer to shift variable
* @from: frequency to convert from
* @to: frequency to convert to
* @maxsec: guaranteed runtime conversion range in seconds
*
* The function evaluates the shift/mult pair for the scaled math
* operations of clocksources and clockevents.
*
* @to and @from are frequency values in HZ. For clock sources @to is
* NSEC_PER_SEC == 1GHz and @from is the counter frequency. For clock
* event @to is the counter frequency and @from is NSEC_PER_SEC.
*
* The @maxsec conversion range argument controls the time frame in
* seconds which must be covered by the runtime conversion with the
* calculated mult and shift factors. This guarantees that no 64bit
* overflow happens when the input value of the conversion is
* multiplied with the calculated mult factor. Larger ranges may
* reduce the conversion accuracy by chosing smaller mult and shift
* factors.
*/
void
clocks_calc_mult_shift(u32 *mult, u32 *shift, u32 from, u32 to, u32 maxsec)
{
	u64 scaled;
	u32 shift_bits;
	u32 accuracy_bits = 32;

	/*
	 * Reduce the usable accuracy by one bit for every bit the largest
	 * expected input value (maxsec * from) occupies above 32 bits, so
	 * the runtime multiplication cannot overflow 64 bits.
	 */
	scaled = ((u64)maxsec * from) >> 32;
	while (scaled) {
		scaled >>= 1;
		accuracy_bits--;
	}

	/*
	 * Pick the largest shift for which the rounded mult value still
	 * fits into the permitted accuracy range.
	 */
	for (shift_bits = 32; shift_bits > 0; shift_bits--) {
		scaled = (u64)to << shift_bits;
		scaled += from / 2;	/* round to nearest */
		do_div(scaled, from);
		if ((scaled >> accuracy_bits) == 0)
			break;
	}

	*mult = scaled;
	*shift = shift_bits;
}
EXPORT_SYMBOL_GPL(clocks_calc_mult_shift);
/*[Clocksource internal variables]---------
* curr_clocksource:
* currently selected clocksource.
* clocksource_list:
* linked list with the registered clocksources
* clocksource_mutex:
* protects manipulations to curr_clocksource and the clocksource_list
* override_name:
* Name of the user-specified clocksource.
*/
static struct clocksource *curr_clocksource;
static LIST_HEAD(clocksource_list);
static DEFINE_MUTEX(clocksource_mutex);
static char override_name[CS_NAME_LEN];
static int finished_booting;
#ifdef CONFIG_CLOCKSOURCE_WATCHDOG
static void clocksource_watchdog_work(struct work_struct *work);
static void clocksource_select(void);
static LIST_HEAD(watchdog_list);
static struct clocksource *watchdog;
static struct timer_list watchdog_timer;
static DECLARE_WORK(watchdog_work, clocksource_watchdog_work);
static DEFINE_SPINLOCK(watchdog_lock);
static int watchdog_running;
static atomic_t watchdog_reset_pending;
static int clocksource_watchdog_kthread(void *data);
static void __clocksource_change_rating(struct clocksource *cs, int rating);
/*
* Interval: 0.5sec Threshold: 0.0625s
*/
#define WATCHDOG_INTERVAL (HZ >> 1)
#define WATCHDOG_THRESHOLD (NSEC_PER_SEC >> 4)
/*
 * Deferred work: spawn the kthread which re-rates unstable clocksources
 * in process context.
 */
static void clocksource_watchdog_work(struct work_struct *work)
{
	/*
	 * If kthread_run fails the next watchdog scan over the
	 * watchdog_list will find the unstable clock again.
	 */
	kthread_run(clocksource_watchdog_kthread, NULL, "kwatchdog");
}
/*
 * Flag @cs unstable and kick the deferred work to re-rate it.
 * NOTE(review): callers appear to hold watchdog_lock — confirm.
 */
static void __clocksource_unstable(struct clocksource *cs)
{
	/* Unstable clocks are neither highres capable nor watchdog-verified. */
	cs->flags &= ~(CLOCK_SOURCE_VALID_FOR_HRES | CLOCK_SOURCE_WATCHDOG);
	cs->flags |= CLOCK_SOURCE_UNSTABLE;
	/* Give the clocksource driver a chance to react (optional hook). */
	if (cs->mark_unstable)
		cs->mark_unstable(cs);
	/* Before boot finishes, clocksource_done_booting() runs the scan. */
	if (finished_booting)
		schedule_work(&watchdog_work);
}
/**
* clocksource_mark_unstable - mark clocksource unstable via watchdog
* @cs: clocksource to be marked unstable
*
* This function is called instead of clocksource_change_rating from
* cpu hotplug code to avoid a deadlock between the clocksource mutex
* and the cpu hotplug mutex. It defers the update of the clocksource
* to the watchdog thread.
*/
void clocksource_mark_unstable(struct clocksource *cs)
{
	unsigned long flags;
	spin_lock_irqsave(&watchdog_lock, flags);
	if (!(cs->flags & CLOCK_SOURCE_UNSTABLE)) {
		/*
		 * Make sure the clocksource is on the watchdog list so the
		 * deferred kthread can find it and re-rate it.
		 */
		if (list_empty(&cs->wd_list))
			list_add(&cs->wd_list, &watchdog_list);
		__clocksource_unstable(cs);
	}
	spin_unlock_irqrestore(&watchdog_lock, flags);
}
/*
 * Periodic watchdog timer callback: compare each watched clocksource's
 * interval since the last cycle against the watchdog clocksource and
 * mark it unstable when the skew exceeds WATCHDOG_THRESHOLD. Also
 * promotes continuous clocksources to highres capability once verified.
 */
static void clocksource_watchdog(struct timer_list *unused)
{
	struct clocksource *cs;
	u64 csnow, wdnow, cslast, wdlast, delta;
	int64_t wd_nsec, cs_nsec;
	int next_cpu, reset_pending;
	spin_lock(&watchdog_lock);
	if (!watchdog_running)
		goto out;
	reset_pending = atomic_read(&watchdog_reset_pending);
	list_for_each_entry(cs, &watchdog_list, wd_list) {
		/* Clocksource already marked unstable? */
		if (cs->flags & CLOCK_SOURCE_UNSTABLE) {
			/* Re-kick the re-rating work until it has run. */
			if (finished_booting)
				schedule_work(&watchdog_work);
			continue;
		}
		/* Read both clocks back to back with interrupts disabled. */
		local_irq_disable();
		csnow = cs->read(cs);
		wdnow = watchdog->read(watchdog);
		local_irq_enable();
		/* Clocksource initialized ? */
		if (!(cs->flags & CLOCK_SOURCE_WATCHDOG) ||
		    atomic_read(&watchdog_reset_pending)) {
			/* First cycle (or after reset): record baselines only. */
			cs->flags |= CLOCK_SOURCE_WATCHDOG;
			cs->wd_last = wdnow;
			cs->cs_last = csnow;
			continue;
		}
		/* Convert both deltas since the last cycle to nanoseconds. */
		delta = clocksource_delta(wdnow, cs->wd_last, watchdog->mask);
		wd_nsec = clocksource_cyc2ns(delta, watchdog->mult,
					     watchdog->shift);
		delta = clocksource_delta(csnow, cs->cs_last, cs->mask);
		cs_nsec = clocksource_cyc2ns(delta, cs->mult, cs->shift);
		wdlast = cs->wd_last; /* save these in case we print them */
		cslast = cs->cs_last;
		cs->cs_last = csnow;
		cs->wd_last = wdnow;
		/* A reset arrived mid-scan: skip the skew check this cycle. */
		if (atomic_read(&watchdog_reset_pending))
			continue;
		/* Check the deviation from the watchdog clocksource. */
		if (abs(cs_nsec - wd_nsec) > WATCHDOG_THRESHOLD) {
			pr_warn("timekeeping watchdog on CPU%d: Marking clocksource '%s' as unstable because the skew is too large:\n",
				smp_processor_id(), cs->name);
			pr_warn(" '%s' wd_now: %llx wd_last: %llx mask: %llx\n",
				watchdog->name, wdnow, wdlast, watchdog->mask);
			pr_warn(" '%s' cs_now: %llx cs_last: %llx mask: %llx\n",
				cs->name, csnow, cslast, cs->mask);
			__clocksource_unstable(cs);
			continue;
		}
		/* Let the current clocksource know it passed this cycle. */
		if (cs == curr_clocksource && cs->tick_stable)
			cs->tick_stable(cs);
		if (!(cs->flags & CLOCK_SOURCE_VALID_FOR_HRES) &&
		    (cs->flags & CLOCK_SOURCE_IS_CONTINUOUS) &&
		    (watchdog->flags & CLOCK_SOURCE_IS_CONTINUOUS)) {
			/* Mark it valid for high-res. */
			cs->flags |= CLOCK_SOURCE_VALID_FOR_HRES;
			/*
			 * clocksource_done_booting() will sort it if
			 * finished_booting is not set yet.
			 */
			if (!finished_booting)
				continue;
			/*
			 * If this is not the current clocksource let
			 * the watchdog thread reselect it. Due to the
			 * change to high res this clocksource might
			 * be preferred now. If it is the current
			 * clocksource let the tick code know about
			 * that change.
			 */
			if (cs != curr_clocksource) {
				cs->flags |= CLOCK_SOURCE_RESELECT;
				schedule_work(&watchdog_work);
			} else {
				tick_clock_notify();
			}
		}
	}
	/*
	 * We only clear the watchdog_reset_pending, when we did a
	 * full cycle through all clocksources.
	 */
	if (reset_pending)
		atomic_dec(&watchdog_reset_pending);
	/*
	 * Cycle through CPUs to check if the CPUs stay synchronized
	 * to each other.
	 */
	next_cpu = cpumask_next(raw_smp_processor_id(), cpu_online_mask);
	if (next_cpu >= nr_cpu_ids)
		next_cpu = cpumask_first(cpu_online_mask);
	watchdog_timer.expires += WATCHDOG_INTERVAL;
	add_timer_on(&watchdog_timer, next_cpu);
out:
	spin_unlock(&watchdog_lock);
}
/*
 * Arm the watchdog timer on the first online CPU, if a watchdog
 * clocksource and at least one watched clocksource exist.
 * NOTE(review): callers appear to hold watchdog_lock — confirm.
 */
static inline void clocksource_start_watchdog(void)
{
	if (watchdog_running || !watchdog || list_empty(&watchdog_list))
		return;
	timer_setup(&watchdog_timer, clocksource_watchdog, 0);
	watchdog_timer.expires = jiffies + WATCHDOG_INTERVAL;
	add_timer_on(&watchdog_timer, cpumask_first(cpu_online_mask));
	watchdog_running = 1;
}
/* Stop the watchdog timer once nothing is left to watch. */
static inline void clocksource_stop_watchdog(void)
{
	if (!watchdog_running || (watchdog && !list_empty(&watchdog_list)))
		return;
	del_timer(&watchdog_timer);
	watchdog_running = 0;
}
/*
 * Drop the WATCHDOG flag on all watched clocksources so the next timer
 * cycle re-records fresh baselines instead of computing stale deltas.
 */
static inline void clocksource_reset_watchdog(void)
{
	struct clocksource *cs;
	list_for_each_entry(cs, &watchdog_list, wd_list)
		cs->flags &= ~CLOCK_SOURCE_WATCHDOG;
}
/*
 * Request an asynchronous watchdog reset; the flag is consumed by the
 * timer callback after a full scan.
 */
static void clocksource_resume_watchdog(void)
{
	atomic_inc(&watchdog_reset_pending);
}
/*
 * Register a newly added clocksource with the watchdog machinery:
 * clocksources that must be verified go onto the watchdog list, all
 * others may be trusted and, if continuous, are highres capable.
 */
static void clocksource_enqueue_watchdog(struct clocksource *cs)
{
	unsigned long flags;
	spin_lock_irqsave(&watchdog_lock, flags);
	if (cs->flags & CLOCK_SOURCE_MUST_VERIFY) {
		/* cs is a clocksource to be watched. */
		list_add(&cs->wd_list, &watchdog_list);
		cs->flags &= ~CLOCK_SOURCE_WATCHDOG;
	} else {
		/* cs is a watchdog. */
		if (cs->flags & CLOCK_SOURCE_IS_CONTINUOUS)
			cs->flags |= CLOCK_SOURCE_VALID_FOR_HRES;
	}
	spin_unlock_irqrestore(&watchdog_lock, flags);
}
/*
 * (Re)select the watchdog clocksource: the highest rated clocksource
 * that does not itself require verification. With @fallback true the
 * current watchdog is excluded (e.g. while it is being unbound).
 */
static void clocksource_select_watchdog(bool fallback)
{
	struct clocksource *cs, *old_wd;
	unsigned long flags;
	spin_lock_irqsave(&watchdog_lock, flags);
	/* save current watchdog */
	old_wd = watchdog;
	if (fallback)
		watchdog = NULL;
	list_for_each_entry(cs, &clocksource_list, list) {
		/* cs is a clocksource to be watched. */
		if (cs->flags & CLOCK_SOURCE_MUST_VERIFY)
			continue;
		/* Skip current if we were requested for a fallback. */
		if (fallback && cs == old_wd)
			continue;
		/* Pick the best watchdog. */
		if (!watchdog || cs->rating > watchdog->rating)
			watchdog = cs;
	}
	/* If we failed to find a fallback restore the old one. */
	if (!watchdog)
		watchdog = old_wd;
	/* If we changed the watchdog we need to reset cycles. */
	if (watchdog != old_wd)
		clocksource_reset_watchdog();
	/* Check if the watchdog timer needs to be started. */
	clocksource_start_watchdog();
	spin_unlock_irqrestore(&watchdog_lock, flags);
}
/*
 * Remove @cs from the watchdog list on unregistration. The watchdog
 * clocksource itself is never on the list, hence the cs != watchdog check.
 */
static void clocksource_dequeue_watchdog(struct clocksource *cs)
{
	unsigned long flags;
	spin_lock_irqsave(&watchdog_lock, flags);
	if (cs != watchdog) {
		if (cs->flags & CLOCK_SOURCE_MUST_VERIFY) {
			/* cs is a watched clocksource. */
			list_del_init(&cs->wd_list);
			/* Check if the watchdog timer needs to be stopped. */
			clocksource_stop_watchdog();
		}
	}
	spin_unlock_irqrestore(&watchdog_lock, flags);
}
/*
 * Move all clocksources flagged unstable off the watchdog list onto a
 * private list and re-rate them to 0 outside the lock. Returns nonzero
 * when a clocksource reselect is warranted (unstable or RESELECT seen).
 */
static int __clocksource_watchdog_kthread(void)
{
	struct clocksource *cs, *tmp;
	unsigned long flags;
	LIST_HEAD(unstable);
	int select = 0;
	spin_lock_irqsave(&watchdog_lock, flags);
	list_for_each_entry_safe(cs, tmp, &watchdog_list, wd_list) {
		if (cs->flags & CLOCK_SOURCE_UNSTABLE) {
			list_del_init(&cs->wd_list);
			list_add(&cs->wd_list, &unstable);
			select = 1;
		}
		if (cs->flags & CLOCK_SOURCE_RESELECT) {
			cs->flags &= ~CLOCK_SOURCE_RESELECT;
			select = 1;
		}
	}
	/* Check if the watchdog timer needs to be stopped. */
	clocksource_stop_watchdog();
	spin_unlock_irqrestore(&watchdog_lock, flags);
	/* Needs to be done outside of watchdog lock */
	list_for_each_entry_safe(cs, tmp, &unstable, wd_list) {
		list_del_init(&cs->wd_list);
		/* Rating 0 effectively retires the clocksource. */
		__clocksource_change_rating(cs, 0);
	}
	return select;
}
/*
 * Process-context worker spawned by clocksource_watchdog_work(): takes
 * clocksource_mutex and reselects the best clocksource if needed.
 */
static int clocksource_watchdog_kthread(void *data)
{
	mutex_lock(&clocksource_mutex);
	if (__clocksource_watchdog_kthread())
		clocksource_select();
	mutex_unlock(&clocksource_mutex);
	return 0;
}
/* True when @cs is currently acting as the watchdog clocksource. */
static bool clocksource_is_watchdog(struct clocksource *cs)
{
	return cs == watchdog;
}
#else /* CONFIG_CLOCKSOURCE_WATCHDOG */
/*
 * Watchdog disabled: provide no-op stubs. Continuous clocksources are
 * trusted and marked highres capable immediately on enqueue.
 */
static void clocksource_enqueue_watchdog(struct clocksource *cs)
{
	if (cs->flags & CLOCK_SOURCE_IS_CONTINUOUS)
		cs->flags |= CLOCK_SOURCE_VALID_FOR_HRES;
}
static void clocksource_select_watchdog(bool fallback) { }
static inline void clocksource_dequeue_watchdog(struct clocksource *cs) { }
static inline void clocksource_resume_watchdog(void) { }
static inline int __clocksource_watchdog_kthread(void) { return 0; }
static bool clocksource_is_watchdog(struct clocksource *cs) { return false; }
void clocksource_mark_unstable(struct clocksource *cs) { }
#endif /* CONFIG_CLOCKSOURCE_WATCHDOG */
/**
* clocksource_suspend - suspend the clocksource(s)
*/
void clocksource_suspend(void)
{
struct clocksource *cs;
list_for_each_entry_reverse(cs, &clocksource_list, list)
if (cs->suspend)
cs->suspend(cs);
}
/**
* clocksource_resume - resume the clocksource(s)
*/
void clocksource_resume(void)
{
	struct clocksource *cs;
	/* Invoke the optional resume hook of every registered clocksource. */
	list_for_each_entry(cs, &clocksource_list, list)
		if (cs->resume)
			cs->resume(cs);
	/* Ask the watchdog to re-baseline instead of seeing a huge delta. */
	clocksource_resume_watchdog();
}
/**
* clocksource_touch_watchdog - Update watchdog
*
* Update the watchdog after exception contexts such as kgdb so as not
* to incorrectly trip the watchdog. This might fail when the kernel
* was stopped in code which holds watchdog_lock.
*/
void clocksource_touch_watchdog(void)
{
	/* Same mechanism as resume: request a watchdog re-baseline. */
	clocksource_resume_watchdog();
}
/**
* clocksource_max_adjustment- Returns max adjustment amount
* @cs: Pointer to clocksource
*
*/
static u32 clocksource_max_adjustment(struct clocksource *cs)
{
u64 ret;
/*
* We won't try to correct for more than 11% adjustments (110,000 ppm),
*/
ret = (u64)cs->mult * 11;
do_div(ret,100);
return (u32)ret;
}
/**
* clocks_calc_max_nsecs - Returns maximum nanoseconds that can be converted
* @mult: cycle to nanosecond multiplier
* @shift: cycle to nanosecond divisor (power of two)
* @maxadj: maximum adjustment value to mult (~11%)
* @mask: bitmask for two's complement subtraction of non 64 bit counters
* @max_cyc: maximum cycle value before potential overflow (does not include
* any safety margin)
*
* NOTE: This function includes a safety margin of 50%, in other words, we
* return half the number of nanoseconds the hardware counter can technically
* cover. This is done so that we can potentially detect problems caused by
* delayed timers or bad hardware, which might result in time intervals that
* are larger than what the math used can handle without overflows.
*/
u64 clocks_calc_max_nsecs(u32 mult, u32 shift, u32 maxadj, u64 mask, u64 *max_cyc)
{
	u64 max_nsecs, max_cycles;
	/*
	 * Calculate the maximum number of cycles that we can pass to the
	 * cyc2ns() function without overflowing a 64-bit result.
	 */
	max_cycles = ULLONG_MAX;
	/* Use the worst-case (largest) multiplier: mult + maxadj. */
	do_div(max_cycles, mult+maxadj);
	/*
	 * The actual maximum number of cycles we can defer the clocksource is
	 * determined by the minimum of max_cycles and mask.
	 * Note: Here we subtract the maxadj to make sure we don't sleep for
	 * too long if there's a large negative adjustment.
	 */
	max_cycles = min(max_cycles, mask);
	max_nsecs = clocksource_cyc2ns(max_cycles, mult - maxadj, shift);
	/* return the max_cycles value as well if requested */
	if (max_cyc)
		*max_cyc = max_cycles;
	/* Return 50% of the actual maximum, so we can detect bad values */
	max_nsecs >>= 1;
	return max_nsecs;
}
/**
* clocksource_update_max_deferment - Updates the clocksource max_idle_ns & max_cycles
* @cs: Pointer to clocksource to be updated
*
*/
static inline void clocksource_update_max_deferment(struct clocksource *cs)
{
	/* Recompute the deferment limits from the current mult/shift/maxadj. */
	cs->max_idle_ns = clocks_calc_max_nsecs(cs->mult, cs->shift,
						cs->maxadj, cs->mask,
						&cs->max_cycles);
}
#ifndef CONFIG_ARCH_USES_GETTIMEOFFSET
static struct clocksource *clocksource_find_best(bool oneshot, bool skipcur)
{
	struct clocksource *cs;

	/* Nothing to pick from before boot finishes or with an empty list. */
	if (!finished_booting || list_empty(&clocksource_list))
		return NULL;

	/*
	 * The list is sorted by descending rating, so the first entry that
	 * passes the filters is the best one. In oneshot (highres/nohz)
	 * mode only highres capable clocksources qualify.
	 */
	list_for_each_entry(cs, &clocksource_list, list) {
		bool skip = (skipcur && cs == curr_clocksource) ||
			    (oneshot && !(cs->flags & CLOCK_SOURCE_VALID_FOR_HRES));

		if (!skip)
			return cs;
	}

	return NULL;
}
/*
 * Core of clocksource selection: pick the best rated clocksource, honor
 * a user override when compatible, and install the result via
 * timekeeping_notify(). @skipcur excludes the current clocksource
 * (fallback path). Must hold clocksource_mutex.
 */
static void __clocksource_select(bool skipcur)
{
	bool oneshot = tick_oneshot_mode_active();
	struct clocksource *best, *cs;
	/* Find the best suitable clocksource */
	best = clocksource_find_best(oneshot, skipcur);
	if (!best)
		return;
	/* Check for the override clocksource. */
	list_for_each_entry(cs, &clocksource_list, list) {
		if (skipcur && cs == curr_clocksource)
			continue;
		if (strcmp(cs->name, override_name) != 0)
			continue;
		/*
		 * Check to make sure we don't switch to a non-highres
		 * capable clocksource if the tick code is in oneshot
		 * mode (highres or nohz)
		 */
		if (!(cs->flags & CLOCK_SOURCE_VALID_FOR_HRES) && oneshot) {
			/* Override clocksource cannot be used. */
			if (cs->flags & CLOCK_SOURCE_UNSTABLE) {
				pr_warn("Override clocksource %s is unstable and not HRT compatible - cannot switch while in HRT/NOHZ mode\n",
					cs->name);
				/* Drop the unusable override permanently. */
				override_name[0] = 0;
			} else {
				/*
				 * The override cannot be currently verified.
				 * Deferring to let the watchdog check.
				 */
				pr_info("Override clocksource %s is not currently HRT compatible - deferring\n",
					cs->name);
			}
		} else
			/* Override clocksource can be used. */
			best = cs;
		/* At most one entry can match override_name. */
		break;
	}
	/* Install the new clocksource unless timekeeping rejects it. */
	if (curr_clocksource != best && !timekeeping_notify(best)) {
		pr_info("Switched to clocksource %s\n", best->name);
		curr_clocksource = best;
	}
}
/**
* clocksource_select - Select the best clocksource available
*
* Private function. Must hold clocksource_mutex when called.
*
* Select the clocksource with the best rating, or the clocksource,
* which is selected by userspace override.
*/
static void clocksource_select(void)
{
	__clocksource_select(false);
}
/* Like clocksource_select(), but never picks the current clocksource. */
static void clocksource_select_fallback(void)
{
	__clocksource_select(true);
}
#else /* !CONFIG_ARCH_USES_GETTIMEOFFSET */
/* Arch does its own timekeeping: selection is a no-op. */
static inline void clocksource_select(void) { }
static inline void clocksource_select_fallback(void) { }
#endif
/*
* clocksource_done_booting - Called near the end of core bootup
*
* Hack to avoid lots of clocksource churn at boot time.
* We use fs_initcall because we want this to start before
* device_initcall but after subsys_initcall.
*/
static int __init clocksource_done_booting(void)
{
	mutex_lock(&clocksource_mutex);
	/* Start from the architecture's default clock. */
	curr_clocksource = clocksource_default_clock();
	finished_booting = 1;
	/*
	 * Run the watchdog first to eliminate unstable clock sources
	 */
	__clocksource_watchdog_kthread();
	clocksource_select();
	mutex_unlock(&clocksource_mutex);
	return 0;
}
fs_initcall(clocksource_done_booting);
/*
* Enqueue the clocksource sorted by rating
*/
/*
 * Insert @cs into clocksource_list, keeping the list sorted by
 * descending rating.
 */
static void clocksource_enqueue(struct clocksource *cs)
{
	struct list_head *insert_after = &clocksource_list;
	struct clocksource *iter;

	/* Advance past every entry rated at least as high as the new one. */
	list_for_each_entry(iter, &clocksource_list, list) {
		if (iter->rating < cs->rating)
			break;
		insert_after = &iter->list;
	}

	list_add(&cs->list, insert_after);
}
/**
* __clocksource_update_freq_scale - Used update clocksource with new freq
* @cs: clocksource to be registered
* @scale: Scale factor multiplied against freq to get clocksource hz
* @freq: clocksource frequency (cycles per second) divided by scale
*
* This should only be called from the clocksource->enable() method.
*
* This *SHOULD NOT* be called directly! Please use the
* __clocksource_update_freq_hz() or __clocksource_update_freq_khz() helper
* functions.
*/
void __clocksource_update_freq_scale(struct clocksource *cs, u32 scale, u32 freq)
{
	u64 sec;
	/*
	 * Default clocksources are *special* and self-define their mult/shift.
	 * But, you're not special, so you should specify a freq value.
	 */
	if (freq) {
		/*
		 * Calc the maximum number of seconds which we can run before
		 * wrapping around. For clocksources which have a mask > 32-bit
		 * we need to limit the max sleep time to have a good
		 * conversion precision. 10 minutes is still a reasonable
		 * amount. That results in a shift value of 24 for a
		 * clocksource with mask >= 40-bit and f >= 4GHz. That maps to
		 * ~ 0.06ppm granularity for NTP.
		 */
		sec = cs->mask;
		do_div(sec, freq);
		do_div(sec, scale);
		if (!sec)
			sec = 1;
		else if (sec > 600 && cs->mask > UINT_MAX)
			sec = 600;
		clocks_calc_mult_shift(&cs->mult, &cs->shift, freq,
				       NSEC_PER_SEC / scale, sec * scale);
	}
	/*
	 * Ensure clocksources that have large 'mult' values don't overflow
	 * when adjusted.
	 */
	cs->maxadj = clocksource_max_adjustment(cs);
	/* Halve mult until mult +/- maxadj can no longer wrap a u32. */
	while (freq && ((cs->mult + cs->maxadj < cs->mult)
		|| (cs->mult - cs->maxadj > cs->mult))) {
		cs->mult >>= 1;
		cs->shift--;
		cs->maxadj = clocksource_max_adjustment(cs);
	}
	/*
	 * Only warn for *special* clocksources that self-define
	 * their mult/shift values and don't specify a freq.
	 */
	WARN_ONCE(cs->mult + cs->maxadj < cs->mult,
		"timekeeping: Clocksource %s might overflow on 11%% adjustment\n",
		cs->name);
	clocksource_update_max_deferment(cs);
	pr_info("%s: mask: 0x%llx max_cycles: 0x%llx, max_idle_ns: %lld ns\n",
		cs->name, cs->mask, cs->max_cycles, cs->max_idle_ns);
}
EXPORT_SYMBOL_GPL(__clocksource_update_freq_scale);
/**
 * __clocksource_register_scale - Used to install new clocksources
 * @cs:		clocksource to be registered
 * @scale:	Scale factor multiplied against freq to get clocksource hz
 * @freq:	clocksource frequency (cycles per second) divided by scale
 *
 * Returns zero. (NOTE(review): older documentation mentioned -EBUSY, but
 * the current implementation cannot fail.)
 *
 * This *SHOULD NOT* be called directly! Please use the
 * clocksource_register_hz() or clocksource_register_khz helper functions.
 */
int __clocksource_register_scale(struct clocksource *cs, u32 scale, u32 freq)
{
	/* Initialize mult/shift and max_idle_ns */
	__clocksource_update_freq_scale(cs, scale, freq);

	/* Add clocksource to the clocksource list */
	mutex_lock(&clocksource_mutex);
	clocksource_enqueue(cs);
	clocksource_enqueue_watchdog(cs);
	/* Re-evaluate the best clocksource/watchdog now that @cs is listed. */
	clocksource_select();
	clocksource_select_watchdog(false);
	mutex_unlock(&clocksource_mutex);
	return 0;
}
EXPORT_SYMBOL_GPL(__clocksource_register_scale);
/*
 * Re-rate @cs and re-insert it into the clocksource list.
 * Caller must hold clocksource_mutex (see clocksource_change_rating()).
 */
static void __clocksource_change_rating(struct clocksource *cs, int rating)
{
	/* Remove and re-enqueue so list position reflects the new rating. */
	list_del(&cs->list);
	cs->rating = rating;
	clocksource_enqueue(cs);
}
/**
 * clocksource_change_rating - Change the rating of a registered clocksource
 * @cs:		clocksource to be changed
 * @rating:	new rating
 */
void clocksource_change_rating(struct clocksource *cs, int rating)
{
	mutex_lock(&clocksource_mutex);
	__clocksource_change_rating(cs, rating);
	/* The rating change may alter which clocksource/watchdog is best. */
	clocksource_select();
	clocksource_select_watchdog(false);
	mutex_unlock(&clocksource_mutex);
}
EXPORT_SYMBOL(clocksource_change_rating);
/*
 * Unbind clocksource @cs. Called with clocksource_mutex held.
 *
 * Returns 0 on success, -EBUSY when @cs is in use as the watchdog or the
 * current clocksource and no replacement could be installed.
 */
static int clocksource_unbind(struct clocksource *cs)
{
	if (clocksource_is_watchdog(cs)) {
		/* Select and try to install a replacement watchdog. */
		clocksource_select_watchdog(true);
		if (clocksource_is_watchdog(cs))
			return -EBUSY;
	}

	if (cs == curr_clocksource) {
		/* Select and try to install a replacement clock source */
		clocksource_select_fallback();
		if (curr_clocksource == cs)
			return -EBUSY;
	}
	/* Safe to remove: @cs is no longer the watchdog or current source. */
	clocksource_dequeue_watchdog(cs);
	list_del_init(&cs->list);
	return 0;
}
/**
 * clocksource_unregister - remove a registered clocksource
 * @cs: clocksource to be unregistered
 *
 * Returns 0 on success, -EBUSY when @cs is still in use and no
 * replacement could be installed (see clocksource_unbind()).
 */
int clocksource_unregister(struct clocksource *cs)
{
	int err = 0;

	mutex_lock(&clocksource_mutex);
	/* An empty list node means @cs was never registered (or already removed). */
	if (!list_empty(&cs->list))
		err = clocksource_unbind(cs);
	mutex_unlock(&clocksource_mutex);

	return err;
}
EXPORT_SYMBOL(clocksource_unregister);
#ifdef CONFIG_SYSFS
/**
 * sysfs_show_current_clocksources - sysfs interface for current clocksource
 * @dev:	unused
 * @attr:	unused
 * @buf:	char buffer to be filled with clocksource list
 *
 * Provides sysfs interface for listing current clocksource.
 */
static ssize_t
sysfs_show_current_clocksources(struct device *dev,
				struct device_attribute *attr, char *buf)
{
	ssize_t count = 0;

	/* Hold the mutex so curr_clocksource cannot change mid-copy. */
	mutex_lock(&clocksource_mutex);
	/*
	 * Use scnprintf() instead of snprintf(): its return value is the
	 * number of bytes actually written, so the returned count can
	 * never exceed PAGE_SIZE even if the name were truncated.
	 */
	count = scnprintf(buf, PAGE_SIZE, "%s\n", curr_clocksource->name);
	mutex_unlock(&clocksource_mutex);

	return count;
}
/*
 * Copy a clocksource name out of a sysfs write buffer into @dst,
 * NUL-terminating it and dropping a trailing newline.
 * Returns the original @cnt on success, -EINVAL for empty/oversized input.
 */
ssize_t sysfs_get_uname(const char *buf, char *dst, size_t cnt)
{
	const size_t orig_len = cnt;

	/* strings from sysfs write are not 0 terminated! */
	if (cnt == 0 || cnt >= CS_NAME_LEN)
		return -EINVAL;

	/* strip off a trailing '\n', if any */
	if (buf[cnt - 1] == '\n')
		cnt--;

	if (cnt)
		memcpy(dst, buf, cnt);
	dst[cnt] = '\0';

	return orig_len;
}
/**
 * sysfs_override_clocksource - interface for manually overriding clocksource
 * @dev:	unused
 * @attr:	unused
 * @buf:	name of override clocksource
 * @count:	length of buffer
 *
 * Takes input from sysfs interface for manually overriding the default
 * clocksource selection.
 */
static ssize_t sysfs_override_clocksource(struct device *dev,
					  struct device_attribute *attr,
					  const char *buf, size_t count)
{
	ssize_t ret;

	mutex_lock(&clocksource_mutex);

	/* Copy the (non NUL-terminated) sysfs input into override_name. */
	ret = sysfs_get_uname(buf, override_name, count);
	if (ret >= 0)
		/* Re-run selection so the override takes effect immediately. */
		clocksource_select();

	mutex_unlock(&clocksource_mutex);

	return ret;
}
/**
 * sysfs_unbind_clocksource - interface for manually unbinding clocksource
 * @dev:	unused
 * @attr:	unused
 * @buf:	unused
 * @count:	length of buffer
 *
 * Takes input from sysfs interface for manually unbinding a clocksource.
 */
static ssize_t sysfs_unbind_clocksource(struct device *dev,
					struct device_attribute *attr,
					const char *buf, size_t count)
{
	struct clocksource *cs;
	char name[CS_NAME_LEN];
	ssize_t ret;

	ret = sysfs_get_uname(buf, name, count);
	if (ret < 0)
		return ret;

	/* -ENODEV if no registered clocksource matches @name. */
	ret = -ENODEV;
	mutex_lock(&clocksource_mutex);
	list_for_each_entry(cs, &clocksource_list, list) {
		if (strcmp(cs->name, name))
			continue;
		ret = clocksource_unbind(cs);
		break;
	}
	mutex_unlock(&clocksource_mutex);

	/* sysfs convention: report full consumed length on success. */
	return ret ? ret : count;
}
/**
 * sysfs_show_available_clocksources - sysfs interface for listing clocksource
 * @dev:	unused
 * @attr:	unused
 * @buf:	char buffer to be filled with clocksource list
 *
 * Provides sysfs interface for listing registered clocksources
 */
static ssize_t
sysfs_show_available_clocksources(struct device *dev,
				  struct device_attribute *attr,
				  char *buf)
{
	struct clocksource *src;
	ssize_t count = 0;

	mutex_lock(&clocksource_mutex);
	list_for_each_entry(src, &clocksource_list, list) {
		/*
		 * Don't show non-HRES clocksource if the tick code is
		 * in one shot mode (highres=on or nohz=on)
		 */
		if (!tick_oneshot_mode_active() ||
		    (src->flags & CLOCK_SOURCE_VALID_FOR_HRES))
			/*
			 * scnprintf() (not snprintf()) so @count only grows
			 * by the bytes actually written: snprintf() returns
			 * the would-be length, which on truncation pushes
			 * @count past PAGE_SIZE and returns an invalid
			 * sysfs read size.
			 */
			count += scnprintf(buf + count,
				  max((ssize_t)PAGE_SIZE - count, (ssize_t)0),
				  "%s ", src->name);
	}
	mutex_unlock(&clocksource_mutex);

	count += scnprintf(buf + count,
			   max((ssize_t)PAGE_SIZE - count, (ssize_t)0), "\n");

	return count;
}
/*
 * Sysfs setup bits:
 */
/* read/write: show current clocksource, write a name to override it */
static DEVICE_ATTR(current_clocksource, 0644, sysfs_show_current_clocksources,
		   sysfs_override_clocksource);

/* write-only: write a clocksource name to unbind it */
static DEVICE_ATTR(unbind_clocksource, 0200, NULL, sysfs_unbind_clocksource);

/* read-only: space-separated list of registered clocksources */
static DEVICE_ATTR(available_clocksource, 0444,
		   sysfs_show_available_clocksources, NULL);

static struct bus_type clocksource_subsys = {
	.name = "clocksource",
	.dev_name = "clocksource",
};

/* Single device instance the attributes above are attached to. */
static struct device device_clocksource = {
	.id	= 0,
	.bus	= &clocksource_subsys,
};
/*
 * Register the clocksource subsystem, its device and the three sysfs
 * attribute files. Each step only runs if all previous steps succeeded;
 * the first error is returned.
 */
static int __init init_clocksource_sysfs(void)
{
	int error = subsys_system_register(&clocksource_subsys, NULL);

	if (!error)
		error = device_register(&device_clocksource);
	if (!error)
		error = device_create_file(
				&device_clocksource,
				&dev_attr_current_clocksource);
	if (!error)
		error = device_create_file(&device_clocksource,
					   &dev_attr_unbind_clocksource);
	if (!error)
		error = device_create_file(
				&device_clocksource,
				&dev_attr_available_clocksource);
	return error;
}

device_initcall(init_clocksource_sysfs);
#endif /* CONFIG_SYSFS */
/**
 * boot_override_clocksource - boot clock override
 * @str:	override name
 *
 * Takes a clocksource= boot argument and uses it
 * as the clocksource override name.
 */
static int __init boot_override_clocksource(char* str)
{
	mutex_lock(&clocksource_mutex);
	if (str)
		strlcpy(override_name, str, sizeof(override_name));
	mutex_unlock(&clocksource_mutex);
	/* __setup() handlers return 1 to mark the option as handled. */
	return 1;
}

__setup("clocksource=", boot_override_clocksource);
/**
 * boot_override_clock - Compatibility layer for deprecated boot option
 * @str:	override name
 *
 * DEPRECATED! Takes a clock= boot argument and uses it
 * as the clocksource override name
 */
static int __init boot_override_clock(char* str)
{
	/* Historical alias: clock=pmtmr maps to the acpi_pm clocksource. */
	if (!strcmp(str, "pmtmr")) {
		pr_warn("clock=pmtmr is deprecated - use clocksource=acpi_pm\n");
		return boot_override_clocksource("acpi_pm");
	}
	pr_warn("clock= boot option is deprecated - use clocksource=xyz\n");
	/* Forward everything else unchanged to the clocksource= handler. */
	return boot_override_clocksource(str);
}

__setup("clock=", boot_override_clock);
|
# Copyright 2018 AT&T Corporation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_serialization import jsonutils as json
from tempest.lib.common.utils import data_utils
from tempest.lib.common.utils import test_utils
from tempest.lib import decorators
from patrole_tempest_plugin import rbac_rule_validation
from patrole_tempest_plugin.tests.api.network import rbac_base as base
class FlavorsPluginRbacTest(base.BaseNetworkPluginRbacTest):
    """RBAC tests for the Neutron flavors CRUD API.

    Each test creates/uses a flavor and performs exactly one operation
    while the configured test role is in effect (``override_role``); the
    decorator then validates the outcome against the neutron policy.
    """

    @classmethod
    def resource_setup(cls):
        super(FlavorsPluginRbacTest, cls).resource_setup()
        # A service_type is mandatory for flavor creation; skip the whole
        # class when the deployment exposes no service providers.
        providers = cls.ntp_client.list_service_providers()
        if not providers["service_providers"]:
            raise cls.skipException("No service_providers available.")
        cls.service_type = providers["service_providers"][0]["service_type"]

    @decorators.idempotent_id('2632a61b-831e-4da5-82c8-a5f7d448589b')
    @rbac_rule_validation.action(service="neutron",
                                 rules=["create_flavor"])
    def test_create_flavor(self):
        """Create flavor.

        RBAC test for the neutron "create_flavor" policy
        """
        with self.rbac_utils.override_role(self):
            flavor = self.ntp_client.create_flavor(
                service_type=self.service_type)
        # Cleanup runs with default credentials and tolerates the flavor
        # already being gone.
        self.addCleanup(
            test_utils.call_and_ignore_notfound_exc,
            self.ntp_client.delete_flavor, flavor["flavor"]["id"])

    @decorators.idempotent_id('9c53164c-117d-4b44-a5cb-96f08386513f')
    @rbac_rule_validation.action(service="neutron",
                                 rules=["get_flavor",
                                        "update_flavor"],
                                 expected_error_codes=[404, 403])
    def test_update_flavor(self):
        """Update flavor.

        RBAC test for the neutron "update_flavor" policy
        """
        # Fixture is created outside the overridden role.
        flavor = self.ntp_client.create_flavor(service_type=self.service_type)
        self.addCleanup(
            test_utils.call_and_ignore_notfound_exc,
            self.ntp_client.delete_flavor, flavor["flavor"]["id"])

        name = data_utils.rand_name(self.__class__.__name__ + '-Flavor')
        with self.rbac_utils.override_role(self):
            self.ntp_client.update_flavor(flavor["flavor"]["id"], name=name)

    @decorators.idempotent_id('1de15f9e-5080-4259-ab41-e230bb312de8')
    @rbac_rule_validation.action(service="neutron",
                                 rules=["get_flavor",
                                        "delete_flavor"],
                                 expected_error_codes=[404, 403])
    def test_delete_flavor(self):
        """Delete flavor.

        RBAC test for the neutron "delete_flavor" policy
        """
        flavor = self.ntp_client.create_flavor(service_type=self.service_type)
        self.addCleanup(
            test_utils.call_and_ignore_notfound_exc,
            self.ntp_client.delete_flavor, flavor["flavor"]["id"])

        with self.rbac_utils.override_role(self):
            self.ntp_client.delete_flavor(flavor["flavor"]["id"])

    @decorators.idempotent_id('c2baf35f-e6c1-4833-9114-aadd9b1f6aaa')
    @rbac_rule_validation.action(service="neutron",
                                 rules=["get_flavor"],
                                 expected_error_codes=[404])
    def test_show_flavor(self):
        """Show flavor.

        RBAC test for the neutron "get_flavor" policy
        """
        flavor = self.ntp_client.create_flavor(service_type=self.service_type)
        self.addCleanup(
            test_utils.call_and_ignore_notfound_exc,
            self.ntp_client.delete_flavor, flavor["flavor"]["id"])

        with self.rbac_utils.override_role(self):
            self.ntp_client.show_flavor(flavor["flavor"]["id"])

    @decorators.idempotent_id('ab10bd5d-987e-4255-966f-947670ffd0fa')
    @rbac_rule_validation.action(service="neutron",
                                 rules=["get_flavors"])
    def test_list_flavors(self):
        """List flavors.

        RBAC test for the neutron "get_flavors" policy
        """
        # Create one flavor so the listing is non-trivial.
        flavor = self.ntp_client.create_flavor(service_type=self.service_type)
        self.addCleanup(
            test_utils.call_and_ignore_notfound_exc,
            self.ntp_client.delete_flavor, flavor["flavor"]["id"])

        with self.rbac_utils.override_role(self):
            self.ntp_client.list_flavors()
class FlavorsServiceProfilePluginRbacTest(base.BaseNetworkPluginRbacTest):
    """RBAC tests for associating Neutron flavors with service profiles."""

    @classmethod
    def resource_setup(cls):
        super(FlavorsServiceProfilePluginRbacTest, cls).resource_setup()
        providers = cls.ntp_client.list_service_providers()
        if not providers["service_providers"]:
            raise cls.skipException("No service_providers available.")
        cls.service_type = providers["service_providers"][0]["service_type"]
        # Shared fixtures reused by every test in this class.
        cls.flavor_id = cls.create_flavor()
        cls.service_profile_id = cls.create_service_profile()

    @classmethod
    def create_flavor(cls):
        # Helper (not a test): create a flavor and register class-level
        # cleanup that ignores a 404 if it was already deleted.
        flavor = cls.ntp_client.create_flavor(service_type=cls.service_type)
        flavor_id = flavor["flavor"]["id"]
        cls.addClassResourceCleanup(
            test_utils.call_and_ignore_notfound_exc,
            cls.ntp_client.delete_flavor, flavor_id)
        return flavor_id

    @classmethod
    def create_service_profile(cls):
        # Helper (not a test): create a service profile with minimal
        # metainfo and register class-level cleanup.
        service_profile = cls.ntp_client.create_service_profile(
            metainfo=json.dumps({'foo': 'bar'}))
        service_profile_id = service_profile["service_profile"]["id"]
        cls.addClassResourceCleanup(
            test_utils.call_and_ignore_notfound_exc,
            cls.ntp_client.delete_service_profile, service_profile_id)
        return service_profile_id

    def create_flavor_service_profile(self, flavor_id, service_profile_id):
        # Bind the shared flavor to the shared profile; per-test cleanup
        # unbinds them again (ignoring 404).
        self.ntp_client.create_flavor_service_profile(
            flavor_id, service_profile_id)
        self.addCleanup(
            test_utils.call_and_ignore_notfound_exc,
            self.ntp_client.delete_flavor_service_profile,
            flavor_id, service_profile_id)

    @decorators.idempotent_id('aa84b4c5-0dd6-4c34-aa81-3a76507f9b81')
    @rbac_rule_validation.action(service="neutron",
                                 rules=["create_flavor_service_profile"])
    def test_create_flavor_service_profile(self):
        """Create flavor_service_profile.

        RBAC test for the neutron "create_flavor_service_profile" policy
        """
        with self.rbac_utils.override_role(self):
            self.create_flavor_service_profile(self.flavor_id,
                                               self.service_profile_id)

    @decorators.idempotent_id('3b680d9e-946a-4670-ab7f-0e4576675833')
    @rbac_rule_validation.action(service="neutron",
                                 rules=["delete_flavor_service_profile"])
    def test_delete_flavor_service_profile(self):
        """Delete flavor_service_profile.

        RBAC test for the neutron "delete_flavor_service_profile" policy
        """
        self.create_flavor_service_profile(self.flavor_id,
                                           self.service_profile_id)

        with self.rbac_utils.override_role(self):
            self.ntp_client.delete_flavor_service_profile(
                self.flavor_id, self.service_profile_id)
|
import Drawer from './drawer';
import * as util from './util';
import CanvasEntry from './drawer.canvasentry';
/**
* MultiCanvas renderer for wavesurfer. Is currently the default and sole
* builtin renderer.
*
* A `MultiCanvas` consists of one or more `CanvasEntry` instances, depending
* on the zoom level.
*/
export default class MultiCanvas extends Drawer {
    /**
     * @param {HTMLElement} container The container node of the wavesurfer instance
     * @param {WavesurferParams} params The wavesurfer initialisation options
     */
    constructor(container, params) {
        super(container, params);

        /**
         * @type {number}
         */
        this.maxCanvasWidth = params.maxCanvasWidth;

        /**
         * Element width of a full canvas in CSS pixels (device pixels
         * divided by the pixel ratio).
         *
         * @type {number}
         */
        this.maxCanvasElementWidth = Math.round(
            params.maxCanvasWidth / params.pixelRatio
        );

        /**
         * Whether or not the progress wave is rendered. If the `waveColor`
         * and `progressColor` are the same color it is not.
         *
         * @type {boolean}
         */
        this.hasProgressCanvas = params.waveColor != params.progressColor;

        /**
         * @type {number}
         */
        this.halfPixel = 0.5 / params.pixelRatio;

        /**
         * List of `CanvasEntry` instances.
         *
         * @type {Array}
         */
        this.canvases = [];

        /**
         * @type {HTMLElement}
         */
        this.progressWave = null;

        /**
         * Class used to generate entries.
         *
         * @type {function}
         */
        this.EntryClass = CanvasEntry;

        /**
         * Canvas 2d context attributes.
         *
         * @type {object}
         */
        this.canvasContextAttributes = params.drawingContextAttributes;

        /**
         * Overlap added between entries to prevent vertical white stripes
         * between `canvas` elements.
         *
         * @type {number}
         */
        this.overlap = 2 * Math.ceil(params.pixelRatio / 2);

        /**
         * The radius of the wave bars. Makes bars rounded
         *
         * @type {number}
         */
        this.barRadius = params.barRadius || 0;

        /**
         * Whether to render the waveform vertically. Defaults to false.
         *
         * @type {boolean}
         */
        this.vertical = params.vertical;
    }

    /**
     * Initialize the drawer
     */
    init() {
        this.createWrapper();
        this.createElements();
    }

    /**
     * Create the canvas elements and style them
     *
     */
    createElements() {
        this.progressWave = util.withOrientation(
            this.wrapper.appendChild(document.createElement('wave')),
            this.params.vertical
        );
        this.style(this.progressWave, {
            position: 'absolute',
            zIndex: 3,
            left: 0,
            top: 0,
            bottom: 0,
            overflow: 'hidden',
            width: '0',
            display: 'none',
            boxSizing: 'border-box',
            borderRightStyle: 'solid',
            pointerEvents: 'none'
        });

        this.addCanvas();
        this.updateCursor();
    }

    /**
     * Update cursor style
     */
    updateCursor() {
        // The cursor is drawn as the right border of the progress wave.
        this.style(this.progressWave, {
            borderRightWidth: this.params.cursorWidth + 'px',
            borderRightColor: this.params.cursorColor
        });
    }

    /**
     * Adjust to the updated size by adding or removing canvases
     */
    updateSize() {
        const totalWidth = Math.round(this.width / this.params.pixelRatio);
        const requiredCanvases = Math.ceil(
            totalWidth / (this.maxCanvasElementWidth + this.overlap)
        );

        // add required canvases
        while (this.canvases.length < requiredCanvases) {
            this.addCanvas();
        }

        // remove older existing canvases, if any
        while (this.canvases.length > requiredCanvases) {
            this.removeCanvas();
        }

        let canvasWidth = this.maxCanvasWidth + this.overlap;
        const lastCanvas = this.canvases.length - 1;
        this.canvases.forEach((entry, i) => {
            // the last canvas only gets the remaining width
            if (i == lastCanvas) {
                canvasWidth = this.width - this.maxCanvasWidth * lastCanvas;
            }

            this.updateDimensions(entry, canvasWidth, this.height);

            entry.clearWave();
        });
    }

    /**
     * Add a canvas to the canvas list
     *
     */
    addCanvas() {
        const entry = new this.EntryClass();
        entry.canvasContextAttributes = this.canvasContextAttributes;
        entry.hasProgressCanvas = this.hasProgressCanvas;
        entry.halfPixel = this.halfPixel;
        const leftOffset = this.maxCanvasElementWidth * this.canvases.length;

        // wave
        let wave = util.withOrientation(
            this.wrapper.appendChild(document.createElement('canvas')),
            this.params.vertical
        );
        this.style(wave, {
            position: 'absolute',
            zIndex: 2,
            left: leftOffset + 'px',
            top: 0,
            bottom: 0,
            height: '100%',
            pointerEvents: 'none'
        });
        entry.initWave(wave);

        // progress
        if (this.hasProgressCanvas) {
            let progress = util.withOrientation(
                this.progressWave.appendChild(document.createElement('canvas')),
                this.params.vertical
            );
            this.style(progress, {
                position: 'absolute',
                left: leftOffset + 'px',
                top: 0,
                bottom: 0,
                height: '100%'
            });
            entry.initProgress(progress);
        }

        this.canvases.push(entry);
    }

    /**
     * Pop single canvas from the list
     *
     * No-op when the canvas list is empty.
     */
    removeCanvas() {
        let lastEntry = this.canvases[this.canvases.length - 1];

        // FIX: guard before dereferencing. The previous version accessed
        // lastEntry.wave (and .progress) first and only null-checked
        // lastEntry afterwards, which would throw on an empty list.
        if (!lastEntry) {
            return;
        }

        // wave
        lastEntry.wave.parentElement.removeChild(lastEntry.wave.domElement);

        // progress
        if (this.hasProgressCanvas) {
            lastEntry.progress.parentElement.removeChild(lastEntry.progress.domElement);
        }

        // cleanup
        lastEntry.destroy();
        lastEntry = null;

        this.canvases.pop();
    }

    /**
     * Update the dimensions of a canvas element
     *
     * @param {CanvasEntry} entry Target entry
     * @param {number} width The new width of the element
     * @param {number} height The new height of the element
     */
    updateDimensions(entry, width, height) {
        const elementWidth = Math.round(width / this.params.pixelRatio);
        const totalWidth = Math.round(this.width / this.params.pixelRatio);

        // update canvas dimensions
        entry.updateDimensions(elementWidth, totalWidth, width, height);

        // style element
        this.style(this.progressWave, { display: 'block' });
    }

    /**
     * Clear the whole multi-canvas
     */
    clearWave() {
        util.frame(() => {
            this.canvases.forEach(entry => entry.clearWave());
        })();
    }

    /**
     * Draw a waveform with bars
     *
     * @param {number[]|Number.<Array[]>} peaks Can also be an array of arrays
     * for split channel rendering
     * @param {number} channelIndex The index of the current channel. Normally
     * should be 0. Must be an integer.
     * @param {number} start The x-offset of the beginning of the area that
     * should be rendered
     * @param {number} end The x-offset of the end of the area that should be
     * rendered
     * @returns {void}
     */
    drawBars(peaks, channelIndex, start, end) {
        return this.prepareDraw(
            peaks,
            channelIndex,
            start,
            end,
            ({ absmax, hasMinVals, height, offsetY, halfH, peaks, channelIndex: ch }) => {
                // if drawBars was called within ws.empty we don't pass a start and
                // don't want anything to happen
                if (start === undefined) {
                    return;
                }
                // Skip every other value if there are negatives.
                const peakIndexScale = hasMinVals ? 2 : 1;
                const length = peaks.length / peakIndexScale;
                const bar = this.params.barWidth * this.params.pixelRatio;
                const gap =
                    this.params.barGap === null
                        ? Math.max(this.params.pixelRatio, ~~(bar / 2))
                        : Math.max(
                            this.params.pixelRatio,
                            this.params.barGap * this.params.pixelRatio
                        );
                const step = bar + gap;

                const scale = length / this.width;
                const first = start;
                const last = end;
                let i = first;

                for (i; i < last; i += step) {
                    const peak =
                        peaks[Math.floor(i * scale * peakIndexScale)] || 0;
                    let h = Math.round((peak / absmax) * halfH);

                    /* in case of silences, allow the user to specify that we
                     * always draw *something* (normally a 1px high bar) */
                    if (h == 0 && this.params.barMinHeight) {
                        h = this.params.barMinHeight;
                    }

                    this.fillRect(
                        i + this.halfPixel,
                        halfH - h + offsetY,
                        bar + this.halfPixel,
                        h * 2,
                        this.barRadius,
                        ch
                    );
                }
            }
        );
    }

    /**
     * Draw a waveform
     *
     * @param {number[]|Number.<Array[]>} peaks Can also be an array of arrays
     * for split channel rendering
     * @param {number} channelIndex The index of the current channel. Normally
     * should be 0
     * @param {number?} start The x-offset of the beginning of the area that
     * should be rendered (If this isn't set only a flat line is rendered)
     * @param {number?} end The x-offset of the end of the area that should be
     * rendered
     * @returns {void}
     */
    drawWave(peaks, channelIndex, start, end) {
        return this.prepareDraw(
            peaks,
            channelIndex,
            start,
            end,
            ({ absmax, hasMinVals, height, offsetY, halfH, peaks, channelIndex }) => {
                // mirror positive-only peaks so the wave is reflected
                // around the median line
                if (!hasMinVals) {
                    const reflectedPeaks = [];
                    const len = peaks.length;
                    let i = 0;
                    for (i; i < len; i++) {
                        reflectedPeaks[2 * i] = peaks[i];
                        reflectedPeaks[2 * i + 1] = -peaks[i];
                    }
                    peaks = reflectedPeaks;
                }

                // if drawWave was called within ws.empty we don't pass a start and
                // end and simply want a flat line
                if (start !== undefined) {
                    this.drawLine(peaks, absmax, halfH, offsetY, start, end, channelIndex);
                }

                // always draw a median line
                this.fillRect(
                    0,
                    halfH + offsetY - this.halfPixel,
                    this.width,
                    this.halfPixel,
                    this.barRadius,
                    channelIndex
                );
            }
        );
    }

    /**
     * Tell the canvas entries to render their portion of the waveform
     *
     * @param {number[]} peaks Peaks data
     * @param {number} absmax Maximum peak value (absolute)
     * @param {number} halfH Half the height of the waveform
     * @param {number} offsetY Offset to the top
     * @param {number} start The x-offset of the beginning of the area that
     * should be rendered
     * @param {number} end The x-offset of the end of the area that
     * should be rendered
     * @param {channelIndex} channelIndex The channel index of the line drawn
     */
    drawLine(peaks, absmax, halfH, offsetY, start, end, channelIndex) {
        // per-channel colors override the global ones when provided
        const { waveColor, progressColor } = this.params.splitChannelsOptions.channelColors[channelIndex] || {};
        this.canvases.forEach((entry, i) => {
            this.setFillStyles(entry, waveColor, progressColor);
            this.applyCanvasTransforms(entry, this.params.vertical);
            entry.drawLines(peaks, absmax, halfH, offsetY, start, end);
        });
    }

    /**
     * Draw a rectangle on the multi-canvas
     *
     * @param {number} x X-position of the rectangle
     * @param {number} y Y-position of the rectangle
     * @param {number} width Width of the rectangle
     * @param {number} height Height of the rectangle
     * @param {number} radius Radius of the rectangle
     * @param {channelIndex} channelIndex The channel index of the bar drawn
     */
    fillRect(x, y, width, height, radius, channelIndex) {
        // determine which canvas entries the rectangle spans and clip the
        // rectangle to each entry
        const startCanvas = Math.floor(x / this.maxCanvasWidth);
        const endCanvas = Math.min(
            Math.ceil((x + width) / this.maxCanvasWidth) + 1,
            this.canvases.length
        );
        let i = startCanvas;
        for (i; i < endCanvas; i++) {
            const entry = this.canvases[i];
            const leftOffset = i * this.maxCanvasWidth;

            const intersection = {
                x1: Math.max(x, i * this.maxCanvasWidth),
                y1: y,
                x2: Math.min(
                    x + width,
                    i * this.maxCanvasWidth + entry.wave.width
                ),
                y2: y + height
            };

            if (intersection.x1 < intersection.x2) {
                const { waveColor, progressColor } = this.params.splitChannelsOptions.channelColors[channelIndex] || {};
                this.setFillStyles(entry, waveColor, progressColor);
                this.applyCanvasTransforms(entry, this.params.vertical);

                entry.fillRects(
                    intersection.x1 - leftOffset,
                    intersection.y1,
                    intersection.x2 - intersection.x1,
                    intersection.y2 - intersection.y1,
                    radius
                );
            }
        }
    }

    /**
     * Returns whether to hide the channel from being drawn based on params.
     *
     * @param {number} channelIndex The index of the current channel.
     * @returns {bool} True to hide the channel, false to draw.
     */
    hideChannel(channelIndex) {
        return this.params.splitChannels && this.params.splitChannelsOptions.filterChannels.includes(channelIndex);
    }

    /**
     * Performs preparation tasks and calculations which are shared by `drawBars`
     * and `drawWave`
     *
     * @param {number[]|Number.<Array[]>} peaks Can also be an array of arrays for
     * split channel rendering
     * @param {number} channelIndex The index of the current channel. Normally
     * should be 0
     * @param {number?} start The x-offset of the beginning of the area that
     * should be rendered. If this isn't set only a flat line is rendered
     * @param {number?} end The x-offset of the end of the area that should be
     * rendered
     * @param {function} fn The render function to call, e.g. `drawWave`
     * @param {number} drawIndex The index of the current channel after filtering.
     * @param {number?} normalizedMax Maximum modulation value across channels for use with relativeNormalization. Ignored when undefined
     * @returns {void}
     */
    prepareDraw(peaks, channelIndex, start, end, fn, drawIndex, normalizedMax) {
        return util.frame(() => {
            // Split channels and call this function with the channelIndex set
            if (peaks[0] instanceof Array) {
                const channels = peaks;

                if (this.params.splitChannels) {
                    const filteredChannels = channels.filter((c, i) => !this.hideChannel(i));
                    if (!this.params.splitChannelsOptions.overlay) {
                        this.setHeight(
                            Math.max(filteredChannels.length, 1) *
                                this.params.height *
                                this.params.pixelRatio
                        );
                    }

                    let overallAbsMax;
                    if (this.params.splitChannelsOptions && this.params.splitChannelsOptions.relativeNormalization) {
                        // calculate maximum peak across channels to use for normalization
                        overallAbsMax = util.max(channels.map((channelPeaks => util.absMax(channelPeaks))));
                    }

                    return channels.forEach((channelPeaks, i) =>
                        this.prepareDraw(channelPeaks, i, start, end, fn, filteredChannels.indexOf(channelPeaks), overallAbsMax)
                    );
                }
                peaks = channels[0];
            }

            // Return and do not draw channel peaks if hidden.
            if (this.hideChannel(channelIndex)) {
                return;
            }

            // calculate maximum modulation value, either from the barHeight
            // parameter or if normalize=true from the largest value in the peak
            // set
            let absmax = 1 / this.params.barHeight;
            if (this.params.normalize) {
                absmax = normalizedMax === undefined ? util.absMax(peaks) : normalizedMax;
            } else if(this.params.peakMax != 0) {
                absmax = this.params.peakMax;
            }

            // Bar wave draws the bottom only as a reflection of the top,
            // so we don't need negative values
            const hasMinVals = [].some.call(peaks, val => val < 0);
            const height = this.params.height * this.params.pixelRatio;
            const halfH = height / 2;

            let offsetY = height * drawIndex || 0;

            // Override offsetY if overlay is true
            if (this.params.splitChannelsOptions && this.params.splitChannelsOptions.overlay) {
                offsetY = 0;
            }

            return fn({
                absmax: absmax,
                hasMinVals: hasMinVals,
                height: height,
                offsetY: offsetY,
                halfH: halfH,
                peaks: peaks,
                channelIndex: channelIndex
            });
        })();
    }

    /**
     * Set the fill styles for a certain entry (wave and progress)
     *
     * @param {CanvasEntry} entry Target entry
     * @param {string} waveColor Wave color to draw this entry
     * @param {string} progressColor Progress color to draw this entry
     */
    setFillStyles(entry, waveColor = this.params.waveColor, progressColor = this.params.progressColor) {
        entry.setFillStyles(waveColor, progressColor);
    }

    /**
     * Set the canvas transforms for a certain entry (wave and progress)
     *
     * @param {CanvasEntry} entry Target entry
     * @param {boolean} vertical Whether to render the waveform vertically
     */
    applyCanvasTransforms(entry, vertical = false) {
        entry.applyCanvasTransforms(vertical);
    }

    /**
     * Return image data of the multi-canvas
     *
     * When using a `type` of `'blob'`, this will return a `Promise`.
     *
     * @param {string} format='image/png' An optional value of a format type.
     * @param {number} quality=0.92 An optional value between 0 and 1.
     * @param {string} type='dataURL' Either 'dataURL' or 'blob'.
     * @return {string|string[]|Promise} When using the default `'dataURL'`
     * `type` this returns a single data URL or an array of data URLs,
     * one for each canvas. When using the `'blob'` `type` this returns a
     * `Promise` that resolves with an array of `Blob` instances, one for each
     * canvas.
     */
    getImage(format, quality, type) {
        if (type === 'blob') {
            return Promise.all(
                this.canvases.map(entry => {
                    return entry.getImage(format, quality, type);
                })
            );
        } else if (type === 'dataURL') {
            let images = this.canvases.map(entry =>
                entry.getImage(format, quality, type)
            );
            return images.length > 1 ? images : images[0];
        }
    }

    /**
     * Render the new progress
     *
     * @param {number} position X-offset of progress position in pixels
     */
    updateProgress(position) {
        this.style(this.progressWave, { width: position + 'px' });
    }
}
|
/**
* Copyright 2016 The AMP HTML Authors. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS-IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
'use strict';
/*eslint "max-len": 0*/
// It is often OK to add things to the allowlist, but make sure to highlight
// this in review.
exports.rules = [
// Global rules
{
filesMatching: '**/*.js',
mustNotDependOn: 'src/video-iframe-integration.js',
allowlist: [
// Do not extend this allowlist.
// video-iframe-integration.js is an entry point.
],
},
{
filesMatching: '**/*.js',
mustNotDependOn: 'src/sanitizer.js',
allowlist: [
// DEPRECATED: Use src/purifier.js instead. @choumx for questions.
'extensions/amp-mustache/0.1/amp-mustache.js->src/sanitizer.js',
],
},
{
filesMatching: '**/*.js',
mustNotDependOn: 'src/purifier/**/*.js',
allowlist: [
// WARNING: Importing purifier.js will also bundle DOMPurify (13KB).
'extensions/amp-list/0.1/amp-list.js->src/purifier/sanitation.js',
'extensions/amp-mustache/0.2/amp-mustache.js->src/purifier/purifier.js',
'extensions/amp-script/0.1/amp-script.js->src/purifier/purifier.js',
'src/purifier/purifier.js->src/purifier/sanitation.js',
'src/sanitizer.js->src/purifier/sanitation.js',
],
},
{
filesMatching: '**/*.js',
mustNotDependOn: 'src/module.js',
allowlist: [
'extensions/amp-date-picker/0.1/**->src/module.js',
'extensions/amp-inputmask/0.1/**->src/module.js',
],
},
{
filesMatching: '**/*.js',
mustNotDependOn: 'third_party/**/*.js',
allowlist: [
'extensions/amp-autocomplete/**/*.js->third_party/fuzzysearch/index.js',
'extensions/amp-crypto-polyfill/**/*.js->third_party/closure-library/sha384-generated.js',
'extensions/amp-list/**->third_party/set-dom/set-dom.js',
'extensions/amp-mustache/**/amp-mustache.js->third_party/mustache/mustache.js',
'extensions/amp-recaptcha-input/**/*.js->third_party/amp-toolbox-cache-url/dist/amp-toolbox-cache-url.esm.js',
'extensions/amp-resize-observer-polyfill/**/*.js->third_party/resize-observer-polyfill/ResizeObserver.install.js',
'extensions/amp-story-360/0.1/amp-story-360.js->third_party/zuho/zuho.js',
'extensions/amp-subscriptions-google/**/*.js->third_party/subscriptions-project/swg.js',
'extensions/amp-subscriptions-google/**/*.js->third_party/subscriptions-project/swg-gaa.js',
'extensions/amp-subscriptions/**/*.js->third_party/subscriptions-project/aes_gcm.js',
'extensions/amp-subscriptions/**/*.js->third_party/subscriptions-project/config.js',
'extensions/amp-google-assistant-assistjs/**/*.js->third_party/closure-responding-channel/closure-bundle.js',
'src/core/dom/css-selectors.js->third_party/css-escape/css-escape.js',
'src/sanitizer.js->third_party/caja/html-sanitizer.js',
'src/shadow-embed.js->third_party/webcomponentsjs/ShadowCSS.js',
],
},
// Rules for 3p
{
filesMatching: '3p/**/*.js',
mustNotDependOn: 'src/**/*.js',
allowlist: [
'3p/**->src/core/constants/amp-events.js',
'3p/**->src/core/data-structures/observable.js',
'3p/**->src/core/data-structures/promise.js',
'3p/**->src/core/error/index.js',
'3p/**->src/core/types/enum.js',
'3p/**->src/core/types/function/index.js',
'3p/**->src/core/types/index.js',
'3p/**->src/core/types/object/index.js',
'3p/**->src/core/types/string/index.js',
'3p/**->src/core/types/string/url.js',
'3p/**->src/log.js',
'3p/**->src/core/dom/style.js',
'3p/**->src/url.js',
'3p/**->src/config.js',
'3p/**->src/mode.js',
'3p/**->src/core/types/object/json.js',
'3p/**->src/3p-frame-messaging.js',
'3p/**->src/internal-version.js',
'3p/polyfills.js->src/polyfills/math-sign.js',
'3p/polyfills.js->src/polyfills/object-assign.js',
'3p/polyfills.js->src/polyfills/object-values.js',
'3p/polyfills.js->src/polyfills/string-starts-with.js',
'3p/polyfills.js->src/polyfills/promise.js',
'3p/messaging.js->src/event-helper.js',
'3p/bodymovinanimation.js->src/event-helper.js',
'3p/iframe-messaging-client.js->src/event-helper.js',
'3p/viqeoplayer.js->src/event-helper.js',
],
},
{
filesMatching: '3p/**/*.js',
mustNotDependOn: 'extensions/**/*.js',
},
// Rules for ads
{
filesMatching: 'ads/**/*.js',
mustNotDependOn: 'src/**/*.js',
allowlist: [
'ads/**->src/core/dom/fingerprint.js',
'ads/**->src/core/constants/consent-state.js',
'ads/**->src/core/error/index.js',
'ads/**->src/core/types/array.js',
'ads/**->src/core/types/function/index.js',
'ads/**->src/core/types/index.js',
'ads/**->src/core/types/object/index.js',
'ads/**->src/core/types/string/index.js',
'ads/**->src/core/types/string/url.js',
'ads/**->src/log.js',
'ads/**->src/mode.js',
'ads/**->src/url.js',
'ads/**->src/core/dom/static-template.js',
'ads/**->src/core/dom/style.js',
'ads/**->src/internal-version.js',
// ads/google/a4a doesn't contain 3P ad code and should probably move
// somewhere else at some point
'ads/google/a4a/**->src/ad-cid.js',
'ads/google/a4a/**->src/consent.js',
'ads/google/a4a/**->src/amp-element-helpers.js',
'ads/google/a4a/**->src/experiments/index.js',
'ads/google/a4a/**->src/services.js',
'ads/google/a4a/utils.js->src/service/variable-source.js',
'ads/google/a4a/utils.js->src/ini-load.js',
'ads/google/a4a/utils.js->src/core/dom/page-layout-box.js',
// Some ads need to depend on json.js
'ads/**->src/core/types/object/json.js',
// IMA, similar to other non-Ad 3Ps above, needs access to event-helper
'ads/google/ima/ima-video.js->src/event-helper.js',
],
},
{
filesMatching: 'ads/**/*.js',
mustNotDependOn: 'extensions/**/*.js',
allowlist: [
// See todo note in ads/_a4a-config.js
'ads/google/a4a/utils.js->extensions/amp-geo/0.1/amp-geo-in-group.js',
],
},
// Rules for extensions and main src.
{
filesMatching: '{src,extensions}/**/*.js',
mustNotDependOn: '3p/**/*.js',
allowlist: [
'src/inabox/inabox-iframe-messaging-client.js->3p/iframe-messaging-client.js',
],
},
// Rules for extensions.
// Note: For the multipass build to correctly include depended on code, you
// need to add the depended on code to `CLOSURE_SRC_GLOBS` in
// build-system/compile/sources.js.
{
// Extensions can't depend on other extensions.
filesMatching: 'extensions/**/*.js',
mustNotDependOn: 'extensions/**/*.js',
allowlist: [
// a4a ads depend on a4a.
'extensions/amp-ad-network-nws-impl/0.1/amp-ad-network-nws-impl.js->extensions/amp-a4a/0.1/amp-a4a.js',
'extensions/amp-ad-network-fake-impl/0.1/amp-ad-network-fake-impl.js->extensions/amp-a4a/0.1/amp-a4a.js',
'extensions/amp-ad-network-adzerk-impl/0.1/amp-ad-network-adzerk-impl.js->extensions/amp-a4a/0.1/amp-a4a.js',
'extensions/amp-ad-network-smartads-impl/0.1/amp-ad-network-smartads-impl.js->extensions/amp-a4a/0.1/amp-a4a.js',
'extensions/amp-ad-network-doubleclick-impl/0.1/sra-utils.js->extensions/amp-a4a/0.1/amp-a4a.js',
'extensions/amp-ad-network-adsense-impl/0.1/amp-ad-network-adsense-impl.js->extensions/amp-a4a/0.1/amp-a4a.js',
'extensions/amp-ad-network-doubleclick-impl/0.1/amp-ad-network-doubleclick-impl.js->extensions/amp-a4a/0.1/amp-a4a.js',
'extensions/amp-ad-network-oblivki-impl/0.1/amp-ad-network-oblivki-impl.js->extensions/amp-a4a/0.1/amp-a4a.js',
'extensions/amp-ad-network-valueimpression-impl/0.1/amp-ad-network-valueimpression-impl.js->extensions/amp-a4a/0.1/amp-a4a.js',
// A4A impls importing amp fast fetch header name
'extensions/amp-ad-network-adsense-impl/0.1/amp-ad-network-adsense-impl.js->extensions/amp-a4a/0.1/signature-verifier.js',
'extensions/amp-ad-network-doubleclick-impl/0.1/amp-ad-network-doubleclick-impl.js->extensions/amp-a4a/0.1/signature-verifier.js',
      // And a few more things depend on a4a.
'extensions/amp-ad-custom/0.1/amp-ad-custom.js->extensions/amp-a4a/0.1/amp-ad-network-base.js',
'extensions/amp-ad-custom/0.1/amp-ad-custom.js->extensions/amp-a4a/0.1/amp-ad-type-defs.js',
'extensions/amp-ad-custom/0.1/amp-ad-custom.js->extensions/amp-a4a/0.1/name-frame-renderer.js',
'extensions/amp-ad-custom/0.1/amp-ad-custom.js->extensions/amp-a4a/0.1/template-renderer.js',
'extensions/amp-ad-custom/0.1/amp-ad-custom.js->extensions/amp-a4a/0.1/template-validator.js',
'extensions/amp-ad-network-adzerk-impl/0.1/amp-ad-network-adzerk-impl.js->extensions/amp-a4a/0.1/amp-ad-template-helper.js',
'extensions/amp-ad-network-adzerk-impl/0.1/amp-ad-network-adzerk-impl.js->extensions/amp-a4a/0.1/amp-ad-type-defs.js',
'extensions/amp-ad-network-adzerk-impl/0.1/amp-ad-network-adzerk-impl.js->extensions/amp-a4a/0.1/amp-ad-utils.js',
'extensions/amp-ad-network-doubleclick-impl/0.1/amp-ad-network-doubleclick-impl.js->extensions/amp-a4a/0.1/refresh-manager.js',
'extensions/amp-ad-network-valueimpression-impl/0.1/amp-ad-network-valueimpression-impl.js->extensions/amp-a4a/0.1/refresh-manager.js',
// AMP access depends on AMP access
'extensions/amp-access-scroll/0.1/scroll-impl.js->extensions/amp-access/0.1/amp-access-client.js',
// Ads needs concurrent loading
'extensions/amp-ad-network-adsense-impl/0.1/amp-ad-network-adsense-impl.js->extensions/amp-ad/0.1/concurrent-load.js',
'extensions/amp-ad-network-doubleclick-impl/0.1/amp-ad-network-doubleclick-impl.js->extensions/amp-ad/0.1/concurrent-load.js',
'extensions/amp-a4a/0.1/amp-a4a.js->extensions/amp-ad/0.1/concurrent-load.js',
// Ads needs iframe transports
'extensions/amp-ad-exit/0.1/config.js->extensions/amp-analytics/0.1/iframe-transport-vendors.js',
// Amp carousel (and friends) depending on base carousel
'extensions/amp-carousel/0.2/amp-carousel.js->extensions/amp-base-carousel/0.1/action-source.js',
'extensions/amp-carousel/0.2/amp-carousel.js->extensions/amp-base-carousel/0.1/carousel.js',
'extensions/amp-carousel/0.2/amp-carousel.js->extensions/amp-base-carousel/0.1/carousel-events.js',
'extensions/amp-carousel/0.2/amp-carousel.js->extensions/amp-base-carousel/0.1/child-layout-manager.js',
'extensions/amp-inline-gallery/0.1/amp-inline-gallery.js->extensions/amp-base-carousel/0.1/carousel-events.js',
'extensions/amp-inline-gallery/0.1/amp-inline-gallery-thumbnails.js->extensions/amp-base-carousel/0.1/carousel-events.js',
'extensions/amp-inline-gallery/1.0/base-element.js->extensions/amp-base-carousel/1.0/carousel-props.js',
'extensions/amp-inline-gallery/1.0/amp-inline-gallery-pagination.js->extensions/amp-base-carousel/1.0/carousel-props.js',
'extensions/amp-inline-gallery/1.0/component.js->extensions/amp-base-carousel/1.0/carousel-context.js',
'extensions/amp-inline-gallery/1.0/pagination.js->extensions/amp-base-carousel/1.0/carousel-context.js',
'extensions/amp-inline-gallery/1.0/amp-inline-gallery-thumbnails.js->extensions/amp-base-carousel/1.0/component.jss.js',
'extensions/amp-inline-gallery/1.0/amp-inline-gallery-thumbnails.js->extensions/amp-base-carousel/1.0/carousel-props.js',
'extensions/amp-inline-gallery/1.0/thumbnails.js->extensions/amp-base-carousel/1.0/component.js',
'extensions/amp-inline-gallery/1.0/thumbnails.js->extensions/amp-base-carousel/1.0/carousel-context.js',
'extensions/amp-stream-gallery/0.1/amp-stream-gallery.js->extensions/amp-base-carousel/0.1/action-source.js',
'extensions/amp-stream-gallery/0.1/amp-stream-gallery.js->extensions/amp-base-carousel/0.1/carousel.js',
'extensions/amp-stream-gallery/0.1/amp-stream-gallery.js->extensions/amp-base-carousel/0.1/carousel-events.js',
'extensions/amp-stream-gallery/0.1/amp-stream-gallery.js->extensions/amp-base-carousel/0.1/child-layout-manager.js',
'extensions/amp-stream-gallery/0.1/amp-stream-gallery.js->extensions/amp-base-carousel/0.1/responsive-attributes.js',
'extensions/amp-stream-gallery/1.0/base-element.js->extensions/amp-base-carousel/1.0/component.jss.js',
'extensions/amp-stream-gallery/1.0/component.js->extensions/amp-base-carousel/1.0/component.js',
// Autolightboxing dependencies
'extensions/amp-base-carousel/1.0/component.js->extensions/amp-lightbox-gallery/1.0/component.js',
'extensions/amp-base-carousel/1.0/scroller.js->extensions/amp-lightbox-gallery/1.0/context.js',
'extensions/amp-lightbox-gallery/1.0/provider.js->extensions/amp-lightbox/1.0/component.js',
// Facebook components
'extensions/amp-facebook/1.0/amp-facebook.js->extensions/amp-facebook/0.1/facebook-loader.js',
'extensions/amp-facebook-page/0.1/amp-facebook-page.js->extensions/amp-facebook/0.1/facebook-loader.js',
'extensions/amp-facebook-comments/0.1/amp-facebook-comments.js->extensions/amp-facebook/0.1/facebook-loader.js',
'extensions/amp-facebook-comments/1.0/amp-facebook-comments.js->extensions/amp-facebook/0.1/facebook-loader.js',
'extensions/amp-facebook-comments/1.0/base-element.js->extensions/amp-facebook/1.0/facebook-base-element.js',
'extensions/amp-facebook-like/1.0/amp-facebook-like.js->extensions/amp-facebook/0.1/facebook-loader.js',
'extensions/amp-facebook-like/1.0/base-element.js->extensions/amp-facebook/1.0/facebook-base-element.js',
'extensions/amp-facebook-page/1.0/amp-facebook-page.js->extensions/amp-facebook/0.1/facebook-loader.js',
'extensions/amp-facebook-page/1.0/base-element.js->extensions/amp-facebook/1.0/facebook-base-element.js',
// VideoBaseElement, VideoIframe and VideoWrapper are meant to be shared.
'extensions/**->extensions/amp-video/1.0/base-element.js',
'extensions/**->extensions/amp-video/1.0/video-iframe.js',
// <amp-video-iframe> versions share this message API definition.
'extensions/amp-video-iframe/**->extensions/amp-video-iframe/amp-video-iframe-api.js',
// <amp-vimeo> versions share this message API definition.
'extensions/amp-vimeo/**->extensions/amp-vimeo/vimeo-api.js',
// Amp geo in group enum
'extensions/amp-a4a/0.1/amp-a4a.js->extensions/amp-geo/0.1/amp-geo-in-group.js',
'extensions/amp-consent/0.1/consent-config.js->extensions/amp-geo/0.1/amp-geo-in-group.js',
'extensions/amp-user-notification/0.1/amp-user-notification.js->extensions/amp-geo/0.1/amp-geo-in-group.js',
// AMP Story
'extensions/amp-story/1.0/animation-types.js->extensions/amp-animation/0.1/web-animation-types.js',
// AMP Story 360
'extensions/amp-story-360/0.1/amp-story-360.js->extensions/amp-story/1.0/amp-story-store-service.js',
'extensions/amp-story-360/0.1/amp-story-360.js->extensions/amp-story/1.0/utils.js',
// AMP Story Panning Media
'extensions/amp-story-panning-media/0.1/amp-story-panning-media.js->extensions/amp-story/1.0/amp-story-store-service.js',
// Story ads
'extensions/amp-story-auto-ads/0.1/algorithm-count-pages.js->extensions/amp-story/1.0/amp-story-store-service.js',
'extensions/amp-story-auto-ads/0.1/algorithm-predetermined.js->extensions/amp-story/1.0/amp-story-store-service.js',
'extensions/amp-story-auto-ads/0.1/amp-story-auto-ads.js->extensions/amp-story/1.0/amp-story-store-service.js',
'extensions/amp-story-auto-ads/0.1/story-ad-page.js->extensions/amp-story/1.0/amp-story-store-service.js',
'extensions/amp-story-auto-ads/0.1/story-ad-page-manager.js->extensions/amp-story/1.0/amp-story-store-service.js',
'extensions/amp-story-auto-ads/0.1/amp-story-auto-ads.js->extensions/amp-story/1.0/events.js',
// TODO(#24080) Remove this when story ads have full ad network support.
'extensions/amp-story-auto-ads/0.1/story-ad-page.js->extensions/amp-ad-exit/0.1/config.js',
// TODO(ccordry): remove this after createShadowRootWithStyle is moved to src
'extensions/amp-story-auto-ads/0.1/amp-story-auto-ads.js->extensions/amp-story/1.0/utils.js',
'extensions/amp-story-auto-ads/0.1/story-ad-ui.js->extensions/amp-story/1.0/utils.js',
// Story education
'extensions/amp-story-education/0.1/amp-story-education.js->extensions/amp-story/1.0/amp-story-store-service.js',
'extensions/amp-story-education/0.1/amp-story-education.js->extensions/amp-story/1.0/utils.js',
'extensions/amp-story-education/0.1/amp-story-education.js->extensions/amp-story/1.0/amp-story-localization-service.js',
// Interactive components that depend on story functionality.
'extensions/amp-story-interactive/0.1/amp-story-interactive-abstract.js->extensions/amp-story/1.0/amp-story-store-service.js',
'extensions/amp-story-interactive/0.1/amp-story-interactive-abstract.js->extensions/amp-story/1.0/story-analytics.js',
'extensions/amp-story-interactive/0.1/amp-story-interactive-abstract.js->extensions/amp-story/1.0/utils.js',
'extensions/amp-story-interactive/0.1/amp-story-interactive-abstract.js->extensions/amp-story/1.0/variable-service.js',
'extensions/amp-story-interactive/0.1/amp-story-interactive-results.js->extensions/amp-story/1.0/amp-story-store-service.js',
// Subscriptions.
'extensions/amp-subscriptions/0.1/expr.js->extensions/amp-access/0.1/access-expr.js',
'extensions/amp-subscriptions/0.1/local-subscription-platform-iframe.js->extensions/amp-access/0.1/iframe-api/messenger.js',
'extensions/amp-subscriptions/0.1/viewer-subscription-platform.js->extensions/amp-access/0.1/jwt.js',
'extensions/amp-subscriptions/0.1/actions.js->extensions/amp-access/0.1/login-dialog.js',
'extensions/amp-subscriptions-google/0.1/amp-subscriptions-google.js->extensions/amp-subscriptions/0.1/analytics.js',
'extensions/amp-subscriptions-google/0.1/amp-subscriptions-google.js->extensions/amp-subscriptions/0.1/doc-impl.js',
'extensions/amp-subscriptions-google/0.1/amp-subscriptions-google.js->extensions/amp-subscriptions/0.1/entitlement.js',
'extensions/amp-subscriptions-google/0.1/amp-subscriptions-google.js->extensions/amp-subscriptions/0.1/constants.js',
'extensions/amp-subscriptions-google/0.1/amp-subscriptions-google.js->extensions/amp-subscriptions/0.1/url-builder.js',
// amp-smartlinks depends on amp-skimlinks/link-rewriter
'extensions/amp-smartlinks/0.1/amp-smartlinks.js->extensions/amp-skimlinks/0.1/link-rewriter/link-rewriter-manager.js',
'extensions/amp-smartlinks/0.1/linkmate.js->extensions/amp-skimlinks/0.1/link-rewriter/two-steps-response.js',
],
},
{
filesMatching: 'extensions/**/*.js',
mustNotDependOn: 'src/service/**/*.js',
allowlist: [
'extensions/amp-a4a/0.1/a4a-variable-source.js->' +
'src/service/variable-source.js',
'extensions/amp-a4a/0.1/amp-a4a.js->' +
'src/service/url-replacements-impl.js',
// Real time config.
'extensions/amp-a4a/0.1/amp-a4a.js->' +
'src/service/real-time-config/real-time-config-impl.js',
// Parsing extension urls.
'extensions/amp-a4a/0.1/head-validation.js->' +
'src/service/extension-script.js',
'extensions/amp-a4a/0.1/amp-ad-utils.js->' +
'src/service/extension-script.js',
'extensions/amp-live-list/0.1/live-list-manager.js->' +
'src/service/extension-script.js',
'extensions/amp-video/0.1/amp-video.js->' +
'src/service/video-manager-impl.js',
'extensions/amp-video-iframe/0.1/amp-video-iframe.js->' +
'src/service/video-manager-impl.js',
'extensions/amp-ooyala-player/0.1/amp-ooyala-player.js->' +
'src/service/video-manager-impl.js',
'extensions/amp-youtube/0.1/amp-youtube.js->' +
'src/service/video-manager-impl.js',
'extensions/amp-viqeo-player/0.1/amp-viqeo-player.js->' +
'src/service/video-manager-impl.js',
'extensions/amp-brightcove/0.1/amp-brightcove.js->' +
'src/service/video-manager-impl.js',
'extensions/amp-powr-player/0.1/amp-powr-player.js->' +
'src/service/video-manager-impl.js',
'extensions/amp-dailymotion/0.1/amp-dailymotion.js->' +
'src/service/video-manager-impl.js',
'extensions/amp-brid-player/0.1/amp-brid-player.js->' +
'src/service/video-manager-impl.js',
'extensions/amp-jwplayer/0.1/amp-jwplayer.js->' +
'src/service/video-manager-impl.js',
'extensions/amp-gfycat/0.1/amp-gfycat.js->' +
'src/service/video-manager-impl.js',
'extensions/amp-a4a/0.1/friendly-frame-util.js->' +
'src/service/url-replacements-impl.js',
'extensions/amp-nexxtv-player/0.1/amp-nexxtv-player.js->' +
'src/service/video-manager-impl.js',
'extensions/amp-3q-player/0.1/amp-3q-player.js->' +
'src/service/video-manager-impl.js',
'extensions/amp-ima-video/0.1/amp-ima-video.js->' +
'src/service/video-manager-impl.js',
'extensions/amp-minute-media-player/0.1/amp-minute-media-player.js->' +
'src/service/video-manager-impl.js',
'extensions/amp-redbull-player/0.1/amp-redbull-player.js->' +
'src/service/video-manager-impl.js',
'extensions/amp-vimeo/0.1/amp-vimeo.js->' +
'src/service/video-manager-impl.js',
'extensions/amp-wistia-player/0.1/amp-wistia-player.js->' +
'src/service/video-manager-impl.js',
'extensions/amp-delight-player/0.1/amp-delight-player.js->' +
'src/service/video-manager-impl.js',
'extensions/amp-position-observer/0.1/amp-position-observer.js->' +
'src/service/position-observer/position-observer-impl.js',
'extensions/amp-position-observer/0.1/amp-position-observer.js->' +
'src/service/position-observer/position-observer-worker.js',
'extensions/amp-fx-collection/0.1/providers/fx-provider.js->' +
'src/service/position-observer/position-observer-impl.js',
'extensions/amp-fx-collection/0.1/providers/fx-provider.js->' +
'src/service/position-observer/position-observer-worker.js',
'extensions/amp-analytics/0.1/cookie-writer.js->' +
'src/service/cid-impl.js',
'extensions/amp-consent/0.1/cookie-writer.js->' +
'src/service/cid-impl.js',
'extensions/amp-next-page/0.1/next-page-service.js->' +
'src/service/position-observer/position-observer-impl.js',
'extensions/amp-next-page/0.1/next-page-service.js->' +
'src/service/position-observer/position-observer-worker.js',
'extensions/amp-next-page/1.0/visibility-observer.js->' +
'src/service/position-observer/position-observer-worker.js',
'extensions/amp-next-page/1.0/visibility-observer.js->' +
'src/service/position-observer/position-observer-impl.js',
'extensions/amp-user-notification/0.1/amp-user-notification.js->' +
'src/service/notification-ui-manager.js',
'extensions/amp-consent/0.1/amp-consent.js->' +
'src/service/notification-ui-manager.js',
// Accessing USER_INTERACTED constant:
'extensions/amp-story/1.0/page-advancement.js->' +
'src/service/action-impl.js',
'extensions/amp-ad-network-adsense-impl/0.1/amp-ad-network-adsense-impl.js->' +
'src/service/navigation.js',
'extensions/amp-ad-network-doubleclick-impl/0.1/amp-ad-network-doubleclick-impl.js->' +
'src/service/navigation.js',
'extensions/amp-mowplayer/0.1/amp-mowplayer.js->' +
'src/service/video-manager-impl.js',
'extensions/amp-analytics/0.1/linker-manager.js->' +
'src/service/navigation.js',
'extensions/amp-skimlinks/0.1/link-rewriter/link-rewriter-manager.js->' +
'src/service/navigation.js',
// Accessing extension-script.calculateExtensionScriptUrl().
'extensions/amp-script/0.1/amp-script.js->' +
'src/service/extension-script.js',
// Origin experiments.
'extensions/amp-experiment/1.0/amp-experiment.js->' +
'src/service/origin-experiments-impl.js',
// For action macros.
'extensions/amp-link-rewriter/0.1/amp-link-rewriter.js->' +
'src/service/navigation.js',
// For localization.
'extensions/amp-story/1.0/amp-story-localization-service.js->src/service/localization.js',
'extensions/amp-story-auto-ads/0.1/story-ad-localization.js->src/service/localization.js',
// Accessing calculateScriptBaseUrl() for vendor config URLs
'extensions/amp-analytics/0.1/config.js->' +
'src/service/extension-script.js',
// Experiment moving Fixed Layer to extension
'extensions/amp-viewer-integration/0.1/amp-viewer-integration.js->' +
'src/service/fixed-layer.js',
// Ads remote config manager
'extensions/amp-ad-network-doubleclick-impl/0.1/amp-ad-network-doubleclick-impl.js->src/service/real-time-config/callout-vendors.js',
'extensions/amp-ad-network-doubleclick-impl/0.1/amp-ad-network-doubleclick-impl.js->src/service/real-time-config/real-time-config-impl.js',
'extensions/amp-ad-network-valueimpression-impl/0.1/amp-ad-network-valueimpression-impl.js->src/service/real-time-config/real-time-config-impl.js',
],
},
{
filesMatching: 'extensions/**/*.js',
mustNotDependOn: 'src/base-element.js',
},
{
filesMatching: '**/*.js',
mustNotDependOn: 'src/polyfills/*.js',
allowlist: [
// DO NOT add extensions/ files
'3p/polyfills.js->src/polyfills/math-sign.js',
'3p/polyfills.js->src/polyfills/object-assign.js',
'3p/polyfills.js->src/polyfills/object-values.js',
'3p/polyfills.js->src/polyfills/promise.js',
'3p/polyfills.js->src/polyfills/string-starts-with.js',
'src/amp.js->src/polyfills/index.js',
'src/polyfills/index.js->src/polyfills/abort-controller.js',
'src/polyfills/index.js->src/polyfills/domtokenlist.js',
'src/polyfills/index.js->src/polyfills/document-contains.js',
'src/polyfills/index.js->src/polyfills/fetch.js',
'src/polyfills/index.js->src/polyfills/get-bounding-client-rect.js',
'src/polyfills/index.js->src/polyfills/math-sign.js',
'src/polyfills/index.js->src/polyfills/object-assign.js',
'src/polyfills/index.js->src/polyfills/object-values.js',
'src/polyfills/index.js->src/polyfills/promise.js',
'src/polyfills/index.js->src/polyfills/array-includes.js',
'src/polyfills/index.js->src/polyfills/string-starts-with.js',
'src/polyfills/index.js->src/polyfills/custom-elements.js',
'src/polyfills/index.js->src/polyfills/intersection-observer.js',
'src/polyfills/index.js->src/polyfills/resize-observer.js',
'src/polyfills/index.js->src/polyfills/map-set.js',
'src/polyfills/index.js->src/polyfills/set-add.js',
'src/polyfills/index.js->src/polyfills/weakmap-set.js',
'src/friendly-iframe-embed.js->src/polyfills/abort-controller.js',
'src/friendly-iframe-embed.js->src/polyfills/custom-elements.js',
'src/friendly-iframe-embed.js->src/polyfills/document-contains.js',
'src/friendly-iframe-embed.js->src/polyfills/domtokenlist.js',
'src/friendly-iframe-embed.js->src/polyfills/intersection-observer.js',
'src/friendly-iframe-embed.js->src/polyfills/resize-observer.js',
],
},
{
filesMatching: '**/*.js',
mustNotDependOn: 'src/polyfills/index.js',
allowlist: ['src/amp.js->src/polyfills/index.js'],
},
// Base assertions should never be used explicitly; only the user/dev wrappers
// or the Log class should have access to the base implementations.
{
filesMatching: '**/*.js',
mustNotDependOn: 'src/core/assert/base.js',
allowlist: [
'src/core/assert/dev.js->src/core/assert/base.js',
'src/core/assert/user.js->src/core/assert/base.js',
'src/log.js->src/core/assert/base.js',
],
},
// Rules for main src.
{
filesMatching: 'src/**/*.js',
mustNotDependOn: 'extensions/**/*.js',
},
{
filesMatching: 'src/**/*.js',
mustNotDependOn: 'ads/**/*.js',
allowlist: 'src/ad-cid.js->ads/_config.js',
},
{
filesMatching: '**/*.js',
mustNotDependOn: 'src/service/custom-element-registry.js',
allowlist: [
'builtins/**->src/service/custom-element-registry.js',
'src/amp.js->src/service/custom-element-registry.js',
'src/runtime.js->src/service/custom-element-registry.js',
'src/service/extensions-impl.js->src/service/custom-element-registry.js',
],
},
// A4A
{
filesMatching: 'extensions/**/*-ad-network-*.js',
mustNotDependOn: [
'extensions/amp-ad/0.1/amp-ad-xorigin-iframe-handler.js',
'src/3p-frame.js',
'src/iframe-helper.js',
],
allowlist:
'extensions/amp-ad-network-adsense-impl/0.1/amp-ad-network-adsense-impl.js->src/3p-frame.js',
},
{
mustNotDependOn: [
'extensions/amp-ad-network-doubleclick-impl/0.1/amp-ad-network-doubleclick-impl.js',
'extensions/amp-ad-network-adsense-impl/0.1/amp-ad-network-adsense-impl.js',
],
},
// Delayed fetch for Doubleclick will be deprecated on March 29, 2018.
// Doubleclick.js will be deleted from the repository at that time.
// Please see https://github.com/ampproject/amphtml/issues/11834
// for more information.
// Do not add any additional files to this allowlist without express
// permission from @bradfrizzell, @keithwrightbos, or @robhazan.
{
mustNotDependOn: ['ads/google/doubleclick.js'],
allowlist: [
/** DO NOT ADD TO ALLOWLIST */
'ads/vendors/ix.js->ads/google/doubleclick.js',
'ads/vendors/imonomy.js->ads/google/doubleclick.js',
'ads/vendors/navegg.js->ads/google/doubleclick.js',
/** DO NOT ADD TO ALLOWLIST */
'ads/vendors/openx.js->ads/google/doubleclick.js',
'ads/vendors/pulsepoint.js->ads/google/doubleclick.js',
/** DO NOT ADD TO ALLOWLIST */
],
},
];
|
import React, { Component } from 'react'
import {
View,
Image,
Text
} from 'react-native';
import PropTypes from 'prop-types'
import UserAvatarView from '../../atoms/UserAvatarView/UserAvatarView'
import OnDemandButton from '../../atoms/OnDemandButton/OnDemandButton'
import UserCallButton from '../../atoms/UserCallButton/UserCallButton'
import MessageButton from '../../../../_shared/src/atoms/MessageButton/MessageButton'
import styles from './styles';
import Reactotron from 'reactotron-react-native'
export default class AssociatedCaregiverRow extends Component {
static propTypes = {
/* A function that is called when onDemand button is pressed */
sendOnDemandMessage: PropTypes.func.isRequired,
/* A function that is called when call button is pressed */
callUser: PropTypes.func.isRequired,
/** A function that exposes the react-navigation API to allow
* for navigating outside of its main stack
*/
navTo: PropTypes.func.isRequired,
/** Caregivers profile image */
profileImage: PropTypes.string,
/** ID for onDemand message */
odmId: PropTypes.string.isRequired,
/** ID for secure messages*/
secureMessageId: PropTypes.string.isRequired,
/** Caregivers online status */
overAllStatus: PropTypes.string.isRequired,
/** Caregivers display name */
userName: PropTypes.string.isRequired,
/** If enabled, show messages icon */
messagesEnabled: PropTypes.bool.isRequired,
/** If enabled, show onDemand icon */
onDemandmessagesEnabled: PropTypes.bool.isRequired,
/** The display name of the logged in user (for TopBar) */
loggedInDisplayName: PropTypes.string.required,
/** The profile image of the logged in user (for TopBar) */
loggedInProfileImage: PropTypes.string.required,
/** The userId of the logged in user (for TopBar) */
loggedInUserId: PropTypes.number.required
}
render() {
const { sendOnDemandMessage, callUser, navTo, profileImage, overAllStatus, userName, messagesEnabled, onDemandmessagesEnabled, loggedInUserId, loggedInProfileImage, loggedInDisplayName, odmId, secureMessageId } = this.props
const isActive = (overAllStatus === 'available')
return (
<View style={styles.rowContainerStyle}>
<View style={styles.userAvatarStyle}>
<UserAvatarView
profileImage={profileImage}
width={42}
height={42}
overAllStatus={overAllStatus}
/>
</View>
<View style={isActive ? styles.textContainerStyle : styles.textContainerInactiveStyle}>
<Text style={styles.userNameTextStyle}>{userName}</Text>
</View>
<View style={styles.callButtonStyle}>
{onDemandmessagesEnabled &&
<OnDemandButton
size={42}
onPress={() => sendOnDemandMessage(userName, profileImage, odmId)}
/>
}
<View style={{ paddingLeft: 10 }}>
<UserCallButton
size={42}
isActive={isActive}
onPress={callUser}
overAllStatus={overAllStatus}
/>
</View>
{messagesEnabled &&
<View style={{paddingLeft: 10}}>
<MessageButton
size={42}
isActive={true}
onPress={() => {
navTo('VCSecureMessageDetail', {
userId: loggedInUserId,
recipientId: secureMessageId,
profileImage: loggedInProfileImage,
userName: loggedInDisplayName
})
}}
/>
</View>
}
</View>
</View>
)
}
}
|
//===------- TreeTransform.h - Semantic Tree Transformation -----*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//===----------------------------------------------------------------------===//
//
// This file implements a semantic tree transformation that takes a given
// AST and rebuilds it, possibly transforming some nodes in the process.
//
//===----------------------------------------------------------------------===//
#ifndef LLVM_CLANG_LIB_SEMA_TREETRANSFORM_H
#define LLVM_CLANG_LIB_SEMA_TREETRANSFORM_H
#include "CoroutineStmtBuilder.h"
#include "TypeLocBuilder.h"
#include "clang/AST/Decl.h"
#include "clang/AST/DeclObjC.h"
#include "clang/AST/DeclTemplate.h"
#include "clang/AST/Expr.h"
#include "clang/AST/ExprCXX.h"
#include "clang/AST/ExprObjC.h"
#include "clang/AST/ExprOpenMP.h"
#include "clang/AST/Stmt.h"
#include "clang/AST/StmtCXX.h"
#include "clang/AST/StmtObjC.h"
#include "clang/AST/StmtOpenMP.h"
#include "clang/Sema/Designator.h"
#include "clang/Sema/Lookup.h"
#include "clang/Sema/Ownership.h"
#include "clang/Sema/ParsedTemplate.h"
#include "clang/Sema/ScopeInfo.h"
#include "clang/Sema/SemaDiagnostic.h"
#include "clang/Sema/SemaInternal.h"
#include "llvm/ADT/ArrayRef.h"
#include "llvm/Support/ErrorHandling.h"
#include <algorithm>
namespace clang {
using namespace sema;
/// A semantic tree transformation that allows one to transform one
/// abstract syntax tree into another.
///
/// A new tree transformation is defined by creating a new subclass \c X of
/// \c TreeTransform<X> and then overriding certain operations to provide
/// behavior specific to that transformation. For example, template
/// instantiation is implemented as a tree transformation where the
/// transformation of TemplateTypeParmType nodes involves substituting the
/// template arguments for their corresponding template parameters; a similar
/// transformation is performed for non-type template parameters and
/// template template parameters.
///
/// This tree-transformation template uses static polymorphism to allow
/// subclasses to customize any of its operations. Thus, a subclass can
/// override any of the transformation or rebuild operators by providing an
/// operation with the same signature as the default implementation. The
/// overriding function should not be virtual.
///
/// Semantic tree transformations are split into two stages, either of which
/// can be replaced by a subclass. The "transform" step transforms an AST node
/// or the parts of an AST node using the various transformation functions,
/// then passes the pieces on to the "rebuild" step, which constructs a new AST
/// node of the appropriate kind from the pieces. The default transformation
/// routines recursively transform the operands to composite AST nodes (e.g.,
/// the pointee type of a PointerType node) and, if any of those operand nodes
/// were changed by the transformation, invokes the rebuild operation to create
/// a new AST node.
///
/// Subclasses can customize the transformation at various levels. The
/// most coarse-grained transformations involve replacing TransformType(),
/// TransformExpr(), TransformDecl(), TransformNestedNameSpecifierLoc(),
/// TransformTemplateName(), or TransformTemplateArgument() with entirely
/// new implementations.
///
/// For more fine-grained transformations, subclasses can replace any of the
/// \c TransformXXX functions (where XXX is the name of an AST node, e.g.,
/// PointerType, StmtExpr) to alter the transformation. As mentioned previously,
/// replacing TransformTemplateTypeParmType() allows template instantiation
/// to substitute template arguments for their corresponding template
/// parameters. Additionally, subclasses can override the \c RebuildXXX
/// functions to control how AST nodes are rebuilt when their operands change.
/// By default, \c TreeTransform will invoke semantic analysis to rebuild
/// AST nodes. However, certain other tree transformations (e.g, cloning) may
/// be able to use more efficient rebuild steps.
///
/// There are a handful of other functions that can be overridden, allowing one
/// to avoid traversing nodes that don't need any transformation
/// (\c AlreadyTransformed()), force rebuilding AST nodes even when their
/// operands have not changed (\c AlwaysRebuild()), and customize the
/// default locations and entity names used for type-checking
/// (\c getBaseLocation(), \c getBaseEntity()).
template<typename Derived>
class TreeTransform {
/// Private RAII helper that clears ("forgets") the derived transformer's
/// partially-substituted parameter-pack argument for the duration of a
/// scope, and restores ("re-remembers") it on destruction.
class ForgetPartiallySubstitutedPackRAII {
  Derived &Self;
  TemplateArgument Old;

public:
  ForgetPartiallySubstitutedPackRAII(Derived &Self)
      : Self(Self), Old(Self.ForgetPartiallySubstitutedPack()) {}

  ~ForgetPartiallySubstitutedPackRAII() {
    Self.RememberPartiallySubstitutedPack(Old);
  }
};
protected:
  /// The semantic-analysis object used to type-check and build AST nodes.
  Sema &SemaRef;

  /// The set of local declarations that have been transformed, for
  /// cases where we are forced to build new declarations within the
  /// transformer rather than in the subclass (e.g., lambda closure types).
  llvm::DenseMap<Decl *, Decl *> TransformedLocalDecls;

public:
  /// Initializes a new tree transformer.
  TreeTransform(Sema &SemaRef) : SemaRef(SemaRef) { }

  /// Retrieves a reference to the derived class (CRTP downcast).
  Derived &getDerived() { return static_cast<Derived&>(*this); }

  /// Retrieves a reference to the derived class (CRTP downcast, const).
  const Derived &getDerived() const {
    return static_cast<const Derived&>(*this);
  }

  // Pass-through helpers: wrap a raw node pointer in the corresponding
  // result type.
  static inline ExprResult Owned(Expr *E) { return E; }
  static inline StmtResult Owned(Stmt *S) { return S; }

  /// Retrieves a reference to the semantic analysis object used for
  /// this tree transform.
  Sema &getSema() const { return SemaRef; }
  /// Whether the transformation should always rebuild AST nodes, even
  /// if none of the children have changed.
  ///
  /// Subclasses may override this function to specify when the transformation
  /// should rebuild all AST nodes.
  ///
  /// We must always rebuild all AST nodes when performing variadic template
  /// pack expansion, in order to avoid violating the AST invariant that each
  /// statement node appears at most once in its containing declaration.
  bool AlwaysRebuild() {
    // A non-negative substitution index means a pack substitution is in
    // progress, so everything must be rebuilt (see invariant above).
    return SemaRef.ArgumentPackSubstitutionIndex != -1;
  }

  /// Whether the transformation is forming an expression or statement that
  /// replaces the original. In this case, we'll reuse mangling numbers from
  /// existing lambdas.
  bool ReplacingOriginal() { return false; }
  /// Returns the location of the entity being transformed, if that
  /// information was not available elsewhere in the AST.
  ///
  /// By default, returns no source-location information. Subclasses can
  /// provide an alternative implementation that provides better location
  /// information.
  SourceLocation getBaseLocation() { return SourceLocation(); }

  /// Returns the name of the entity being transformed, if that
  /// information was not available elsewhere in the AST.
  ///
  /// By default, returns an empty name. Subclasses can provide an alternative
  /// implementation with a more precise name.
  DeclarationName getBaseEntity() { return DeclarationName(); }

  /// Sets the "base" location and entity when that
  /// information is known based on another transformation.
  ///
  /// By default, the source location and entity are ignored. Subclasses can
  /// override this function to provide a customized implementation.
  void setBase(SourceLocation Loc, DeclarationName Entity) { }
/// RAII object that temporarily installs a new base location and entity
/// (used for reporting diagnostics in types) and restores the previous
/// pair when the object goes out of scope.
class TemporaryBase {
  TreeTransform &Self;
  SourceLocation OldLocation;
  DeclarationName OldEntity;

public:
  TemporaryBase(TreeTransform &Self, SourceLocation Location,
                DeclarationName Entity)
      : Self(Self), OldLocation(Self.getDerived().getBaseLocation()),
        OldEntity(Self.getDerived().getBaseEntity()) {
    // Only override the base when the caller supplied a real location.
    if (Location.isValid())
      Self.getDerived().setBase(Location, Entity);
  }

  ~TemporaryBase() { Self.getDerived().setBase(OldLocation, OldEntity); }
};
  /// Determine whether the given type \p T has already been
  /// transformed.
  ///
  /// Subclasses can provide an alternative implementation of this routine
  /// to short-circuit evaluation when it is known that a given type will
  /// not change. For example, template instantiation need not traverse
  /// non-dependent types.
  bool AlreadyTransformed(QualType T) {
    // Only a null type is trivially "already transformed" by default.
    return T.isNull();
  }

  /// Determine whether the given call argument should be dropped, e.g.,
  /// because it is a default argument.
  ///
  /// Subclasses can provide an alternative implementation of this routine to
  /// determine which kinds of call arguments get dropped. By default,
  /// CXXDefaultArgument nodes are dropped (prior to transformation).
  bool DropCallArgument(Expr *E) {
    return E->isDefaultArgument();
  }
  /// Determine whether we should expand a pack expansion with the
  /// given set of parameter packs into separate arguments by repeatedly
  /// transforming the pattern.
  ///
  /// By default, the transformer never tries to expand pack expansions.
  /// Subclasses can override this routine to provide different behavior.
  ///
  /// \param EllipsisLoc The location of the ellipsis that identifies the
  /// pack expansion.
  ///
  /// \param PatternRange The source range that covers the entire pattern of
  /// the pack expansion.
  ///
  /// \param Unexpanded The set of unexpanded parameter packs within the
  /// pattern.
  ///
  /// \param ShouldExpand Will be set to \c true if the transformer should
  /// expand the corresponding pack expansions into separate arguments. When
  /// set, \c NumExpansions must also be set.
  ///
  /// \param RetainExpansion Whether the caller should add an unexpanded
  /// pack expansion after all of the expanded arguments. This is used
  /// when extending explicitly-specified template argument packs per
  /// C++0x [temp.arg.explicit]p9.
  ///
  /// \param NumExpansions The number of separate arguments that will be in
  /// the expanded form of the corresponding pack expansion. This is both an
  /// input and an output parameter, which can be set by the caller if the
  /// number of expansions is known a priori (e.g., due to a prior substitution)
  /// and will be set by the callee when the number of expansions is known.
  /// The callee must set this value when \c ShouldExpand is \c true; it may
  /// set this value in other cases.
  ///
  /// \returns true if an error occurred (e.g., because the parameter packs
  /// are to be instantiated with arguments of different lengths), false
  /// otherwise. If false, \c ShouldExpand (and possibly \c NumExpansions)
  /// must be set.
  bool TryExpandParameterPacks(SourceLocation EllipsisLoc,
                               SourceRange PatternRange,
                               ArrayRef<UnexpandedParameterPack> Unexpanded,
                               bool &ShouldExpand,
                               bool &RetainExpansion,
                               Optional<unsigned> &NumExpansions) {
    // Default: never expand; leave RetainExpansion/NumExpansions untouched
    // and report success.
    ShouldExpand = false;
    return false;
  }
/// "Forget" about the partially-substituted pack template argument,
/// when performing an instantiation that must preserve the parameter pack
/// use.
///
/// This routine is meant to be overridden by the template instantiator.
TemplateArgument ForgetPartiallySubstitutedPack() {
return TemplateArgument();
}
/// "Remember" the partially-substituted pack template argument
/// after performing an instantiation that must preserve the parameter pack
/// use.
///
/// This routine is meant to be overridden by the template instantiator.
void RememberPartiallySubstitutedPack(TemplateArgument Arg) { }
/// Note to the derived class when a function parameter pack is
/// being expanded.
void ExpandingFunctionParameterPack(ParmVarDecl *Pack) { }
/// Transforms the given type into another type.
///
/// By default, this routine transforms a type by creating a
/// TypeSourceInfo for it and delegating to the appropriate
/// function. This is expensive, but we don't mind, because
/// this method is deprecated anyway; all users should be
/// switched to storing TypeSourceInfos.
///
/// \returns the transformed type.
QualType TransformType(QualType T);
/// Transforms the given type-with-location into a new
/// type-with-location.
///
/// By default, this routine transforms a type by delegating to the
/// appropriate TransformXXXType to build a new type. Subclasses
/// may override this function (to take over all type
/// transformations) or some set of the TransformXXXType functions
/// to alter the transformation.
TypeSourceInfo *TransformType(TypeSourceInfo *DI);
/// Transform the given type-with-location into a new
/// type, collecting location information in the given builder
/// as necessary.
///
QualType TransformType(TypeLocBuilder &TLB, TypeLoc TL);
/// Transform a type that is permitted to produce a
/// DeducedTemplateSpecializationType.
///
/// This is used in the (relatively rare) contexts where it is acceptable
/// for transformation to produce a class template type with deduced
/// template arguments.
/// @{
QualType TransformTypeWithDeducedTST(QualType T);
TypeSourceInfo *TransformTypeWithDeducedTST(TypeSourceInfo *DI);
/// @}
/// The reason why the value of a statement is not discarded, if any.
enum StmtDiscardKind {
SDK_Discarded,
SDK_NotDiscarded,
SDK_StmtExprResult,
};
/// Transform the given statement.
///
/// By default, this routine transforms a statement by delegating to the
/// appropriate TransformXXXStmt function to transform a specific kind of
/// statement or the TransformExpr() function to transform an expression.
/// Subclasses may override this function to transform statements using some
/// other mechanism.
///
/// \returns the transformed statement.
StmtResult TransformStmt(Stmt *S, StmtDiscardKind SDK = SDK_Discarded);
/// Transform the given statement.
///
/// By default, this routine transforms a statement by delegating to the
/// appropriate TransformOMPXXXClause function to transform a specific kind
/// of clause. Subclasses may override this function to transform statements
/// using some other mechanism.
///
/// \returns the transformed OpenMP clause.
OMPClause *TransformOMPClause(OMPClause *S);
/// Transform the given attribute.
///
/// By default, this routine transforms a statement by delegating to the
/// appropriate TransformXXXAttr function to transform a specific kind
/// of attribute. Subclasses may override this function to transform
/// attributed statements using some other mechanism.
///
/// \returns the transformed attribute
const Attr *TransformAttr(const Attr *S);
/// Transform the specified attribute.
///
/// Subclasses should override the transformation of attributes with a pragma
/// spelling to transform expressions stored within the attribute.
///
/// \returns the transformed attribute.
#define ATTR(X)
#define PRAGMA_SPELLING_ATTR(X) \
const X##Attr *Transform##X##Attr(const X##Attr *R) { return R; }
#include "clang/Basic/AttrList.inc"
/// Transform the given expression.
///
/// By default, this routine transforms an expression by delegating to the
/// appropriate TransformXXXExpr function to build a new expression.
/// Subclasses may override this function to transform expressions using some
/// other mechanism.
///
/// \returns the transformed expression.
ExprResult TransformExpr(Expr *E);
/// Transform the given initializer.
///
/// By default, this routine transforms an initializer by stripping off the
/// semantic nodes added by initialization, then passing the result to
/// TransformExpr or TransformExprs.
///
/// \returns the transformed initializer.
ExprResult TransformInitializer(Expr *Init, bool NotCopyInit);
/// Transform the given list of expressions.
///
/// This routine transforms a list of expressions by invoking
/// \c TransformExpr() for each subexpression. However, it also provides
/// support for variadic templates by expanding any pack expansions (if the
/// derived class permits such expansion) along the way. When pack expansions
/// are present, the number of outputs may not equal the number of inputs.
///
/// \param Inputs The set of expressions to be transformed.
///
/// \param NumInputs The number of expressions in \c Inputs.
///
/// \param IsCall If \c true, then this transform is being performed on
/// function-call arguments, and any arguments that should be dropped, will
/// be.
///
/// \param Outputs The transformed input expressions will be added to this
/// vector.
///
/// \param ArgChanged If non-NULL, will be set \c true if any argument changed
/// due to transformation.
///
/// \returns true if an error occurred, false otherwise.
bool TransformExprs(Expr *const *Inputs, unsigned NumInputs, bool IsCall,
SmallVectorImpl<Expr *> &Outputs,
bool *ArgChanged = nullptr);
/// Transform the given declaration, which is referenced from a type
/// or expression.
///
/// By default, acts as the identity function on declarations, unless the
/// transformer has had to transform the declaration itself. Subclasses
/// may override this function to provide alternate behavior.
Decl *TransformDecl(SourceLocation Loc, Decl *D) {
  // If this transformer already rebuilt D locally, hand back the
  // recorded replacement; otherwise the declaration maps to itself.
  auto Known = TransformedLocalDecls.find(D);
  return Known == TransformedLocalDecls.end() ? D : Known->second;
}
/// Transform the specified condition.
///
/// By default, this transforms the variable and expression and rebuilds
/// the condition.
Sema::ConditionResult TransformCondition(SourceLocation Loc, VarDecl *Var,
Expr *Expr,
Sema::ConditionKind Kind);
/// Transform the attributes associated with the given declaration and
/// place them on the new declaration.
///
/// By default, this operation does nothing. Subclasses may override this
/// behavior to transform attributes.
void transformAttrs(Decl *Old, Decl *New) { }
  /// Note that a local declaration has been transformed by this
  /// transformer.
  ///
  /// Local declarations are typically transformed via a call to
  /// TransformDefinition. However, in some cases (e.g., lambda expressions),
  /// the transformer itself has to transform the declarations. This routine
  /// can be overridden by a subclass that keeps track of such mappings.
  void transformedLocalDecl(Decl *Old, ArrayRef<Decl *> New) {
    // The base implementation can only record a one-to-one mapping.
    assert(New.size() == 1 &&
           "must override transformedLocalDecl if performing pack expansion");
    TransformedLocalDecls[Old] = New.front();
  }
  /// Transform the definition of the given declaration.
  ///
  /// By default, invokes TransformDecl() to transform the declaration.
  /// Subclasses may override this function to provide alternate behavior.
  Decl *TransformDefinition(SourceLocation Loc, Decl *D) {
    return getDerived().TransformDecl(Loc, D);
  }
  /// Transform the given declaration, which was the first part of a
  /// nested-name-specifier in a member access expression.
  ///
  /// This specific declaration transformation only applies to the first
  /// identifier in a nested-name-specifier of a member access expression, e.g.,
  /// the \c T in \c x->T::member
  ///
  /// By default, invokes TransformDecl() to transform the declaration.
  /// Subclasses may override this function to provide alternate behavior.
  NamedDecl *TransformFirstQualifierInScope(NamedDecl *D, SourceLocation Loc) {
    // TransformDecl may return null; propagate that rather than asserting.
    return cast_or_null<NamedDecl>(getDerived().TransformDecl(Loc, D));
  }
/// Transform the set of declarations in an OverloadExpr.
bool TransformOverloadExprDecls(OverloadExpr *Old, bool RequiresADL,
LookupResult &R);
/// Transform the given nested-name-specifier with source-location
/// information.
///
/// By default, transforms all of the types and declarations within the
/// nested-name-specifier. Subclasses may override this function to provide
/// alternate behavior.
NestedNameSpecifierLoc
TransformNestedNameSpecifierLoc(NestedNameSpecifierLoc NNS,
QualType ObjectType = QualType(),
NamedDecl *FirstQualifierInScope = nullptr);
/// Transform the given declaration name.
///
/// By default, transforms the types of conversion function, constructor,
/// and destructor names and then (if needed) rebuilds the declaration name.
/// Identifiers and selectors are returned unmodified. Subclasses may
/// override this function to provide alternate behavior.
DeclarationNameInfo
TransformDeclarationNameInfo(const DeclarationNameInfo &NameInfo);
/// Transform the given template name.
///
/// \param SS The nested-name-specifier that qualifies the template
/// name. This nested-name-specifier must already have been transformed.
///
/// \param Name The template name to transform.
///
/// \param NameLoc The source location of the template name.
///
/// \param ObjectType If we're translating a template name within a member
/// access expression, this is the type of the object whose member template
/// is being referenced.
///
/// \param FirstQualifierInScope If the first part of a nested-name-specifier
/// also refers to a name within the current (lexical) scope, this is the
/// declaration it refers to.
///
/// By default, transforms the template name by transforming the declarations
/// and nested-name-specifiers that occur within the template name.
/// Subclasses may override this function to provide alternate behavior.
TemplateName
TransformTemplateName(CXXScopeSpec &SS, TemplateName Name,
SourceLocation NameLoc,
QualType ObjectType = QualType(),
NamedDecl *FirstQualifierInScope = nullptr,
bool AllowInjectedClassName = false);
/// Transform the given template argument.
///
/// By default, this operation transforms the type, expression, or
/// declaration stored within the template argument and constructs a
/// new template argument from the transformed result. Subclasses may
/// override this function to provide alternate behavior.
///
/// Returns true if there was an error.
bool TransformTemplateArgument(const TemplateArgumentLoc &Input,
TemplateArgumentLoc &Output,
bool Uneval = false);
  /// Transform the given set of template arguments.
  ///
  /// By default, this operation transforms all of the template arguments
  /// in the input set using \c TransformTemplateArgument(), and appends
  /// the transformed arguments to the output list.
  ///
  /// Note that this overload of \c TransformTemplateArguments() is merely
  /// a convenience function. Subclasses that wish to override this behavior
  /// should override the iterator-based member template version.
  ///
  /// \param Inputs The set of template arguments to be transformed.
  ///
  /// \param NumInputs The number of template arguments in \p Inputs.
  ///
  /// \param Outputs The set of transformed template arguments output by this
  /// routine.
  ///
  /// Returns true if an error occurred.
  bool TransformTemplateArguments(const TemplateArgumentLoc *Inputs,
                                 unsigned NumInputs,
                                 TemplateArgumentListInfo &Outputs,
                                 bool Uneval = false) {
    // Forward to the iterator-based overload over [Inputs, Inputs+NumInputs).
    return TransformTemplateArguments(Inputs, Inputs + NumInputs, Outputs,
                                      Uneval);
  }
/// Transform the given set of template arguments.
///
/// By default, this operation transforms all of the template arguments
/// in the input set using \c TransformTemplateArgument(), and appends
/// the transformed arguments to the output list.
///
/// \param First An iterator to the first template argument.
///
/// \param Last An iterator one step past the last template argument.
///
/// \param Outputs The set of transformed template arguments output by this
/// routine.
///
/// Returns true if an error occurred.
template<typename InputIterator>
bool TransformTemplateArguments(InputIterator First,
InputIterator Last,
TemplateArgumentListInfo &Outputs,
bool Uneval = false);
/// Fakes up a TemplateArgumentLoc for a given TemplateArgument.
void InventTemplateArgumentLoc(const TemplateArgument &Arg,
TemplateArgumentLoc &ArgLoc);
  /// Fakes up a TypeSourceInfo for a type.
  ///
  /// All invented source locations come from the derived class's
  /// getBaseLocation().
  TypeSourceInfo *InventTypeSourceInfo(QualType T) {
    return SemaRef.Context.getTrivialTypeSourceInfo(T,
                                           getDerived().getBaseLocation());
  }
#define ABSTRACT_TYPELOC(CLASS, PARENT)
#define TYPELOC(CLASS, PARENT) \
QualType Transform##CLASS##Type(TypeLocBuilder &TLB, CLASS##TypeLoc T);
#include "clang/AST/TypeLocNodes.def"
template<typename Fn>
QualType TransformFunctionProtoType(TypeLocBuilder &TLB,
FunctionProtoTypeLoc TL,
CXXRecordDecl *ThisContext,
Qualifiers ThisTypeQuals,
Fn TransformExceptionSpec);
bool TransformExceptionSpec(SourceLocation Loc,
FunctionProtoType::ExceptionSpecInfo &ESI,
SmallVectorImpl<QualType> &Exceptions,
bool &Changed);
StmtResult TransformSEHHandler(Stmt *Handler);
QualType
TransformTemplateSpecializationType(TypeLocBuilder &TLB,
TemplateSpecializationTypeLoc TL,
TemplateName Template);
QualType
TransformDependentTemplateSpecializationType(TypeLocBuilder &TLB,
DependentTemplateSpecializationTypeLoc TL,
TemplateName Template,
CXXScopeSpec &SS);
QualType TransformDependentTemplateSpecializationType(
TypeLocBuilder &TLB, DependentTemplateSpecializationTypeLoc TL,
NestedNameSpecifierLoc QualifierLoc);
/// Transforms the parameters of a function type into the
/// given vectors.
///
/// The result vectors should be kept in sync; null entries in the
/// variables vector are acceptable.
///
/// Return true on error.
bool TransformFunctionTypeParams(
SourceLocation Loc, ArrayRef<ParmVarDecl *> Params,
const QualType *ParamTypes,
const FunctionProtoType::ExtParameterInfo *ParamInfos,
SmallVectorImpl<QualType> &PTypes, SmallVectorImpl<ParmVarDecl *> *PVars,
Sema::ExtParameterInfoBuilder &PInfos);
/// Transforms a single function-type parameter. Return null
/// on error.
///
/// \param indexAdjustment - A number to add to the parameter's
/// scope index; can be negative
ParmVarDecl *TransformFunctionTypeParam(ParmVarDecl *OldParm,
int indexAdjustment,
Optional<unsigned> NumExpansions,
bool ExpectParameterPack);
/// Transform the body of a lambda-expression.
StmtResult TransformLambdaBody(LambdaExpr *E, Stmt *Body);
/// Alternative implementation of TransformLambdaBody that skips transforming
/// the body.
StmtResult SkipLambdaBody(LambdaExpr *E, Stmt *Body);
QualType TransformReferenceType(TypeLocBuilder &TLB, ReferenceTypeLoc TL);
StmtResult TransformCompoundStmt(CompoundStmt *S, bool IsStmtExpr);
ExprResult TransformCXXNamedCastExpr(CXXNamedCastExpr *E);
  /// Transform the given template parameter list.
  ///
  /// By default, this is the identity transformation; subclasses may
  /// override it to rebuild the list.
  TemplateParameterList *TransformTemplateParameterList(
        TemplateParameterList *TPL) {
    return TPL;
  }
ExprResult TransformAddressOfOperand(Expr *E);
ExprResult TransformDependentScopeDeclRefExpr(DependentScopeDeclRefExpr *E,
bool IsAddressOfOperand,
TypeSourceInfo **RecoveryTSI);
ExprResult TransformParenDependentScopeDeclRefExpr(
ParenExpr *PE, DependentScopeDeclRefExpr *DRE, bool IsAddressOfOperand,
TypeSourceInfo **RecoveryTSI);
StmtResult TransformOMPExecutableDirective(OMPExecutableDirective *S);
// FIXME: We use LLVM_ATTRIBUTE_NOINLINE because inlining causes a ridiculous
// amount of stack usage with clang.
#define STMT(Node, Parent) \
LLVM_ATTRIBUTE_NOINLINE \
StmtResult Transform##Node(Node *S);
#define VALUESTMT(Node, Parent) \
LLVM_ATTRIBUTE_NOINLINE \
StmtResult Transform##Node(Node *S, StmtDiscardKind SDK);
#define EXPR(Node, Parent) \
LLVM_ATTRIBUTE_NOINLINE \
ExprResult Transform##Node(Node *E);
#define ABSTRACT_STMT(Stmt)
#include "clang/AST/StmtNodes.inc"
#define OPENMP_CLAUSE(Name, Class) \
LLVM_ATTRIBUTE_NOINLINE \
OMPClause *Transform ## Class(Class *S);
#include "clang/Basic/OpenMPKinds.def"
/// Build a new qualified type given its unqualified type and type location.
///
/// By default, this routine adds type qualifiers only to types that can
/// have qualifiers, and silently suppresses those qualifiers that are not
/// permitted. Subclasses may override this routine to provide different
/// behavior.
QualType RebuildQualifiedType(QualType T, QualifiedTypeLoc TL);
/// Build a new pointer type given its pointee type.
///
/// By default, performs semantic analysis when building the pointer type.
/// Subclasses may override this routine to provide different behavior.
QualType RebuildPointerType(QualType PointeeType, SourceLocation Sigil);
/// Build a new block pointer type given its pointee type.
///
/// By default, performs semantic analysis when building the block pointer
/// type. Subclasses may override this routine to provide different behavior.
QualType RebuildBlockPointerType(QualType PointeeType, SourceLocation Sigil);
/// Build a new reference type given the type it references.
///
/// By default, performs semantic analysis when building the
/// reference type. Subclasses may override this routine to provide
/// different behavior.
///
/// \param LValue whether the type was written with an lvalue sigil
/// or an rvalue sigil.
QualType RebuildReferenceType(QualType ReferentType,
bool LValue,
SourceLocation Sigil);
/// Build a new member pointer type given the pointee type and the
/// class type it refers into.
///
/// By default, performs semantic analysis when building the member pointer
/// type. Subclasses may override this routine to provide different behavior.
QualType RebuildMemberPointerType(QualType PointeeType, QualType ClassType,
SourceLocation Sigil);
QualType RebuildObjCTypeParamType(const ObjCTypeParamDecl *Decl,
SourceLocation ProtocolLAngleLoc,
ArrayRef<ObjCProtocolDecl *> Protocols,
ArrayRef<SourceLocation> ProtocolLocs,
SourceLocation ProtocolRAngleLoc);
/// Build an Objective-C object type.
///
/// By default, performs semantic analysis when building the object type.
/// Subclasses may override this routine to provide different behavior.
QualType RebuildObjCObjectType(QualType BaseType,
SourceLocation Loc,
SourceLocation TypeArgsLAngleLoc,
ArrayRef<TypeSourceInfo *> TypeArgs,
SourceLocation TypeArgsRAngleLoc,
SourceLocation ProtocolLAngleLoc,
ArrayRef<ObjCProtocolDecl *> Protocols,
ArrayRef<SourceLocation> ProtocolLocs,
SourceLocation ProtocolRAngleLoc);
/// Build a new Objective-C object pointer type given the pointee type.
///
/// By default, directly builds the pointer type, with no additional semantic
/// analysis.
QualType RebuildObjCObjectPointerType(QualType PointeeType,
SourceLocation Star);
/// Build a new array type given the element type, size
/// modifier, size of the array (if known), size expression, and index type
/// qualifiers.
///
/// By default, performs semantic analysis when building the array type.
/// Subclasses may override this routine to provide different behavior.
/// Also by default, the other Rebuild*ArrayType routines are presumed to
/// funnel into this one — confirm against their definitions.
QualType RebuildArrayType(QualType ElementType,
ArrayType::ArraySizeModifier SizeMod,
const llvm::APInt *Size,
Expr *SizeExpr,
unsigned IndexTypeQuals,
SourceRange BracketsRange);
/// Build a new constant array type given the element type, size
/// modifier, (known) size of the array, and index type qualifiers.
///
/// By default, performs semantic analysis when building the array type.
/// Subclasses may override this routine to provide different behavior.
QualType RebuildConstantArrayType(QualType ElementType,
ArrayType::ArraySizeModifier SizeMod,
const llvm::APInt &Size,
unsigned IndexTypeQuals,
SourceRange BracketsRange);
/// Build a new incomplete array type given the element type, size
/// modifier, and index type qualifiers.
///
/// By default, performs semantic analysis when building the array type.
/// Subclasses may override this routine to provide different behavior.
QualType RebuildIncompleteArrayType(QualType ElementType,
ArrayType::ArraySizeModifier SizeMod,
unsigned IndexTypeQuals,
SourceRange BracketsRange);
/// Build a new variable-length array type given the element type,
/// size modifier, size expression, and index type qualifiers.
///
/// By default, performs semantic analysis when building the array type.
/// Subclasses may override this routine to provide different behavior.
QualType RebuildVariableArrayType(QualType ElementType,
ArrayType::ArraySizeModifier SizeMod,
Expr *SizeExpr,
unsigned IndexTypeQuals,
SourceRange BracketsRange);
/// Build a new dependent-sized array type given the element type,
/// size modifier, size expression, and index type qualifiers.
///
/// By default, performs semantic analysis when building the array type.
/// Subclasses may override this routine to provide different behavior.
QualType RebuildDependentSizedArrayType(QualType ElementType,
ArrayType::ArraySizeModifier SizeMod,
Expr *SizeExpr,
unsigned IndexTypeQuals,
SourceRange BracketsRange);
/// Build a new vector type given the element type and
/// number of elements.
///
/// By default, performs semantic analysis when building the vector type.
/// Subclasses may override this routine to provide different behavior.
QualType RebuildVectorType(QualType ElementType, unsigned NumElements,
VectorType::VectorKind VecKind);
/// Build a new potentially dependently-sized extended vector type
/// given the element type and number of elements.
///
/// By default, performs semantic analysis when building the vector type.
/// Subclasses may override this routine to provide different behavior.
QualType RebuildDependentVectorType(QualType ElementType, Expr *SizeExpr,
SourceLocation AttributeLoc,
VectorType::VectorKind);
/// Build a new extended vector type given the element type and
/// number of elements.
///
/// By default, performs semantic analysis when building the vector type.
/// Subclasses may override this routine to provide different behavior.
QualType RebuildExtVectorType(QualType ElementType, unsigned NumElements,
SourceLocation AttributeLoc);
/// Build a new potentially dependently-sized extended vector type
/// given the element type and number of elements.
///
/// By default, performs semantic analysis when building the vector type.
/// Subclasses may override this routine to provide different behavior.
QualType RebuildDependentSizedExtVectorType(QualType ElementType,
Expr *SizeExpr,
SourceLocation AttributeLoc);
/// Build a new DependentAddressSpaceType or return the pointee
/// type variable with the correct address space (retrieved from
/// AddrSpaceExpr) applied to it. The former will be returned in cases
/// where the address space remains dependent.
///
/// By default, performs semantic analysis when building the type with address
/// space applied. Subclasses may override this routine to provide different
/// behavior.
QualType RebuildDependentAddressSpaceType(QualType PointeeType,
Expr *AddrSpaceExpr,
SourceLocation AttributeLoc);
/// Build a new function type.
///
/// By default, performs semantic analysis when building the function type.
/// Subclasses may override this routine to provide different behavior.
QualType RebuildFunctionProtoType(QualType T,
MutableArrayRef<QualType> ParamTypes,
const FunctionProtoType::ExtProtoInfo &EPI);
/// Build a new unprototyped function type.
QualType RebuildFunctionNoProtoType(QualType ResultType);
/// Rebuild an unresolved typename type, given the decl that
/// the UnresolvedUsingTypenameDecl was transformed to.
QualType RebuildUnresolvedUsingType(SourceLocation NameLoc, Decl *D);
  /// Build a new typedef type.
  ///
  /// Delegates directly to the ASTContext; no semantic analysis is performed.
  QualType RebuildTypedefType(TypedefNameDecl *Typedef) {
    return SemaRef.Context.getTypeDeclType(Typedef);
  }

  /// Build a new MacroDefined type.
  QualType RebuildMacroQualifiedType(QualType T,
                                     const IdentifierInfo *MacroII) {
    return SemaRef.Context.getMacroQualifiedType(T, MacroII);
  }

  /// Build a new class/struct/union type.
  QualType RebuildRecordType(RecordDecl *Record) {
    return SemaRef.Context.getTypeDeclType(Record);
  }

  /// Build a new Enum type.
  QualType RebuildEnumType(EnumDecl *Enum) {
    return SemaRef.Context.getTypeDeclType(Enum);
  }
/// Build a new typeof(expr) type.
///
/// By default, performs semantic analysis when building the typeof type.
/// Subclasses may override this routine to provide different behavior.
QualType RebuildTypeOfExprType(Expr *Underlying, SourceLocation Loc);
/// Build a new typeof(type) type.
///
/// By default, builds a new TypeOfType with the given underlying type.
QualType RebuildTypeOfType(QualType Underlying);
/// Build a new unary transform type.
QualType RebuildUnaryTransformType(QualType BaseType,
UnaryTransformType::UTTKind UKind,
SourceLocation Loc);
/// Build a new C++11 decltype type.
///
/// By default, performs semantic analysis when building the decltype type.
/// Subclasses may override this routine to provide different behavior.
QualType RebuildDecltypeType(Expr *Underlying, SourceLocation Loc);
/// Build a new C++11 auto type.
///
/// By default, builds a new AutoType with the given deduced type.
QualType RebuildAutoType(QualType Deduced, AutoTypeKeyword Keyword) {
// Note, IsDependent is always false here: we implicitly convert an 'auto'
// which has been deduced to a dependent type into an undeduced 'auto', so
// that we'll retry deduction after the transformation.
return SemaRef.Context.getAutoType(Deduced, Keyword,
/*IsDependent*/ false);
}
/// Build a new deduced template specialization type.
///
/// By default, builds a new DeducedTemplateSpecializationType with the given
/// deduced type.
QualType RebuildDeducedTemplateSpecializationType(TemplateName Template,
QualType Deduced) {
return SemaRef.Context.getDeducedTemplateSpecializationType(
Template, Deduced, /*IsDependent*/ false);
}
/// Build a new template specialization type.
///
/// By default, performs semantic analysis when building the template
/// specialization type. Subclasses may override this routine to provide
/// different behavior.
QualType RebuildTemplateSpecializationType(TemplateName Template,
SourceLocation TemplateLoc,
TemplateArgumentListInfo &Args);
/// Build a new parenthesized type.
///
/// By default, builds a new ParenType type from the inner type.
/// Subclasses may override this routine to provide different behavior.
QualType RebuildParenType(QualType InnerType) {
return SemaRef.BuildParenType(InnerType);
}
/// Build a new qualified name type.
///
/// By default, builds a new ElaboratedType type from the keyword,
/// the nested-name-specifier and the named type.
/// Subclasses may override this routine to provide different behavior.
QualType RebuildElaboratedType(SourceLocation KeywordLoc,
ElaboratedTypeKeyword Keyword,
NestedNameSpecifierLoc QualifierLoc,
QualType Named) {
return SemaRef.Context.getElaboratedType(Keyword,
QualifierLoc.getNestedNameSpecifier(),
Named);
}
/// Build a new typename type that refers to a template-id.
///
/// By default, builds a new DependentNameType type from the
/// nested-name-specifier and the given type. Subclasses may override
/// this routine to provide different behavior.
QualType RebuildDependentTemplateSpecializationType(
ElaboratedTypeKeyword Keyword,
NestedNameSpecifierLoc QualifierLoc,
SourceLocation TemplateKWLoc,
const IdentifierInfo *Name,
SourceLocation NameLoc,
TemplateArgumentListInfo &Args,
bool AllowInjectedClassName) {
// Rebuild the template name.
// TODO: avoid TemplateName abstraction
CXXScopeSpec SS;
SS.Adopt(QualifierLoc);
TemplateName InstName = getDerived().RebuildTemplateName(
SS, TemplateKWLoc, *Name, NameLoc, QualType(), nullptr,
AllowInjectedClassName);
if (InstName.isNull())
return QualType();
// If it's still dependent, make a dependent specialization.
if (InstName.getAsDependentTemplateName())
return SemaRef.Context.getDependentTemplateSpecializationType(Keyword,
QualifierLoc.getNestedNameSpecifier(),
Name,
Args);
// Otherwise, make an elaborated type wrapping a non-dependent
// specialization.
QualType T =
getDerived().RebuildTemplateSpecializationType(InstName, NameLoc, Args);
if (T.isNull()) return QualType();
// With no keyword and no qualifier there is nothing to elaborate; return
// the specialization type directly.
if (Keyword == ETK_None && QualifierLoc.getNestedNameSpecifier() == nullptr)
return T;
return SemaRef.Context.getElaboratedType(Keyword,
QualifierLoc.getNestedNameSpecifier(),
T);
}
/// Build a new typename type that refers to an identifier.
///
/// By default, performs semantic analysis when building the typename type
/// (or elaborated type). Subclasses may override this routine to provide
/// different behavior.
QualType RebuildDependentNameType(ElaboratedTypeKeyword Keyword,
SourceLocation KeywordLoc,
NestedNameSpecifierLoc QualifierLoc,
const IdentifierInfo *Id,
SourceLocation IdLoc,
bool DeducedTSTContext) {
CXXScopeSpec SS;
SS.Adopt(QualifierLoc);
if (QualifierLoc.getNestedNameSpecifier()->isDependent()) {
// If the name is still dependent, just build a new dependent name type.
if (!SemaRef.computeDeclContext(SS))
return SemaRef.Context.getDependentNameType(Keyword,
QualifierLoc.getNestedNameSpecifier(),
Id);
}
if (Keyword == ETK_None || Keyword == ETK_Typename) {
QualType T = SemaRef.CheckTypenameType(Keyword, KeywordLoc, QualifierLoc,
*Id, IdLoc);
// If a dependent name resolves to a deduced template specialization type,
// check that we're in one of the syntactic contexts permitting it.
if (!DeducedTSTContext) {
if (auto *Deduced = dyn_cast_or_null<DeducedTemplateSpecializationType>(
T.isNull() ? nullptr : T->getContainedDeducedType())) {
SemaRef.Diag(IdLoc, diag::err_dependent_deduced_tst)
<< (int)SemaRef.getTemplateNameKindForDiagnostics(
Deduced->getTemplateName())
<< QualType(QualifierLoc.getNestedNameSpecifier()->getAsType(), 0);
if (auto *TD = Deduced->getTemplateName().getAsTemplateDecl())
SemaRef.Diag(TD->getLocation(), diag::note_template_decl_here);
return QualType();
}
}
return T;
}
TagTypeKind Kind = TypeWithKeyword::getTagTypeKindForKeyword(Keyword);
// We had a dependent elaborated-type-specifier that has been transformed
// into a non-dependent elaborated-type-specifier. Find the tag we're
// referring to.
LookupResult Result(SemaRef, Id, IdLoc, Sema::LookupTagName);
DeclContext *DC = SemaRef.computeDeclContext(SS, false);
if (!DC)
return QualType();
if (SemaRef.RequireCompleteDeclContext(SS, DC))
return QualType();
TagDecl *Tag = nullptr;
SemaRef.LookupQualifiedName(Result, DC);
switch (Result.getResultKind()) {
case LookupResult::NotFound:
case LookupResult::NotFoundInCurrentInstantiation:
// Fall through with Tag == nullptr; diagnosed below.
break;
case LookupResult::Found:
Tag = Result.getAsSingle<TagDecl>();
break;
case LookupResult::FoundOverloaded:
case LookupResult::FoundUnresolvedValue:
llvm_unreachable("Tag lookup cannot find non-tags");
case LookupResult::Ambiguous:
// Let the LookupResult structure handle ambiguities.
return QualType();
}
if (!Tag) {
// Check where the name exists but isn't a tag type and use that to emit
// better diagnostics.
// This second lookup is purely for diagnostics; the result is never used
// to build a type.
LookupResult Result(SemaRef, Id, IdLoc, Sema::LookupTagName);
SemaRef.LookupQualifiedName(Result, DC);
switch (Result.getResultKind()) {
case LookupResult::Found:
case LookupResult::FoundOverloaded:
case LookupResult::FoundUnresolvedValue: {
NamedDecl *SomeDecl = Result.getRepresentativeDecl();
Sema::NonTagKind NTK = SemaRef.getNonTagTypeDeclKind(SomeDecl, Kind);
SemaRef.Diag(IdLoc, diag::err_tag_reference_non_tag) << SomeDecl
<< NTK << Kind;
SemaRef.Diag(SomeDecl->getLocation(), diag::note_declared_at);
break;
}
default:
SemaRef.Diag(IdLoc, diag::err_not_tag_in_scope)
<< Kind << Id << DC << QualifierLoc.getSourceRange();
break;
}
return QualType();
}
if (!SemaRef.isAcceptableTagRedeclaration(Tag, Kind, /*isDefinition*/false,
IdLoc, Id)) {
SemaRef.Diag(KeywordLoc, diag::err_use_with_wrong_tag) << Id;
SemaRef.Diag(Tag->getLocation(), diag::note_previous_use);
return QualType();
}
// Build the elaborated-type-specifier type.
QualType T = SemaRef.Context.getTypeDeclType(Tag);
return SemaRef.Context.getElaboratedType(Keyword,
QualifierLoc.getNestedNameSpecifier(),
T);
}
/// Build a new pack expansion type.
///
/// By default, builds a new PackExpansionType type from the given pattern.
/// Subclasses may override this routine to provide different behavior.
QualType RebuildPackExpansionType(QualType Pattern,
SourceRange PatternRange,
SourceLocation EllipsisLoc,
Optional<unsigned> NumExpansions) {
return getSema().CheckPackExpansion(Pattern, PatternRange, EllipsisLoc,
NumExpansions);
}
/// Build a new atomic type given its value type.
///
/// By default, performs semantic analysis when building the atomic type.
/// Subclasses may override this routine to provide different behavior.
QualType RebuildAtomicType(QualType ValueType, SourceLocation KWLoc);
/// Build a new pipe type given its value type.
QualType RebuildPipeType(QualType ValueType, SourceLocation KWLoc,
bool isReadPipe);
/// Build a new template name given a nested name specifier, a flag
/// indicating whether the "template" keyword was provided, and the template
/// that the template name refers to.
///
/// By default, builds the new template name directly. Subclasses may override
/// this routine to provide different behavior.
TemplateName RebuildTemplateName(CXXScopeSpec &SS,
bool TemplateKW,
TemplateDecl *Template);
/// Build a new template name given a nested name specifier and the
/// name that is referred to as a template.
///
/// By default, performs semantic analysis to determine whether the name can
/// be resolved to a specific template, then builds the appropriate kind of
/// template name. Subclasses may override this routine to provide different
/// behavior.
TemplateName RebuildTemplateName(CXXScopeSpec &SS,
SourceLocation TemplateKWLoc,
const IdentifierInfo &Name,
SourceLocation NameLoc, QualType ObjectType,
NamedDecl *FirstQualifierInScope,
bool AllowInjectedClassName);
/// Build a new template name given a nested name specifier and the
/// overloaded operator name that is referred to as a template.
///
/// By default, performs semantic analysis to determine whether the name can
/// be resolved to a specific template, then builds the appropriate kind of
/// template name. Subclasses may override this routine to provide different
/// behavior.
TemplateName RebuildTemplateName(CXXScopeSpec &SS,
SourceLocation TemplateKWLoc,
OverloadedOperatorKind Operator,
SourceLocation NameLoc, QualType ObjectType,
bool AllowInjectedClassName);
/// Build a new template name given a template template parameter pack
/// and the argument pack that is being substituted for it.
///
/// By default, performs semantic analysis to determine whether the name can
/// be resolved to a specific template, then builds the appropriate kind of
/// template name. Subclasses may override this routine to provide different
/// behavior.
TemplateName RebuildTemplateName(TemplateTemplateParmDecl *Param,
const TemplateArgument &ArgPack) {
return getSema().Context.getSubstTemplateTemplateParmPack(Param, ArgPack);
}
/// Build a new compound statement.
///
/// By default, performs semantic analysis to build the new statement.
/// Subclasses may override this routine to provide different behavior.
StmtResult RebuildCompoundStmt(SourceLocation LBraceLoc,
MultiStmtArg Statements,
SourceLocation RBraceLoc,
bool IsStmtExpr) {
return getSema().ActOnCompoundStmt(LBraceLoc, RBraceLoc, Statements,
IsStmtExpr);
}
/// Build a new case statement.
///
/// By default, performs semantic analysis to build the new statement.
/// Subclasses may override this routine to provide different behavior.
StmtResult RebuildCaseStmt(SourceLocation CaseLoc,
Expr *LHS,
SourceLocation EllipsisLoc,
Expr *RHS,
SourceLocation ColonLoc) {
return getSema().ActOnCaseStmt(CaseLoc, LHS, EllipsisLoc, RHS,
ColonLoc);
}
/// Attach the body to a new case statement.
///
/// By default, performs semantic analysis to build the new statement.
/// Subclasses may override this routine to provide different behavior.
StmtResult RebuildCaseStmtBody(Stmt *S, Stmt *Body) {
// The body is attached to the case statement in place; return the (now
// updated) case statement itself.
getSema().ActOnCaseStmtBody(S, Body);
return S;
}
/// Build a new default statement.
///
/// By default, performs semantic analysis to build the new statement.
/// Subclasses may override this routine to provide different behavior.
StmtResult RebuildDefaultStmt(SourceLocation DefaultLoc,
SourceLocation ColonLoc,
Stmt *SubStmt) {
return getSema().ActOnDefaultStmt(DefaultLoc, ColonLoc, SubStmt,
/*CurScope=*/nullptr);
}
/// Build a new label statement.
///
/// By default, performs semantic analysis to build the new statement.
/// Subclasses may override this routine to provide different behavior.
StmtResult RebuildLabelStmt(SourceLocation IdentLoc, LabelDecl *L,
SourceLocation ColonLoc, Stmt *SubStmt) {
return SemaRef.ActOnLabelStmt(IdentLoc, L, ColonLoc, SubStmt);
}
/// Build a new attributed statement.
///
/// By default, performs semantic analysis to build the new statement.
/// Subclasses may override this routine to provide different behavior.
StmtResult RebuildAttributedStmt(SourceLocation AttrLoc,
ArrayRef<const Attr*> Attrs,
Stmt *SubStmt) {
return SemaRef.ActOnAttributedStmt(AttrLoc, Attrs, SubStmt);
}
/// Build a new "if" statement.
///
/// By default, performs semantic analysis to build the new statement.
/// Subclasses may override this routine to provide different behavior.
StmtResult RebuildIfStmt(SourceLocation IfLoc, bool IsConstexpr,
Sema::ConditionResult Cond, Stmt *Init, Stmt *Then,
SourceLocation ElseLoc, Stmt *Else) {
return getSema().ActOnIfStmt(IfLoc, IsConstexpr, Init, Cond, Then,
ElseLoc, Else);
}
/// Start building a new switch statement.
///
/// By default, performs semantic analysis to build the new statement.
/// Subclasses may override this routine to provide different behavior.
StmtResult RebuildSwitchStmtStart(SourceLocation SwitchLoc, Stmt *Init,
Sema::ConditionResult Cond) {
return getSema().ActOnStartOfSwitchStmt(SwitchLoc, Init, Cond);
}
/// Attach the body to the switch statement.
///
/// By default, performs semantic analysis to build the new statement.
/// Subclasses may override this routine to provide different behavior.
StmtResult RebuildSwitchStmtBody(SourceLocation SwitchLoc,
Stmt *Switch, Stmt *Body) {
return getSema().ActOnFinishSwitchStmt(SwitchLoc, Switch, Body);
}
/// Build a new while statement.
///
/// By default, performs semantic analysis to build the new statement.
/// Subclasses may override this routine to provide different behavior.
StmtResult RebuildWhileStmt(SourceLocation WhileLoc,
Sema::ConditionResult Cond, Stmt *Body) {
return getSema().ActOnWhileStmt(WhileLoc, Cond, Body);
}
/// Build a new do-while statement.
///
/// By default, performs semantic analysis to build the new statement.
/// Subclasses may override this routine to provide different behavior.
StmtResult RebuildDoStmt(SourceLocation DoLoc, Stmt *Body,
SourceLocation WhileLoc, SourceLocation LParenLoc,
Expr *Cond, SourceLocation RParenLoc) {
return getSema().ActOnDoStmt(DoLoc, Body, WhileLoc, LParenLoc,
Cond, RParenLoc);
}
/// Build a new for statement.
///
/// By default, performs semantic analysis to build the new statement.
/// Subclasses may override this routine to provide different behavior.
StmtResult RebuildForStmt(SourceLocation ForLoc, SourceLocation LParenLoc,
Stmt *Init, Sema::ConditionResult Cond,
Sema::FullExprArg Inc, SourceLocation RParenLoc,
Stmt *Body) {
return getSema().ActOnForStmt(ForLoc, LParenLoc, Init, Cond,
Inc, RParenLoc, Body);
}
/// Build a new goto statement.
///
/// By default, performs semantic analysis to build the new statement.
/// Subclasses may override this routine to provide different behavior.
StmtResult RebuildGotoStmt(SourceLocation GotoLoc, SourceLocation LabelLoc,
LabelDecl *Label) {
return getSema().ActOnGotoStmt(GotoLoc, LabelLoc, Label);
}
/// Build a new indirect goto statement.
///
/// By default, performs semantic analysis to build the new statement.
/// Subclasses may override this routine to provide different behavior.
StmtResult RebuildIndirectGotoStmt(SourceLocation GotoLoc,
SourceLocation StarLoc,
Expr *Target) {
return getSema().ActOnIndirectGotoStmt(GotoLoc, StarLoc, Target);
}
/// Build a new return statement.
///
/// By default, performs semantic analysis to build the new statement.
/// Subclasses may override this routine to provide different behavior.
StmtResult RebuildReturnStmt(SourceLocation ReturnLoc, Expr *Result) {
return getSema().BuildReturnStmt(ReturnLoc, Result);
}
/// Build a new declaration statement.
///
/// By default, performs semantic analysis to build the new statement.
/// Subclasses may override this routine to provide different behavior.
StmtResult RebuildDeclStmt(MutableArrayRef<Decl *> Decls,
SourceLocation StartLoc, SourceLocation EndLoc) {
Sema::DeclGroupPtrTy DG = getSema().BuildDeclaratorGroup(Decls);
return getSema().ActOnDeclStmt(DG, StartLoc, EndLoc);
}
/// Build a new inline asm statement.
///
/// By default, performs semantic analysis to build the new statement.
/// Subclasses may override this routine to provide different behavior.
StmtResult RebuildGCCAsmStmt(SourceLocation AsmLoc, bool IsSimple,
bool IsVolatile, unsigned NumOutputs,
unsigned NumInputs, IdentifierInfo **Names,
MultiExprArg Constraints, MultiExprArg Exprs,
Expr *AsmString, MultiExprArg Clobbers,
unsigned NumLabels,
SourceLocation RParenLoc) {
return getSema().ActOnGCCAsmStmt(AsmLoc, IsSimple, IsVolatile, NumOutputs,
NumInputs, Names, Constraints, Exprs,
AsmString, Clobbers, NumLabels, RParenLoc);
}
/// Build a new MS style inline asm statement.
///
/// By default, performs semantic analysis to build the new statement.
/// Subclasses may override this routine to provide different behavior.
StmtResult RebuildMSAsmStmt(SourceLocation AsmLoc, SourceLocation LBraceLoc,
ArrayRef<Token> AsmToks,
StringRef AsmString,
unsigned NumOutputs, unsigned NumInputs,
ArrayRef<StringRef> Constraints,
ArrayRef<StringRef> Clobbers,
ArrayRef<Expr*> Exprs,
SourceLocation EndLoc) {
return getSema().ActOnMSAsmStmt(AsmLoc, LBraceLoc, AsmToks, AsmString,
NumOutputs, NumInputs,
Constraints, Clobbers, Exprs, EndLoc);
}
/// Build a new co_return statement.
///
/// By default, performs semantic analysis to build the new statement.
/// Subclasses may override this routine to provide different behavior.
StmtResult RebuildCoreturnStmt(SourceLocation CoreturnLoc, Expr *Result,
bool IsImplicit) {
return getSema().BuildCoreturnStmt(CoreturnLoc, Result, IsImplicit);
}
/// Build a new co_await expression.
///
/// By default, performs semantic analysis to build the new expression.
/// Subclasses may override this routine to provide different behavior.
ExprResult RebuildCoawaitExpr(SourceLocation CoawaitLoc, Expr *Result,
bool IsImplicit) {
return getSema().BuildResolvedCoawaitExpr(CoawaitLoc, Result, IsImplicit);
}
/// Build a new co_await expression.
///
/// By default, performs semantic analysis to build the new expression.
/// Subclasses may override this routine to provide different behavior.
ExprResult RebuildDependentCoawaitExpr(SourceLocation CoawaitLoc,
Expr *Result,
UnresolvedLookupExpr *Lookup) {
return getSema().BuildUnresolvedCoawaitExpr(CoawaitLoc, Result, Lookup);
}
/// Build a new co_yield expression.
///
/// By default, performs semantic analysis to build the new expression.
/// Subclasses may override this routine to provide different behavior.
ExprResult RebuildCoyieldExpr(SourceLocation CoyieldLoc, Expr *Result) {
return getSema().BuildCoyieldExpr(CoyieldLoc, Result);
}
/// Build a new coroutine body statement from the given constructor
/// arguments.
///
/// By default, performs semantic analysis to build the new statement.
/// Subclasses may override this routine to provide different behavior.
StmtResult RebuildCoroutineBodyStmt(CoroutineBodyStmt::CtorArgs Args) {
return getSema().BuildCoroutineBodyStmt(Args);
}
/// Build a new Objective-C \@try statement.
///
/// By default, performs semantic analysis to build the new statement.
/// Subclasses may override this routine to provide different behavior.
StmtResult RebuildObjCAtTryStmt(SourceLocation AtLoc,
Stmt *TryBody,
MultiStmtArg CatchStmts,
Stmt *Finally) {
return getSema().ActOnObjCAtTryStmt(AtLoc, TryBody, CatchStmts,
Finally);
}
/// Rebuild an Objective-C exception declaration.
///
/// By default, performs semantic analysis to build the new declaration.
/// Subclasses may override this routine to provide different behavior.
VarDecl *RebuildObjCExceptionDecl(VarDecl *ExceptionDecl,
TypeSourceInfo *TInfo, QualType T) {
// Source locations and the identifier are reused from the original
// declaration; only the type information is replaced.
return getSema().BuildObjCExceptionDecl(TInfo, T,
ExceptionDecl->getInnerLocStart(),
ExceptionDecl->getLocation(),
ExceptionDecl->getIdentifier());
}
/// Build a new Objective-C \@catch statement.
///
/// By default, performs semantic analysis to build the new statement.
/// Subclasses may override this routine to provide different behavior.
StmtResult RebuildObjCAtCatchStmt(SourceLocation AtLoc,
SourceLocation RParenLoc,
VarDecl *Var,
Stmt *Body) {
return getSema().ActOnObjCAtCatchStmt(AtLoc, RParenLoc,
Var, Body);
}
/// Build a new Objective-C \@finally statement.
///
/// By default, performs semantic analysis to build the new statement.
/// Subclasses may override this routine to provide different behavior.
StmtResult RebuildObjCAtFinallyStmt(SourceLocation AtLoc,
Stmt *Body) {
return getSema().ActOnObjCAtFinallyStmt(AtLoc, Body);
}
/// Build a new Objective-C \@throw statement.
///
/// By default, performs semantic analysis to build the new statement.
/// Subclasses may override this routine to provide different behavior.
StmtResult RebuildObjCAtThrowStmt(SourceLocation AtLoc,
Expr *Operand) {
return getSema().BuildObjCAtThrowStmt(AtLoc, Operand);
}
/// Build a new OpenMP executable directive.
///
/// By default, performs semantic analysis to build the new statement.
/// Subclasses may override this routine to provide different behavior.
StmtResult RebuildOMPExecutableDirective(OpenMPDirectiveKind Kind,
DeclarationNameInfo DirName,
OpenMPDirectiveKind CancelRegion,
ArrayRef<OMPClause *> Clauses,
Stmt *AStmt, SourceLocation StartLoc,
SourceLocation EndLoc) {
return getSema().ActOnOpenMPExecutableDirective(
Kind, DirName, CancelRegion, Clauses, AStmt, StartLoc, EndLoc);
}
/// Build a new OpenMP 'if' clause.
///
/// \p NameModifier is the optional directive-name modifier
/// (e.g. 'if(parallel: ...)').
///
/// By default, performs semantic analysis to build the new OpenMP clause.
/// Subclasses may override this routine to provide different behavior.
OMPClause *RebuildOMPIfClause(OpenMPDirectiveKind NameModifier,
Expr *Condition, SourceLocation StartLoc,
SourceLocation LParenLoc,
SourceLocation NameModifierLoc,
SourceLocation ColonLoc,
SourceLocation EndLoc) {
return getSema().ActOnOpenMPIfClause(NameModifier, Condition, StartLoc,
LParenLoc, NameModifierLoc, ColonLoc,
EndLoc);
}
/// Build a new OpenMP 'final' clause.
///
/// By default, performs semantic analysis to build the new OpenMP clause.
/// Subclasses may override this routine to provide different behavior.
OMPClause *RebuildOMPFinalClause(Expr *Condition, SourceLocation StartLoc,
SourceLocation LParenLoc,
SourceLocation EndLoc) {
return getSema().ActOnOpenMPFinalClause(Condition, StartLoc, LParenLoc,
EndLoc);
}
/// Build a new OpenMP 'num_threads' clause.
///
/// By default, performs semantic analysis to build the new OpenMP clause.
/// Subclasses may override this routine to provide different behavior.
OMPClause *RebuildOMPNumThreadsClause(Expr *NumThreads,
SourceLocation StartLoc,
SourceLocation LParenLoc,
SourceLocation EndLoc) {
return getSema().ActOnOpenMPNumThreadsClause(NumThreads, StartLoc,
LParenLoc, EndLoc);
}
/// Build a new OpenMP 'safelen' clause.
///
/// By default, performs semantic analysis to build the new OpenMP clause.
/// Subclasses may override this routine to provide different behavior.
OMPClause *RebuildOMPSafelenClause(Expr *Len, SourceLocation StartLoc,
SourceLocation LParenLoc,
SourceLocation EndLoc) {
return getSema().ActOnOpenMPSafelenClause(Len, StartLoc, LParenLoc, EndLoc);
}
/// Build a new OpenMP 'simdlen' clause.
///
/// By default, performs semantic analysis to build the new OpenMP clause.
/// Subclasses may override this routine to provide different behavior.
OMPClause *RebuildOMPSimdlenClause(Expr *Len, SourceLocation StartLoc,
SourceLocation LParenLoc,
SourceLocation EndLoc) {
return getSema().ActOnOpenMPSimdlenClause(Len, StartLoc, LParenLoc, EndLoc);
}
/// Build a new OpenMP 'allocator' clause.
///
/// By default, performs semantic analysis to build the new OpenMP clause.
/// Subclasses may override this routine to provide different behavior.
OMPClause *RebuildOMPAllocatorClause(Expr *A, SourceLocation StartLoc,
SourceLocation LParenLoc,
SourceLocation EndLoc) {
return getSema().ActOnOpenMPAllocatorClause(A, StartLoc, LParenLoc, EndLoc);
}
/// Build a new OpenMP 'collapse' clause.
///
/// By default, performs semantic analysis to build the new OpenMP clause.
/// Subclasses may override this routine to provide different behavior.
OMPClause *RebuildOMPCollapseClause(Expr *Num, SourceLocation StartLoc,
SourceLocation LParenLoc,
SourceLocation EndLoc) {
return getSema().ActOnOpenMPCollapseClause(Num, StartLoc, LParenLoc,
EndLoc);
}
/// Build a new OpenMP 'default' clause.
///
/// By default, performs semantic analysis to build the new OpenMP clause.
/// Subclasses may override this routine to provide different behavior.
OMPClause *RebuildOMPDefaultClause(OpenMPDefaultClauseKind Kind,
SourceLocation KindKwLoc,
SourceLocation StartLoc,
SourceLocation LParenLoc,
SourceLocation EndLoc) {
return getSema().ActOnOpenMPDefaultClause(Kind, KindKwLoc,
StartLoc, LParenLoc, EndLoc);
}
/// Build a new OpenMP 'proc_bind' clause.
///
/// By default, performs semantic analysis to build the new OpenMP clause.
/// Subclasses may override this routine to provide different behavior.
OMPClause *RebuildOMPProcBindClause(OpenMPProcBindClauseKind Kind,
SourceLocation KindKwLoc,
SourceLocation StartLoc,
SourceLocation LParenLoc,
SourceLocation EndLoc) {
return getSema().ActOnOpenMPProcBindClause(Kind, KindKwLoc,
StartLoc, LParenLoc, EndLoc);
}
/// Build a new OpenMP 'schedule' clause.
///
/// By default, performs semantic analysis to build the new OpenMP clause.
/// Subclasses may override this routine to provide different behavior.
OMPClause *RebuildOMPScheduleClause(
OpenMPScheduleClauseModifier M1, OpenMPScheduleClauseModifier M2,
OpenMPScheduleClauseKind Kind, Expr *ChunkSize, SourceLocation StartLoc,
SourceLocation LParenLoc, SourceLocation M1Loc, SourceLocation M2Loc,
SourceLocation KindLoc, SourceLocation CommaLoc, SourceLocation EndLoc) {
return getSema().ActOnOpenMPScheduleClause(
M1, M2, Kind, ChunkSize, StartLoc, LParenLoc, M1Loc, M2Loc, KindLoc,
CommaLoc, EndLoc);
}
/// Build a new OpenMP 'ordered' clause.
///
/// \p Num may be null: the 'ordered' clause is valid both with and without
/// a parenthesized argument.
///
/// By default, performs semantic analysis to build the new OpenMP clause.
/// Subclasses may override this routine to provide different behavior.
OMPClause *RebuildOMPOrderedClause(SourceLocation StartLoc,
SourceLocation EndLoc,
SourceLocation LParenLoc, Expr *Num) {
return getSema().ActOnOpenMPOrderedClause(StartLoc, EndLoc, LParenLoc, Num);
}
/// Build a new OpenMP 'private' clause.
///
/// By default, performs semantic analysis to build the new OpenMP clause.
/// Subclasses may override this routine to provide different behavior.
OMPClause *RebuildOMPPrivateClause(ArrayRef<Expr *> VarList,
SourceLocation StartLoc,
SourceLocation LParenLoc,
SourceLocation EndLoc) {
return getSema().ActOnOpenMPPrivateClause(VarList, StartLoc, LParenLoc,
EndLoc);
}
/// Build a new OpenMP 'firstprivate' clause.
///
/// By default, performs semantic analysis to build the new OpenMP clause.
/// Subclasses may override this routine to provide different behavior.
OMPClause *RebuildOMPFirstprivateClause(ArrayRef<Expr *> VarList,
SourceLocation StartLoc,
SourceLocation LParenLoc,
SourceLocation EndLoc) {
return getSema().ActOnOpenMPFirstprivateClause(VarList, StartLoc, LParenLoc,
EndLoc);
}
/// Build a new OpenMP 'lastprivate' clause.
///
/// By default, performs semantic analysis to build the new OpenMP clause.
/// Subclasses may override this routine to provide different behavior.
OMPClause *RebuildOMPLastprivateClause(ArrayRef<Expr *> VarList,
SourceLocation StartLoc,
SourceLocation LParenLoc,
SourceLocation EndLoc) {
return getSema().ActOnOpenMPLastprivateClause(VarList, StartLoc, LParenLoc,
EndLoc);
}
/// Build a new OpenMP 'shared' clause.
///
/// By default, performs semantic analysis to build the new OpenMP clause.
/// Subclasses may override this routine to provide different behavior.
OMPClause *RebuildOMPSharedClause(ArrayRef<Expr *> VarList,
SourceLocation StartLoc,
SourceLocation LParenLoc,
SourceLocation EndLoc) {
return getSema().ActOnOpenMPSharedClause(VarList, StartLoc, LParenLoc,
EndLoc);
}
/// Build a new OpenMP 'reduction' clause.
///
/// By default, performs semantic analysis to build the new statement.
/// Subclasses may override this routine to provide different behavior.
OMPClause *RebuildOMPReductionClause(ArrayRef<Expr *> VarList,
SourceLocation StartLoc,
SourceLocation LParenLoc,
SourceLocation ColonLoc,
SourceLocation EndLoc,
CXXScopeSpec &ReductionIdScopeSpec,
const DeclarationNameInfo &ReductionId,
ArrayRef<Expr *> UnresolvedReductions) {
return getSema().ActOnOpenMPReductionClause(
VarList, StartLoc, LParenLoc, ColonLoc, EndLoc, ReductionIdScopeSpec,
ReductionId, UnresolvedReductions);
}
/// Build a new OpenMP 'task_reduction' clause.
///
/// By default, performs semantic analysis to build the new statement.
/// Subclasses may override this routine to provide different behavior.
OMPClause *RebuildOMPTaskReductionClause(
ArrayRef<Expr *> VarList, SourceLocation StartLoc,
SourceLocation LParenLoc, SourceLocation ColonLoc, SourceLocation EndLoc,
CXXScopeSpec &ReductionIdScopeSpec,
const DeclarationNameInfo &ReductionId,
ArrayRef<Expr *> UnresolvedReductions) {
return getSema().ActOnOpenMPTaskReductionClause(
VarList, StartLoc, LParenLoc, ColonLoc, EndLoc, ReductionIdScopeSpec,
ReductionId, UnresolvedReductions);
}
/// Build a new OpenMP 'in_reduction' clause.
///
/// By default, performs semantic analysis to build the new statement.
/// Subclasses may override this routine to provide different behavior.
OMPClause *
RebuildOMPInReductionClause(ArrayRef<Expr *> VarList, SourceLocation StartLoc,
SourceLocation LParenLoc, SourceLocation ColonLoc,
SourceLocation EndLoc,
CXXScopeSpec &ReductionIdScopeSpec,
const DeclarationNameInfo &ReductionId,
ArrayRef<Expr *> UnresolvedReductions) {
return getSema().ActOnOpenMPInReductionClause(
VarList, StartLoc, LParenLoc, ColonLoc, EndLoc, ReductionIdScopeSpec,
ReductionId, UnresolvedReductions);
}
/// Build a new OpenMP 'linear' clause.
///
/// By default, performs semantic analysis to build the new OpenMP clause.
/// Subclasses may override this routine to provide different behavior.
OMPClause *RebuildOMPLinearClause(ArrayRef<Expr *> VarList, Expr *Step,
SourceLocation StartLoc,
SourceLocation LParenLoc,
OpenMPLinearClauseKind Modifier,
SourceLocation ModifierLoc,
SourceLocation ColonLoc,
SourceLocation EndLoc) {
return getSema().ActOnOpenMPLinearClause(VarList, Step, StartLoc, LParenLoc,
Modifier, ModifierLoc, ColonLoc,
EndLoc);
}
/// Build a new OpenMP 'aligned' clause.
///
/// By default, performs semantic analysis to build the new OpenMP clause.
/// Subclasses may override this routine to provide different behavior.
OMPClause *RebuildOMPAlignedClause(ArrayRef<Expr *> VarList, Expr *Alignment,
SourceLocation StartLoc,
SourceLocation LParenLoc,
SourceLocation ColonLoc,
SourceLocation EndLoc) {
return getSema().ActOnOpenMPAlignedClause(VarList, Alignment, StartLoc,
LParenLoc, ColonLoc, EndLoc);
}
/// Build a new OpenMP 'copyin' clause.
///
/// By default, performs semantic analysis to build the new OpenMP clause.
/// Subclasses may override this routine to provide different behavior.
OMPClause *RebuildOMPCopyinClause(ArrayRef<Expr *> VarList,
SourceLocation StartLoc,
SourceLocation LParenLoc,
SourceLocation EndLoc) {
return getSema().ActOnOpenMPCopyinClause(VarList, StartLoc, LParenLoc,
EndLoc);
}
/// Build a new OpenMP 'copyprivate' clause.
///
/// By default, performs semantic analysis to build the new OpenMP clause.
/// Subclasses may override this routine to provide different behavior.
OMPClause *RebuildOMPCopyprivateClause(ArrayRef<Expr *> VarList,
SourceLocation StartLoc,
SourceLocation LParenLoc,
SourceLocation EndLoc) {
return getSema().ActOnOpenMPCopyprivateClause(VarList, StartLoc, LParenLoc,
EndLoc);
}
/// Build a new OpenMP 'flush' pseudo clause.
///
/// By default, performs semantic analysis to build the new OpenMP clause.
/// Subclasses may override this routine to provide different behavior.
OMPClause *RebuildOMPFlushClause(ArrayRef<Expr *> VarList,
SourceLocation StartLoc,
SourceLocation LParenLoc,
SourceLocation EndLoc) {
return getSema().ActOnOpenMPFlushClause(VarList, StartLoc, LParenLoc,
EndLoc);
}
/// Build a new OpenMP 'depend' pseudo clause.
///
/// By default, performs semantic analysis to build the new OpenMP clause.
/// Subclasses may override this routine to provide different behavior.
OMPClause *
RebuildOMPDependClause(OpenMPDependClauseKind DepKind, SourceLocation DepLoc,
SourceLocation ColonLoc, ArrayRef<Expr *> VarList,
SourceLocation StartLoc, SourceLocation LParenLoc,
SourceLocation EndLoc) {
return getSema().ActOnOpenMPDependClause(DepKind, DepLoc, ColonLoc, VarList,
StartLoc, LParenLoc, EndLoc);
}
/// Build a new OpenMP 'device' clause.
///
/// By default, performs semantic analysis to build the new statement.
/// Subclasses may override this routine to provide different behavior.
OMPClause *RebuildOMPDeviceClause(Expr *Device, SourceLocation StartLoc,
SourceLocation LParenLoc,
SourceLocation EndLoc) {
return getSema().ActOnOpenMPDeviceClause(Device, StartLoc, LParenLoc,
EndLoc);
}
/// Build a new OpenMP 'map' clause.
///
/// \p UnresolvedMappers carries 'mapper' identifiers that could not yet be
/// resolved and are forwarded to semantic analysis for resolution.
///
/// By default, performs semantic analysis to build the new OpenMP clause.
/// Subclasses may override this routine to provide different behavior.
OMPClause *RebuildOMPMapClause(
ArrayRef<OpenMPMapModifierKind> MapTypeModifiers,
ArrayRef<SourceLocation> MapTypeModifiersLoc,
CXXScopeSpec MapperIdScopeSpec, DeclarationNameInfo MapperId,
OpenMPMapClauseKind MapType, bool IsMapTypeImplicit,
SourceLocation MapLoc, SourceLocation ColonLoc, ArrayRef<Expr *> VarList,
const OMPVarListLocTy &Locs, ArrayRef<Expr *> UnresolvedMappers) {
return getSema().ActOnOpenMPMapClause(MapTypeModifiers, MapTypeModifiersLoc,
MapperIdScopeSpec, MapperId, MapType,
IsMapTypeImplicit, MapLoc, ColonLoc,
VarList, Locs, UnresolvedMappers);
}
/// Build a new OpenMP 'allocate' clause.
///
/// By default, performs semantic analysis to build the new OpenMP clause.
/// Subclasses may override this routine to provide different behavior.
OMPClause *RebuildOMPAllocateClause(Expr *Allocate, ArrayRef<Expr *> VarList,
SourceLocation StartLoc,
SourceLocation LParenLoc,
SourceLocation ColonLoc,
SourceLocation EndLoc) {
return getSema().ActOnOpenMPAllocateClause(Allocate, VarList, StartLoc,
LParenLoc, ColonLoc, EndLoc);
}
/// Build a new OpenMP 'num_teams' clause.
///
/// By default, performs semantic analysis to build the new statement.
/// Subclasses may override this routine to provide different behavior.
OMPClause *RebuildOMPNumTeamsClause(Expr *NumTeams, SourceLocation StartLoc,
SourceLocation LParenLoc,
SourceLocation EndLoc) {
return getSema().ActOnOpenMPNumTeamsClause(NumTeams, StartLoc, LParenLoc,
EndLoc);
}
/// Build a new OpenMP 'thread_limit' clause.
///
/// By default, performs semantic analysis to build the new statement.
/// Subclasses may override this routine to provide different behavior.
OMPClause *RebuildOMPThreadLimitClause(Expr *ThreadLimit,
SourceLocation StartLoc,
SourceLocation LParenLoc,
SourceLocation EndLoc) {
return getSema().ActOnOpenMPThreadLimitClause(ThreadLimit, StartLoc,
LParenLoc, EndLoc);
}
/// Build a new OpenMP 'priority' clause.
///
/// By default, performs semantic analysis to build the new statement.
/// Subclasses may override this routine to provide different behavior.
OMPClause *RebuildOMPPriorityClause(Expr *Priority, SourceLocation StartLoc,
SourceLocation LParenLoc,
SourceLocation EndLoc) {
return getSema().ActOnOpenMPPriorityClause(Priority, StartLoc, LParenLoc,
EndLoc);
}
/// Build a new OpenMP 'grainsize' clause.
///
/// By default, performs semantic analysis to build the new statement.
/// Subclasses may override this routine to provide different behavior.
OMPClause *RebuildOMPGrainsizeClause(Expr *Grainsize, SourceLocation StartLoc,
SourceLocation LParenLoc,
SourceLocation EndLoc) {
return getSema().ActOnOpenMPGrainsizeClause(Grainsize, StartLoc, LParenLoc,
EndLoc);
}
/// Build a new OpenMP 'num_tasks' clause.
///
/// By default, performs semantic analysis to build the new statement.
/// Subclasses may override this routine to provide different behavior.
OMPClause *RebuildOMPNumTasksClause(Expr *NumTasks, SourceLocation StartLoc,
SourceLocation LParenLoc,
SourceLocation EndLoc) {
return getSema().ActOnOpenMPNumTasksClause(NumTasks, StartLoc, LParenLoc,
EndLoc);
}
/// Build a new OpenMP 'hint' clause.
///
/// By default, performs semantic analysis to build the new statement.
/// Subclasses may override this routine to provide different behavior.
OMPClause *RebuildOMPHintClause(Expr *Hint, SourceLocation StartLoc,
SourceLocation LParenLoc,
SourceLocation EndLoc) {
return getSema().ActOnOpenMPHintClause(Hint, StartLoc, LParenLoc, EndLoc);
}
/// Build a new OpenMP 'dist_schedule' clause.
///
/// By default, performs semantic analysis to build the new OpenMP clause.
/// Subclasses may override this routine to provide different behavior.
OMPClause *
RebuildOMPDistScheduleClause(OpenMPDistScheduleClauseKind Kind,
Expr *ChunkSize, SourceLocation StartLoc,
SourceLocation LParenLoc, SourceLocation KindLoc,
SourceLocation CommaLoc, SourceLocation EndLoc) {
return getSema().ActOnOpenMPDistScheduleClause(
Kind, ChunkSize, StartLoc, LParenLoc, KindLoc, CommaLoc, EndLoc);
}
/// Build a new OpenMP 'to' clause.
///
/// By default, performs semantic analysis to build the new statement.
/// Subclasses may override this routine to provide different behavior.
OMPClause *RebuildOMPToClause(ArrayRef<Expr *> VarList,
CXXScopeSpec &MapperIdScopeSpec,
DeclarationNameInfo &MapperId,
const OMPVarListLocTy &Locs,
ArrayRef<Expr *> UnresolvedMappers) {
return getSema().ActOnOpenMPToClause(VarList, MapperIdScopeSpec, MapperId,
Locs, UnresolvedMappers);
}
/// Build a new OpenMP 'from' clause.
///
/// By default, performs semantic analysis to build the new statement.
/// Subclasses may override this routine to provide different behavior.
OMPClause *RebuildOMPFromClause(ArrayRef<Expr *> VarList,
CXXScopeSpec &MapperIdScopeSpec,
DeclarationNameInfo &MapperId,
const OMPVarListLocTy &Locs,
ArrayRef<Expr *> UnresolvedMappers) {
return getSema().ActOnOpenMPFromClause(VarList, MapperIdScopeSpec, MapperId,
Locs, UnresolvedMappers);
}
/// Build a new OpenMP 'use_device_ptr' clause.
///
/// By default, performs semantic analysis to build the new OpenMP clause.
/// Subclasses may override this routine to provide different behavior.
OMPClause *RebuildOMPUseDevicePtrClause(ArrayRef<Expr *> VarList,
const OMPVarListLocTy &Locs) {
return getSema().ActOnOpenMPUseDevicePtrClause(VarList, Locs);
}
/// Build a new OpenMP 'is_device_ptr' clause.
///
/// By default, performs semantic analysis to build the new OpenMP clause.
/// Subclasses may override this routine to provide different behavior.
OMPClause *RebuildOMPIsDevicePtrClause(ArrayRef<Expr *> VarList,
const OMPVarListLocTy &Locs) {
return getSema().ActOnOpenMPIsDevicePtrClause(VarList, Locs);
}
  /// Rebuild the operand to an Objective-C \@synchronized statement.
  ///
  /// By default, performs semantic analysis to build the new statement.
  /// Subclasses may override this routine to provide different behavior.
  ExprResult RebuildObjCAtSynchronizedOperand(SourceLocation atLoc,
                                              Expr *object) {
    return getSema().ActOnObjCAtSynchronizedOperand(atLoc, object);
  }

  /// Build a new Objective-C \@synchronized statement.
  ///
  /// By default, performs semantic analysis to build the new statement.
  /// Subclasses may override this routine to provide different behavior.
  StmtResult RebuildObjCAtSynchronizedStmt(SourceLocation AtLoc,
                                           Expr *Object, Stmt *Body) {
    return getSema().ActOnObjCAtSynchronizedStmt(AtLoc, Object, Body);
  }

  /// Build a new Objective-C \@autoreleasepool statement.
  ///
  /// By default, performs semantic analysis to build the new statement.
  /// Subclasses may override this routine to provide different behavior.
  StmtResult RebuildObjCAutoreleasePoolStmt(SourceLocation AtLoc,
                                            Stmt *Body) {
    return getSema().ActOnObjCAutoreleasePoolStmt(AtLoc, Body);
  }

  /// Build a new Objective-C fast enumeration statement.
  ///
  /// By default, performs semantic analysis to build the new statement.
  /// Subclasses may override this routine to provide different behavior.
  StmtResult RebuildObjCForCollectionStmt(SourceLocation ForLoc,
                                          Stmt *Element,
                                          Expr *Collection,
                                          SourceLocation RParenLoc,
                                          Stmt *Body) {
    // The for-in statement is built in two phases: first the header
    // (element, collection), then the body is attached separately.
    StmtResult ForEachStmt = getSema().ActOnObjCForCollectionStmt(ForLoc,
                                                                  Element,
                                                                  Collection,
                                                                  RParenLoc);
    if (ForEachStmt.isInvalid())
      return StmtError();

    return getSema().FinishObjCForCollectionStmt(ForEachStmt.get(), Body);
  }
  /// Build a new C++ exception declaration.
  ///
  /// By default, performs semantic analysis to build the new declaration.
  /// Subclasses may override this routine to provide different behavior.
  VarDecl *RebuildExceptionDecl(VarDecl *ExceptionDecl,
                                TypeSourceInfo *Declarator,
                                SourceLocation StartLoc,
                                SourceLocation IdLoc,
                                IdentifierInfo *Id) {
    VarDecl *Var = getSema().BuildExceptionDeclaration(nullptr, Declarator,
                                                       StartLoc, IdLoc, Id);
    // Register the rebuilt declaration in the current declaration context so
    // later lookups within the handler can find it.
    if (Var)
      getSema().CurContext->addDecl(Var);
    return Var;
  }

  /// Build a new C++ catch statement.
  ///
  /// By default, performs semantic analysis to build the new statement.
  /// Subclasses may override this routine to provide different behavior.
  StmtResult RebuildCXXCatchStmt(SourceLocation CatchLoc,
                                 VarDecl *ExceptionDecl,
                                 Stmt *Handler) {
    // No additional analysis is needed here; the exception declaration was
    // already checked by RebuildExceptionDecl.
    return Owned(new (getSema().Context) CXXCatchStmt(CatchLoc, ExceptionDecl,
                                                      Handler));
  }

  /// Build a new C++ try statement.
  ///
  /// By default, performs semantic analysis to build the new statement.
  /// Subclasses may override this routine to provide different behavior.
  StmtResult RebuildCXXTryStmt(SourceLocation TryLoc, Stmt *TryBlock,
                               ArrayRef<Stmt *> Handlers) {
    return getSema().ActOnCXXTryBlock(TryLoc, TryBlock, Handlers);
  }
  /// Build a new C++11 range-based for statement.
  ///
  /// By default, performs semantic analysis to build the new statement.
  /// Subclasses may override this routine to provide different behavior.
  StmtResult RebuildCXXForRangeStmt(SourceLocation ForLoc,
                                    SourceLocation CoawaitLoc, Stmt *Init,
                                    SourceLocation ColonLoc, Stmt *Range,
                                    Stmt *Begin, Stmt *End, Expr *Cond,
                                    Expr *Inc, Stmt *LoopVar,
                                    SourceLocation RParenLoc) {
    // If we've just learned that the range is actually an Objective-C
    // collection, treat this as an Objective-C fast enumeration loop.
    if (DeclStmt *RangeStmt = dyn_cast<DeclStmt>(Range)) {
      if (RangeStmt->isSingleDecl()) {
        if (VarDecl *RangeVar = dyn_cast<VarDecl>(RangeStmt->getSingleDecl())) {
          if (RangeVar->isInvalidDecl())
            return StmtError();

          Expr *RangeExpr = RangeVar->getInit();
          if (!RangeExpr->isTypeDependent() &&
              RangeExpr->getType()->isObjCObjectPointerType()) {
            // FIXME: Support init-statements in Objective-C++20 ranged for
            // statement.
            if (Init) {
              return SemaRef.Diag(Init->getBeginLoc(),
                                  diag::err_objc_for_range_init_stmt)
                         << Init->getSourceRange();
            }
            return getSema().ActOnObjCForCollectionStmt(ForLoc, LoopVar,
                                                        RangeExpr, RParenLoc);
          }
        }
      }
    }

    // Otherwise rebuild as an ordinary C++ range-based for statement.
    return getSema().BuildCXXForRangeStmt(ForLoc, CoawaitLoc, Init, ColonLoc,
                                          Range, Begin, End, Cond, Inc, LoopVar,
                                          RParenLoc, Sema::BFRK_Rebuild);
  }
  /// Build a new Microsoft __if_exists or __if_not_exists statement.
  ///
  /// By default, performs semantic analysis to build the new statement.
  /// Subclasses may override this routine to provide different behavior.
  StmtResult RebuildMSDependentExistsStmt(SourceLocation KeywordLoc,
                                          bool IsIfExists,
                                          NestedNameSpecifierLoc QualifierLoc,
                                          DeclarationNameInfo NameInfo,
                                          Stmt *Nested) {
    return getSema().BuildMSDependentExistsStmt(KeywordLoc, IsIfExists,
                                                QualifierLoc, NameInfo, Nested);
  }

  /// Attach body to a C++11 range-based for statement.
  ///
  /// By default, performs semantic analysis to finish the new statement.
  /// Subclasses may override this routine to provide different behavior.
  StmtResult FinishCXXForRangeStmt(Stmt *ForRange, Stmt *Body) {
    return getSema().FinishCXXForRangeStmt(ForRange, Body);
  }

  /// Build a new SEH or C++ __try statement.
  ///
  /// By default, performs semantic analysis to build the new statement.
  StmtResult RebuildSEHTryStmt(bool IsCXXTry, SourceLocation TryLoc,
                               Stmt *TryBlock, Stmt *Handler) {
    return getSema().ActOnSEHTryBlock(IsCXXTry, TryLoc, TryBlock, Handler);
  }

  /// Build a new SEH __except block.
  ///
  /// By default, performs semantic analysis to build the new statement.
  StmtResult RebuildSEHExceptStmt(SourceLocation Loc, Expr *FilterExpr,
                                  Stmt *Block) {
    return getSema().ActOnSEHExceptBlock(Loc, FilterExpr, Block);
  }
StmtResult RebuildSEHFinallyStmt(SourceLocation Loc, Stmt *Block) {
return SEHFinallyStmt::Create(getSema().getASTContext(), Loc, Block);
}
  /// Build a new predefined expression (e.g. __func__, __FUNCTION__).
  ///
  /// By default, performs semantic analysis to build the new expression.
  /// Subclasses may override this routine to provide different behavior.
  ExprResult RebuildPredefinedExpr(SourceLocation Loc,
                                   PredefinedExpr::IdentKind IK) {
    return getSema().BuildPredefinedExpr(Loc, IK);
  }

  /// Build a new expression that references a declaration.
  ///
  /// By default, performs semantic analysis to build the new expression.
  /// Subclasses may override this routine to provide different behavior.
  ExprResult RebuildDeclarationNameExpr(const CXXScopeSpec &SS,
                                        LookupResult &R,
                                        bool RequiresADL) {
    return getSema().BuildDeclarationNameExpr(SS, R, RequiresADL);
  }

  /// Build a new expression that references a declaration.
  ///
  /// By default, performs semantic analysis to build the new expression.
  /// Subclasses may override this routine to provide different behavior.
  ExprResult RebuildDeclRefExpr(NestedNameSpecifierLoc QualifierLoc,
                                ValueDecl *VD,
                                const DeclarationNameInfo &NameInfo,
                                TemplateArgumentListInfo *TemplateArgs) {
    CXXScopeSpec SS;
    SS.Adopt(QualifierLoc);

    // FIXME: loses template args.

    return getSema().BuildDeclarationNameExpr(SS, NameInfo, VD);
  }
/// Build a new expression in parentheses.
///
/// By default, performs semantic analysis to build the new expression.
/// Subclasses may override this routine to provide different behavior.
ExprResult RebuildParenExpr(Expr *SubExpr, SourceLocation LParen,
SourceLocation RParen) {
return getSema().ActOnParenExpr(LParen, RParen, SubExpr);
}
  /// Build a new pseudo-destructor expression.
  ///
  /// By default, performs semantic analysis to build the new expression.
  /// Subclasses may override this routine to provide different behavior.
  /// (Defined out of line, after the class definition.)
  ExprResult RebuildCXXPseudoDestructorExpr(Expr *Base,
                                            SourceLocation OperatorLoc,
                                            bool isArrow,
                                            CXXScopeSpec &SS,
                                            TypeSourceInfo *ScopeType,
                                            SourceLocation CCLoc,
                                            SourceLocation TildeLoc,
                                        PseudoDestructorTypeStorage Destroyed);
  /// Build a new unary operator expression.
  ///
  /// By default, performs semantic analysis to build the new expression.
  /// Subclasses may override this routine to provide different behavior.
  ExprResult RebuildUnaryOperator(SourceLocation OpLoc,
                                  UnaryOperatorKind Opc,
                                  Expr *SubExpr) {
    return getSema().BuildUnaryOp(/*Scope=*/nullptr, OpLoc, Opc, SubExpr);
  }

  /// Build a new builtin offsetof expression.
  ///
  /// By default, performs semantic analysis to build the new expression.
  /// Subclasses may override this routine to provide different behavior.
  ExprResult RebuildOffsetOfExpr(SourceLocation OperatorLoc,
                                 TypeSourceInfo *Type,
                                 ArrayRef<Sema::OffsetOfComponent> Components,
                                 SourceLocation RParenLoc) {
    return getSema().BuildBuiltinOffsetOf(OperatorLoc, Type, Components,
                                          RParenLoc);
  }

  /// Build a new sizeof, alignof or vec_step expression with a
  /// type argument.
  ///
  /// By default, performs semantic analysis to build the new expression.
  /// Subclasses may override this routine to provide different behavior.
  ExprResult RebuildUnaryExprOrTypeTrait(TypeSourceInfo *TInfo,
                                         SourceLocation OpLoc,
                                         UnaryExprOrTypeTrait ExprKind,
                                         SourceRange R) {
    return getSema().CreateUnaryExprOrTypeTraitExpr(TInfo, OpLoc, ExprKind, R);
  }

  /// Build a new sizeof, alignof or vec_step expression with an
  /// expression argument.
  ///
  /// By default, performs semantic analysis to build the new expression.
  /// Subclasses may override this routine to provide different behavior.
  ExprResult RebuildUnaryExprOrTypeTrait(Expr *SubExpr, SourceLocation OpLoc,
                                         UnaryExprOrTypeTrait ExprKind,
                                         SourceRange R) {
    ExprResult Result
      = getSema().CreateUnaryExprOrTypeTraitExpr(SubExpr, OpLoc, ExprKind);
    if (Result.isInvalid())
      return ExprError();

    return Result;
  }

  /// Build a new array subscript expression.
  ///
  /// By default, performs semantic analysis to build the new expression.
  /// Subclasses may override this routine to provide different behavior.
  ExprResult RebuildArraySubscriptExpr(Expr *LHS,
                                       SourceLocation LBracketLoc,
                                       Expr *RHS,
                                       SourceLocation RBracketLoc) {
    return getSema().ActOnArraySubscriptExpr(/*Scope=*/nullptr, LHS,
                                             LBracketLoc, RHS,
                                             RBracketLoc);
  }

  /// Build a new OpenMP array section expression.
  ///
  /// By default, performs semantic analysis to build the new expression.
  /// Subclasses may override this routine to provide different behavior.
  ExprResult RebuildOMPArraySectionExpr(Expr *Base, SourceLocation LBracketLoc,
                                        Expr *LowerBound,
                                        SourceLocation ColonLoc, Expr *Length,
                                        SourceLocation RBracketLoc) {
    return getSema().ActOnOMPArraySectionExpr(Base, LBracketLoc, LowerBound,
                                              ColonLoc, Length, RBracketLoc);
  }

  /// Build a new call expression.
  ///
  /// By default, performs semantic analysis to build the new expression.
  /// Subclasses may override this routine to provide different behavior.
  ExprResult RebuildCallExpr(Expr *Callee, SourceLocation LParenLoc,
                             MultiExprArg Args,
                             SourceLocation RParenLoc,
                             Expr *ExecConfig = nullptr) {
    return getSema().BuildCallExpr(/*Scope=*/nullptr, Callee, LParenLoc, Args,
                                   RParenLoc, ExecConfig);
  }
  /// Build a new member access expression.
  ///
  /// By default, performs semantic analysis to build the new expression.
  /// Subclasses may override this routine to provide different behavior.
  ExprResult RebuildMemberExpr(Expr *Base, SourceLocation OpLoc,
                               bool isArrow,
                               NestedNameSpecifierLoc QualifierLoc,
                               SourceLocation TemplateKWLoc,
                               const DeclarationNameInfo &MemberNameInfo,
                               ValueDecl *Member,
                               NamedDecl *FoundDecl,
                        const TemplateArgumentListInfo *ExplicitTemplateArgs,
                               NamedDecl *FirstQualifierInScope) {
    ExprResult BaseResult = getSema().PerformMemberExprBaseConversion(Base,
                                                                      isArrow);
    if (!Member->getDeclName()) {
      // We have a reference to an unnamed field.  This is always the
      // base of an anonymous struct/union member access, i.e. the
      // field is always of record type.
      assert(Member->getType()->isRecordType() &&
             "unnamed member not of record type?");

      BaseResult =
        getSema().PerformObjectMemberConversion(BaseResult.get(),
                                                QualifierLoc.getNestedNameSpecifier(),
                                                FoundDecl, Member);
      if (BaseResult.isInvalid())
        return ExprError();
      Base = BaseResult.get();

      // Build a direct field reference; the anonymous member has no name, so
      // no qualified lookup is possible or necessary.
      CXXScopeSpec EmptySS;
      return getSema().BuildFieldReferenceExpr(
          Base, isArrow, OpLoc, EmptySS, cast<FieldDecl>(Member),
          DeclAccessPair::make(FoundDecl, FoundDecl->getAccess()), MemberNameInfo);
    }

    CXXScopeSpec SS;
    SS.Adopt(QualifierLoc);

    Base = BaseResult.get();
    QualType BaseType = Base->getType();

    // An arrow access requires a pointer base after the conversion above.
    if (isArrow && !BaseType->isPointerType())
      return ExprError();

    // FIXME: this involves duplicating earlier analysis in a lot of
    // cases; we should avoid this when possible.
    LookupResult R(getSema(), MemberNameInfo, Sema::LookupMemberName);
    R.addDecl(FoundDecl);
    R.resolveKind();

    return getSema().BuildMemberReferenceExpr(Base, BaseType, OpLoc, isArrow,
                                              SS, TemplateKWLoc,
                                              FirstQualifierInScope,
                                              R, ExplicitTemplateArgs,
                                              /*S*/nullptr);
  }
  /// Build a new binary operator expression.
  ///
  /// By default, performs semantic analysis to build the new expression.
  /// Subclasses may override this routine to provide different behavior.
  ExprResult RebuildBinaryOperator(SourceLocation OpLoc,
                                   BinaryOperatorKind Opc,
                                   Expr *LHS, Expr *RHS) {
    return getSema().BuildBinOp(/*Scope=*/nullptr, OpLoc, Opc, LHS, RHS);
  }

  /// Build a new conditional operator expression.
  ///
  /// By default, performs semantic analysis to build the new expression.
  /// Subclasses may override this routine to provide different behavior.
  ExprResult RebuildConditionalOperator(Expr *Cond,
                                        SourceLocation QuestionLoc,
                                        Expr *LHS,
                                        SourceLocation ColonLoc,
                                        Expr *RHS) {
    return getSema().ActOnConditionalOp(QuestionLoc, ColonLoc, Cond,
                                        LHS, RHS);
  }

  /// Build a new C-style cast expression.
  ///
  /// By default, performs semantic analysis to build the new expression.
  /// Subclasses may override this routine to provide different behavior.
  ExprResult RebuildCStyleCastExpr(SourceLocation LParenLoc,
                                   TypeSourceInfo *TInfo,
                                   SourceLocation RParenLoc,
                                   Expr *SubExpr) {
    return getSema().BuildCStyleCastExpr(LParenLoc, TInfo, RParenLoc,
                                         SubExpr);
  }

  /// Build a new compound literal expression.
  ///
  /// By default, performs semantic analysis to build the new expression.
  /// Subclasses may override this routine to provide different behavior.
  ExprResult RebuildCompoundLiteralExpr(SourceLocation LParenLoc,
                                        TypeSourceInfo *TInfo,
                                        SourceLocation RParenLoc,
                                        Expr *Init) {
    return getSema().BuildCompoundLiteralExpr(LParenLoc, TInfo, RParenLoc,
                                              Init);
  }

  /// Build a new extended vector element access expression.
  ///
  /// By default, performs semantic analysis to build the new expression.
  /// Subclasses may override this routine to provide different behavior.
  ExprResult RebuildExtVectorElementExpr(Expr *Base,
                                         SourceLocation OpLoc,
                                         SourceLocation AccessorLoc,
                                         IdentifierInfo &Accessor) {
    // The accessor (e.g. ".xyzw") is treated as an unqualified member name.
    CXXScopeSpec SS;
    DeclarationNameInfo NameInfo(&Accessor, AccessorLoc);
    return getSema().BuildMemberReferenceExpr(Base, Base->getType(),
                                              OpLoc, /*IsArrow*/ false,
                                              SS, SourceLocation(),
                                              /*FirstQualifierInScope*/ nullptr,
                                              NameInfo,
                                              /* TemplateArgs */ nullptr,
                                              /*S*/ nullptr);
  }

  /// Build a new initializer list expression.
  ///
  /// By default, performs semantic analysis to build the new expression.
  /// Subclasses may override this routine to provide different behavior.
  ExprResult RebuildInitList(SourceLocation LBraceLoc,
                             MultiExprArg Inits,
                             SourceLocation RBraceLoc) {
    return SemaRef.ActOnInitList(LBraceLoc, Inits, RBraceLoc);
  }

  /// Build a new designated initializer expression.
  ///
  /// By default, performs semantic analysis to build the new expression.
  /// Subclasses may override this routine to provide different behavior.
  ExprResult RebuildDesignatedInitExpr(Designation &Desig,
                                       MultiExprArg ArrayExprs,
                                       SourceLocation EqualOrColonLoc,
                                       bool GNUSyntax,
                                       Expr *Init) {
    ExprResult Result
      = SemaRef.ActOnDesignatedInitializer(Desig, EqualOrColonLoc, GNUSyntax,
                                           Init);
    if (Result.isInvalid())
      return ExprError();

    return Result;
  }

  /// Build a new value-initialized expression.
  ///
  /// By default, builds the implicit value initialization without performing
  /// any semantic analysis. Subclasses may override this routine to provide
  /// different behavior.
  ExprResult RebuildImplicitValueInitExpr(QualType T) {
    return new (SemaRef.Context) ImplicitValueInitExpr(T);
  }

  /// Build a new \c va_arg expression.
  ///
  /// By default, performs semantic analysis to build the new expression.
  /// Subclasses may override this routine to provide different behavior.
  ExprResult RebuildVAArgExpr(SourceLocation BuiltinLoc,
                              Expr *SubExpr, TypeSourceInfo *TInfo,
                              SourceLocation RParenLoc) {
    return getSema().BuildVAArgExpr(BuiltinLoc,
                                    SubExpr, TInfo,
                                    RParenLoc);
  }

  /// Build a new expression list in parentheses.
  ///
  /// By default, performs semantic analysis to build the new expression.
  /// Subclasses may override this routine to provide different behavior.
  ExprResult RebuildParenListExpr(SourceLocation LParenLoc,
                                  MultiExprArg SubExprs,
                                  SourceLocation RParenLoc) {
    return getSema().ActOnParenListExpr(LParenLoc, RParenLoc, SubExprs);
  }

  /// Build a new address-of-label expression.
  ///
  /// By default, performs semantic analysis, using the name of the label
  /// rather than attempting to map the label statement itself.
  /// Subclasses may override this routine to provide different behavior.
  ExprResult RebuildAddrLabelExpr(SourceLocation AmpAmpLoc,
                                  SourceLocation LabelLoc, LabelDecl *Label) {
    return getSema().ActOnAddrLabel(AmpAmpLoc, LabelLoc, Label);
  }

  /// Build a new GNU statement expression.
  ///
  /// By default, performs semantic analysis to build the new expression.
  /// Subclasses may override this routine to provide different behavior.
  ExprResult RebuildStmtExpr(SourceLocation LParenLoc,
                             Stmt *SubStmt,
                             SourceLocation RParenLoc) {
    return getSema().ActOnStmtExpr(LParenLoc, SubStmt, RParenLoc);
  }

  /// Build a new __builtin_choose_expr expression.
  ///
  /// By default, performs semantic analysis to build the new expression.
  /// Subclasses may override this routine to provide different behavior.
  ExprResult RebuildChooseExpr(SourceLocation BuiltinLoc,
                               Expr *Cond, Expr *LHS, Expr *RHS,
                               SourceLocation RParenLoc) {
    return SemaRef.ActOnChooseExpr(BuiltinLoc,
                                   Cond, LHS, RHS,
                                   RParenLoc);
  }

  /// Build a new generic selection expression.
  ///
  /// By default, performs semantic analysis to build the new expression.
  /// Subclasses may override this routine to provide different behavior.
  ExprResult RebuildGenericSelectionExpr(SourceLocation KeyLoc,
                                         SourceLocation DefaultLoc,
                                         SourceLocation RParenLoc,
                                         Expr *ControllingExpr,
                                         ArrayRef<TypeSourceInfo *> Types,
                                         ArrayRef<Expr *> Exprs) {
    return getSema().CreateGenericSelectionExpr(KeyLoc, DefaultLoc, RParenLoc,
                                                ControllingExpr, Types, Exprs);
  }

  /// Build a new overloaded operator call expression.
  ///
  /// By default, performs semantic analysis to build the new expression.
  /// The semantic analysis provides the behavior of template instantiation,
  /// copying with transformations that turn what looks like an overloaded
  /// operator call into a use of a builtin operator, performing
  /// argument-dependent lookup, etc. Subclasses may override this routine to
  /// provide different behavior. (Defined out of line, after the class
  /// definition.)
  ExprResult RebuildCXXOperatorCallExpr(OverloadedOperatorKind Op,
                                        SourceLocation OpLoc,
                                        Expr *Callee,
                                        Expr *First,
                                        Expr *Second);
  /// Build a new C++ "named" cast expression, such as static_cast or
  /// reinterpret_cast.
  ///
  /// By default, this routine dispatches to one of the more-specific routines
  /// for a particular named case, e.g., RebuildCXXStaticCastExpr().
  /// Subclasses may override this routine to provide different behavior.
  ExprResult RebuildCXXNamedCastExpr(SourceLocation OpLoc,
                                     Stmt::StmtClass Class,
                                     SourceLocation LAngleLoc,
                                     TypeSourceInfo *TInfo,
                                     SourceLocation RAngleLoc,
                                     SourceLocation LParenLoc,
                                     Expr *SubExpr,
                                     SourceLocation RParenLoc) {
    // Dispatch through getDerived() so subclasses can intercept the
    // individual cast kinds.
    switch (Class) {
    case Stmt::CXXStaticCastExprClass:
      return getDerived().RebuildCXXStaticCastExpr(OpLoc, LAngleLoc, TInfo,
                                                   RAngleLoc, LParenLoc,
                                                   SubExpr, RParenLoc);

    case Stmt::CXXDynamicCastExprClass:
      return getDerived().RebuildCXXDynamicCastExpr(OpLoc, LAngleLoc, TInfo,
                                                    RAngleLoc, LParenLoc,
                                                    SubExpr, RParenLoc);

    case Stmt::CXXReinterpretCastExprClass:
      return getDerived().RebuildCXXReinterpretCastExpr(OpLoc, LAngleLoc, TInfo,
                                                        RAngleLoc, LParenLoc,
                                                        SubExpr,
                                                        RParenLoc);

    case Stmt::CXXConstCastExprClass:
      return getDerived().RebuildCXXConstCastExpr(OpLoc, LAngleLoc, TInfo,
                                                  RAngleLoc, LParenLoc,
                                                  SubExpr, RParenLoc);

    default:
      llvm_unreachable("Invalid C++ named cast");
    }
  }
  /// Build a new C++ static_cast expression.
  ///
  /// By default, performs semantic analysis to build the new expression.
  /// Subclasses may override this routine to provide different behavior.
  ExprResult RebuildCXXStaticCastExpr(SourceLocation OpLoc,
                                      SourceLocation LAngleLoc,
                                      TypeSourceInfo *TInfo,
                                      SourceLocation RAngleLoc,
                                      SourceLocation LParenLoc,
                                      Expr *SubExpr,
                                      SourceLocation RParenLoc) {
    return getSema().BuildCXXNamedCast(OpLoc, tok::kw_static_cast,
                                       TInfo, SubExpr,
                                       SourceRange(LAngleLoc, RAngleLoc),
                                       SourceRange(LParenLoc, RParenLoc));
  }

  /// Build a new C++ dynamic_cast expression.
  ///
  /// By default, performs semantic analysis to build the new expression.
  /// Subclasses may override this routine to provide different behavior.
  ExprResult RebuildCXXDynamicCastExpr(SourceLocation OpLoc,
                                       SourceLocation LAngleLoc,
                                       TypeSourceInfo *TInfo,
                                       SourceLocation RAngleLoc,
                                       SourceLocation LParenLoc,
                                       Expr *SubExpr,
                                       SourceLocation RParenLoc) {
    return getSema().BuildCXXNamedCast(OpLoc, tok::kw_dynamic_cast,
                                       TInfo, SubExpr,
                                       SourceRange(LAngleLoc, RAngleLoc),
                                       SourceRange(LParenLoc, RParenLoc));
  }

  /// Build a new C++ reinterpret_cast expression.
  ///
  /// By default, performs semantic analysis to build the new expression.
  /// Subclasses may override this routine to provide different behavior.
  ExprResult RebuildCXXReinterpretCastExpr(SourceLocation OpLoc,
                                           SourceLocation LAngleLoc,
                                           TypeSourceInfo *TInfo,
                                           SourceLocation RAngleLoc,
                                           SourceLocation LParenLoc,
                                           Expr *SubExpr,
                                           SourceLocation RParenLoc) {
    return getSema().BuildCXXNamedCast(OpLoc, tok::kw_reinterpret_cast,
                                       TInfo, SubExpr,
                                       SourceRange(LAngleLoc, RAngleLoc),
                                       SourceRange(LParenLoc, RParenLoc));
  }

  /// Build a new C++ const_cast expression.
  ///
  /// By default, performs semantic analysis to build the new expression.
  /// Subclasses may override this routine to provide different behavior.
  ExprResult RebuildCXXConstCastExpr(SourceLocation OpLoc,
                                     SourceLocation LAngleLoc,
                                     TypeSourceInfo *TInfo,
                                     SourceLocation RAngleLoc,
                                     SourceLocation LParenLoc,
                                     Expr *SubExpr,
                                     SourceLocation RParenLoc) {
    return getSema().BuildCXXNamedCast(OpLoc, tok::kw_const_cast,
                                       TInfo, SubExpr,
                                       SourceRange(LAngleLoc, RAngleLoc),
                                       SourceRange(LParenLoc, RParenLoc));
  }
  /// Build a new C++ functional-style cast expression.
  ///
  /// By default, performs semantic analysis to build the new expression.
  /// Subclasses may override this routine to provide different behavior.
  ExprResult RebuildCXXFunctionalCastExpr(TypeSourceInfo *TInfo,
                                          SourceLocation LParenLoc,
                                          Expr *Sub,
                                          SourceLocation RParenLoc,
                                          bool ListInitialization) {
    // A functional cast is a type-construction with a single argument.
    return getSema().BuildCXXTypeConstructExpr(TInfo, LParenLoc,
                                               MultiExprArg(&Sub, 1), RParenLoc,
                                               ListInitialization);
  }

  /// Build a new C++ typeid(type) expression.
  ///
  /// By default, performs semantic analysis to build the new expression.
  /// Subclasses may override this routine to provide different behavior.
  ExprResult RebuildCXXTypeidExpr(QualType TypeInfoType,
                                  SourceLocation TypeidLoc,
                                  TypeSourceInfo *Operand,
                                  SourceLocation RParenLoc) {
    return getSema().BuildCXXTypeId(TypeInfoType, TypeidLoc, Operand,
                                    RParenLoc);
  }

  /// Build a new C++ typeid(expr) expression.
  ///
  /// By default, performs semantic analysis to build the new expression.
  /// Subclasses may override this routine to provide different behavior.
  ExprResult RebuildCXXTypeidExpr(QualType TypeInfoType,
                                  SourceLocation TypeidLoc,
                                  Expr *Operand,
                                  SourceLocation RParenLoc) {
    return getSema().BuildCXXTypeId(TypeInfoType, TypeidLoc, Operand,
                                    RParenLoc);
  }

  /// Build a new C++ __uuidof(type) expression.
  ///
  /// By default, performs semantic analysis to build the new expression.
  /// Subclasses may override this routine to provide different behavior.
  ExprResult RebuildCXXUuidofExpr(QualType TypeInfoType,
                                  SourceLocation TypeidLoc,
                                  TypeSourceInfo *Operand,
                                  SourceLocation RParenLoc) {
    return getSema().BuildCXXUuidof(TypeInfoType, TypeidLoc, Operand,
                                    RParenLoc);
  }

  /// Build a new C++ __uuidof(expr) expression.
  ///
  /// By default, performs semantic analysis to build the new expression.
  /// Subclasses may override this routine to provide different behavior.
  ExprResult RebuildCXXUuidofExpr(QualType TypeInfoType,
                                  SourceLocation TypeidLoc,
                                  Expr *Operand,
                                  SourceLocation RParenLoc) {
    return getSema().BuildCXXUuidof(TypeInfoType, TypeidLoc, Operand,
                                    RParenLoc);
  }
/// Build a new C++ "this" expression.
///
/// By default, builds a new "this" expression without performing any
/// semantic analysis. Subclasses may override this routine to provide
/// different behavior.
ExprResult RebuildCXXThisExpr(SourceLocation ThisLoc,
QualType ThisType,
bool isImplicit) {
return getSema().BuildCXXThisExpr(ThisLoc, ThisType, isImplicit);
}
/// Build a new C++ throw expression.
///
/// By default, performs semantic analysis to build the new expression.
/// Subclasses may override this routine to provide different behavior.
ExprResult RebuildCXXThrowExpr(SourceLocation ThrowLoc, Expr *Sub,
bool IsThrownVariableInScope) {
return getSema().BuildCXXThrow(ThrowLoc, Sub, IsThrownVariableInScope);
}
/// Build a new C++ default-argument expression.
///
/// By default, builds a new default-argument expression, which does not
/// require any semantic analysis. Subclasses may override this routine to
/// provide different behavior.
ExprResult RebuildCXXDefaultArgExpr(SourceLocation Loc, ParmVarDecl *Param) {
return CXXDefaultArgExpr::Create(getSema().Context, Loc, Param,
getSema().CurContext);
}
/// Build a new C++11 default-initialization expression.
///
/// By default, builds a new default field initialization expression, which
/// does not require any semantic analysis. Subclasses may override this
/// routine to provide different behavior.
ExprResult RebuildCXXDefaultInitExpr(SourceLocation Loc,
FieldDecl *Field) {
return CXXDefaultInitExpr::Create(getSema().Context, Loc, Field,
getSema().CurContext);
}
/// Build a new C++ zero-initialization expression, e.g. "T()".
///
/// The default implementation performs semantic analysis; subclasses may
/// override this routine to provide different behavior.
ExprResult RebuildCXXScalarValueInitExpr(TypeSourceInfo *TSInfo,
                                         SourceLocation LParenLoc,
                                         SourceLocation RParenLoc) {
  // Zero-initialization is a type construction with an empty argument list.
  return getSema().BuildCXXTypeConstructExpr(TSInfo, LParenLoc, None,
                                             RParenLoc,
                                             /*ListInitialization=*/false);
}
/// Build a new C++ "new" expression.
///
/// The default implementation delegates to Sema for semantic analysis.
/// Subclasses may override this routine to provide different behavior.
ExprResult RebuildCXXNewExpr(SourceLocation StartLoc,
                             bool UseGlobal,
                             SourceLocation PlacementLParen,
                             MultiExprArg PlacementArgs,
                             SourceLocation PlacementRParen,
                             SourceRange TypeIdParens,
                             QualType AllocatedType,
                             TypeSourceInfo *AllocatedTypeInfo,
                             Optional<Expr *> ArraySize,
                             SourceRange DirectInitRange,
                             Expr *Initializer) {
  return getSema().BuildCXXNew(StartLoc, UseGlobal, PlacementLParen,
                               PlacementArgs, PlacementRParen, TypeIdParens,
                               AllocatedType, AllocatedTypeInfo, ArraySize,
                               DirectInitRange, Initializer);
}
/// Build a new C++ "delete" expression.
///
/// The default implementation delegates to Sema for semantic analysis.
/// Subclasses may override this routine to provide different behavior.
ExprResult RebuildCXXDeleteExpr(SourceLocation StartLoc,
                                bool IsGlobalDelete,
                                bool IsArrayForm,
                                Expr *Operand) {
  Sema &S = getSema();
  return S.ActOnCXXDelete(StartLoc, IsGlobalDelete, IsArrayForm, Operand);
}
/// Build a new type trait expression (e.g. __is_trivially_copyable).
///
/// The default implementation delegates to Sema for semantic analysis.
/// Subclasses may override this routine to provide different behavior.
ExprResult RebuildTypeTrait(TypeTrait Trait,
                            SourceLocation StartLoc,
                            ArrayRef<TypeSourceInfo *> Args,
                            SourceLocation RParenLoc) {
  Sema &S = getSema();
  return S.BuildTypeTrait(Trait, StartLoc, Args, RParenLoc);
}
/// Build a new array type trait expression (e.g. __array_rank).
///
/// The default implementation delegates to Sema for semantic analysis.
/// Subclasses may override this routine to provide different behavior.
ExprResult RebuildArrayTypeTrait(ArrayTypeTrait Trait,
                                 SourceLocation StartLoc,
                                 TypeSourceInfo *TSInfo,
                                 Expr *DimExpr,
                                 SourceLocation RParenLoc) {
  return getSema().BuildArrayTypeTrait(Trait, StartLoc, TSInfo, DimExpr,
                                       RParenLoc);
}
/// Build a new expression trait expression (e.g. __is_lvalue_expr).
///
/// The default implementation delegates to Sema for semantic analysis.
/// Subclasses may override this routine to provide different behavior.
ExprResult RebuildExpressionTrait(ExpressionTrait Trait,
                                  SourceLocation StartLoc,
                                  Expr *Queried,
                                  SourceLocation RParenLoc) {
  Sema &S = getSema();
  return S.BuildExpressionTrait(Trait, StartLoc, Queried, RParenLoc);
}
/// Build a new (previously unresolved) declaration reference
/// expression.
///
/// The default implementation performs semantic analysis; subclasses may
/// override this routine to provide different behavior.
ExprResult RebuildDependentScopeDeclRefExpr(
    NestedNameSpecifierLoc QualifierLoc,
    SourceLocation TemplateKWLoc,
    const DeclarationNameInfo &NameInfo,
    const TemplateArgumentListInfo *TemplateArgs,
    bool IsAddressOfOperand,
    TypeSourceInfo **RecoveryTSI) {
  CXXScopeSpec SS;
  SS.Adopt(QualifierLoc);

  // Explicit template arguments or a 'template' keyword force the
  // template-id path.
  const bool IsTemplateId = TemplateArgs || TemplateKWLoc.isValid();
  if (IsTemplateId)
    return getSema().BuildQualifiedTemplateIdExpr(SS, TemplateKWLoc, NameInfo,
                                                  TemplateArgs);

  return getSema().BuildQualifiedDeclarationNameExpr(
      SS, NameInfo, IsAddressOfOperand, /*S*/nullptr, RecoveryTSI);
}
/// Build a new template-id expression.
///
/// The default implementation delegates to Sema for semantic analysis.
/// Subclasses may override this routine to provide different behavior.
ExprResult RebuildTemplateIdExpr(const CXXScopeSpec &SS,
                                 SourceLocation TemplateKWLoc,
                                 LookupResult &R,
                                 bool RequiresADL,
                                 const TemplateArgumentListInfo *TemplateArgs) {
  Sema &S = getSema();
  return S.BuildTemplateIdExpr(SS, TemplateKWLoc, R, RequiresADL,
                               TemplateArgs);
}
/// Build a new object-construction expression.
///
/// The default implementation performs semantic analysis; subclasses may
/// override this routine to provide different behavior.
ExprResult RebuildCXXConstructExpr(QualType T,
                                   SourceLocation Loc,
                                   CXXConstructorDecl *Constructor,
                                   bool IsElidable,
                                   MultiExprArg Args,
                                   bool HadMultipleCandidates,
                                   bool ListInitialization,
                                   bool StdInitListInitialization,
                                   bool RequiresZeroInit,
                                   CXXConstructExpr::ConstructionKind ConstructKind,
                                   SourceRange ParenRange) {
  // Convert the arguments to the constructor's parameter types first; a
  // failure here aborts the rebuild.
  SmallVector<Expr *, 8> ConvertedArgs;
  if (getSema().CompleteConstructorCall(Constructor, Args, Loc,
                                        ConvertedArgs))
    return ExprError();

  return getSema().BuildCXXConstructExpr(
      Loc, T, Constructor, IsElidable, ConvertedArgs, HadMultipleCandidates,
      ListInitialization, StdInitListInitialization, RequiresZeroInit,
      ConstructKind, ParenRange);
}
/// Build a new implicit construction via inherited constructor
/// expression.
ExprResult RebuildCXXInheritedCtorInitExpr(QualType T, SourceLocation Loc,
                                           CXXConstructorDecl *Constructor,
                                           bool ConstructsVBase,
                                           bool InheritedFromVBase) {
  // The node is created directly; no additional analysis is performed here.
  ASTContext &Ctx = getSema().Context;
  return new (Ctx) CXXInheritedCtorInitExpr(Loc, T, Constructor,
                                            ConstructsVBase,
                                            InheritedFromVBase);
}
/// Build a new object-construction expression for a temporary object,
/// e.g. "T(a, b)" or "T{a, b}".
///
/// The default implementation performs semantic analysis; subclasses may
/// override this routine to provide different behavior.
ExprResult RebuildCXXTemporaryObjectExpr(TypeSourceInfo *TSInfo,
                                         SourceLocation LParenOrBraceLoc,
                                         MultiExprArg Args,
                                         SourceLocation RParenOrBraceLoc,
                                         bool ListInitialization) {
  Sema &S = getSema();
  return S.BuildCXXTypeConstructExpr(TSInfo, LParenOrBraceLoc, Args,
                                     RParenOrBraceLoc, ListInitialization);
}
/// Build a new object-construction expression for a dependent type.
///
/// The default implementation performs semantic analysis; subclasses may
/// override this routine to provide different behavior.
ExprResult RebuildCXXUnresolvedConstructExpr(TypeSourceInfo *TSInfo,
                                             SourceLocation LParenLoc,
                                             MultiExprArg Args,
                                             SourceLocation RParenLoc,
                                             bool ListInitialization) {
  Sema &S = getSema();
  return S.BuildCXXTypeConstructExpr(TSInfo, LParenLoc, Args, RParenLoc,
                                     ListInitialization);
}
/// Build a new member reference expression into a dependent scope.
///
/// The default implementation performs semantic analysis; subclasses may
/// override this routine to provide different behavior.
ExprResult RebuildCXXDependentScopeMemberExpr(Expr *BaseE,
                                              QualType BaseType,
                                              bool IsArrow,
                                              SourceLocation OperatorLoc,
                                              NestedNameSpecifierLoc QualifierLoc,
                                              SourceLocation TemplateKWLoc,
                                              NamedDecl *FirstQualifierInScope,
                                              const DeclarationNameInfo &MemberNameInfo,
                                              const TemplateArgumentListInfo *TemplateArgs) {
  // Wrap the qualifier in a scope spec before handing it to Sema.
  CXXScopeSpec SS;
  SS.Adopt(QualifierLoc);

  return SemaRef.BuildMemberReferenceExpr(
      BaseE, BaseType, OperatorLoc, IsArrow, SS, TemplateKWLoc,
      FirstQualifierInScope, MemberNameInfo, TemplateArgs, /*S*/nullptr);
}
/// Build a new member reference expression from an overload set.
///
/// The default implementation performs semantic analysis; subclasses may
/// override this routine to provide different behavior.
ExprResult RebuildUnresolvedMemberExpr(Expr *BaseE, QualType BaseType,
                                       SourceLocation OperatorLoc,
                                       bool IsArrow,
                                       NestedNameSpecifierLoc QualifierLoc,
                                       SourceLocation TemplateKWLoc,
                                       NamedDecl *FirstQualifierInScope,
                                       LookupResult &R,
                                       const TemplateArgumentListInfo *TemplateArgs) {
  // Wrap the qualifier in a scope spec before handing it to Sema.
  CXXScopeSpec SS;
  SS.Adopt(QualifierLoc);

  return SemaRef.BuildMemberReferenceExpr(
      BaseE, BaseType, OperatorLoc, IsArrow, SS, TemplateKWLoc,
      FirstQualifierInScope, R, TemplateArgs, /*S*/nullptr);
}
/// Build a new noexcept expression.
///
/// The default implementation delegates to Sema for semantic analysis.
/// Subclasses may override this routine to provide different behavior.
ExprResult RebuildCXXNoexceptExpr(SourceRange Range, Expr *Arg) {
  // The range's endpoints supply the keyword and right-paren locations.
  return SemaRef.BuildCXXNoexceptExpr(Range.getBegin(), Arg, Range.getEnd());
}
/// Build a new expression to compute the length of a parameter pack
/// (sizeof...(pack)). The node is created directly.
ExprResult RebuildSizeOfPackExpr(SourceLocation OperatorLoc,
                                 NamedDecl *Pack,
                                 SourceLocation PackLoc,
                                 SourceLocation RParenLoc,
                                 Optional<unsigned> Length,
                                 ArrayRef<TemplateArgument> PartialArgs) {
  ASTContext &Ctx = SemaRef.Context;
  return SizeOfPackExpr::Create(Ctx, OperatorLoc, Pack, PackLoc, RParenLoc,
                                Length, PartialArgs);
}
/// Build a new expression representing a call to a source location
/// builtin (e.g. __builtin_FILE, __builtin_LINE).
///
/// The default implementation delegates to Sema for semantic analysis.
/// Subclasses may override this routine to provide different behavior.
ExprResult RebuildSourceLocExpr(SourceLocExpr::IdentKind Kind,
                                SourceLocation BuiltinLoc,
                                SourceLocation RPLoc,
                                DeclContext *ParentContext) {
  Sema &S = getSema();
  return S.BuildSourceLocExpr(Kind, BuiltinLoc, RPLoc, ParentContext);
}
/// Build a new Objective-C boxed expression, e.g. @(value).
///
/// The default implementation delegates to Sema for semantic analysis.
/// Subclasses may override this routine to provide different behavior.
ExprResult RebuildObjCBoxedExpr(SourceRange SR, Expr *ValueExpr) {
  Sema &S = getSema();
  return S.BuildObjCBoxedExpr(SR, ValueExpr);
}
/// Build a new Objective-C array literal, e.g. @[a, b].
///
/// The default implementation delegates to Sema for semantic analysis.
/// Subclasses may override this routine to provide different behavior.
ExprResult RebuildObjCArrayLiteral(SourceRange Range,
                                   Expr **Elements, unsigned NumElements) {
  // Package the raw pointer/count pair as a MultiExprArg for Sema.
  MultiExprArg Args(Elements, NumElements);
  return getSema().BuildObjCArrayLiteral(Range, Args);
}
/// Build a new Objective-C subscript (indexing) expression.
///
/// The default implementation delegates to Sema for semantic analysis.
/// Subclasses may override this routine to provide different behavior.
ExprResult RebuildObjCSubscriptRefExpr(SourceLocation RB,
                                       Expr *Base, Expr *Key,
                                       ObjCMethodDecl *getterMethod,
                                       ObjCMethodDecl *setterMethod) {
  Sema &S = getSema();
  return S.BuildObjCSubscriptExpression(RB, Base, Key, getterMethod,
                                        setterMethod);
}
/// Build a new Objective-C dictionary literal, e.g. @{k : v}.
///
/// The default implementation delegates to Sema for semantic analysis.
/// Subclasses may override this routine to provide different behavior.
ExprResult RebuildObjCDictionaryLiteral(SourceRange Range,
                                        MutableArrayRef<ObjCDictionaryElement> Elements) {
  Sema &S = getSema();
  return S.BuildObjCDictionaryLiteral(Range, Elements);
}
/// Build a new Objective-C \@encode expression.
///
/// The default implementation delegates to Sema for semantic analysis.
/// Subclasses may override this routine to provide different behavior.
ExprResult RebuildObjCEncodeExpr(SourceLocation AtLoc,
                                 TypeSourceInfo *EncodeTypeInfo,
                                 SourceLocation RParenLoc) {
  return SemaRef.BuildObjCEncodeExpression(AtLoc, EncodeTypeInfo,
                                           RParenLoc);
}
/// Build a new Objective-C class message, e.g. [SomeClass method:arg].
ExprResult RebuildObjCMessageExpr(TypeSourceInfo *ReceiverTypeInfo,
                                  Selector Sel,
                                  ArrayRef<SourceLocation> SelectorLocs,
                                  ObjCMethodDecl *Method,
                                  SourceLocation LBracLoc,
                                  MultiExprArg Args,
                                  SourceLocation RBracLoc) {
  // Not a 'super' message, so the super location is empty.
  return SemaRef.BuildClassMessage(ReceiverTypeInfo,
                                   ReceiverTypeInfo->getType(),
                                   /*SuperLoc=*/SourceLocation(), Sel, Method,
                                   LBracLoc, SelectorLocs, RBracLoc, Args);
}
/// Build a new Objective-C instance message, e.g. [receiver method:arg].
ExprResult RebuildObjCMessageExpr(Expr *Receiver,
                                  Selector Sel,
                                  ArrayRef<SourceLocation> SelectorLocs,
                                  ObjCMethodDecl *Method,
                                  SourceLocation LBracLoc,
                                  MultiExprArg Args,
                                  SourceLocation RBracLoc) {
  // Not a 'super' message, so the super location is empty.
  return SemaRef.BuildInstanceMessage(Receiver, Receiver->getType(),
                                      /*SuperLoc=*/SourceLocation(), Sel,
                                      Method, LBracLoc, SelectorLocs,
                                      RBracLoc, Args);
}
/// Build a new Objective-C instance/class message to 'super'.
ExprResult RebuildObjCMessageExpr(SourceLocation SuperLoc,
                                  Selector Sel,
                                  ArrayRef<SourceLocation> SelectorLocs,
                                  QualType SuperType,
                                  ObjCMethodDecl *Method,
                                  SourceLocation LBracLoc,
                                  MultiExprArg Args,
                                  SourceLocation RBracLoc) {
  // Dispatch on whether the resolved method is an instance or a class
  // method; a 'super' message has no receiver expression in either case.
  if (Method->isInstanceMethod())
    return SemaRef.BuildInstanceMessage(nullptr, SuperType, SuperLoc, Sel,
                                        Method, LBracLoc, SelectorLocs,
                                        RBracLoc, Args);
  return SemaRef.BuildClassMessage(nullptr, SuperType, SuperLoc, Sel, Method,
                                   LBracLoc, SelectorLocs, RBracLoc, Args);
}
/// Build a new Objective-C ivar reference expression.
///
/// The default implementation performs semantic analysis; subclasses may
/// override this routine to provide different behavior.
ExprResult RebuildObjCIvarRefExpr(Expr *BaseArg, ObjCIvarDecl *Ivar,
                                  SourceLocation IvarLoc,
                                  bool IsArrow, bool IsFreeIvar) {
  // The ivar reference is built as an ordinary member reference with an
  // empty scope specifier.
  CXXScopeSpec EmptySS;
  DeclarationNameInfo NameInfo(Ivar->getDeclName(), IvarLoc);
  ExprResult Result = getSema().BuildMemberReferenceExpr(
      BaseArg, BaseArg->getType(),
      /*FIXME:*/ IvarLoc, IsArrow, EmptySS, SourceLocation(),
      /*FirstQualifierInScope=*/nullptr, NameInfo,
      /*TemplateArgs=*/nullptr,
      /*S=*/nullptr);
  // Propagate the "free ivar" flag (an ivar written without an explicit
  // base expression) onto the rebuilt node.
  if (IsFreeIvar && Result.isUsable())
    cast<ObjCIvarRefExpr>(Result.get())->setIsFreeIvar(IsFreeIvar);
  return Result;
}
/// Build a new Objective-C property reference expression.
///
/// The default implementation performs semantic analysis; subclasses may
/// override this routine to provide different behavior.
ExprResult RebuildObjCPropertyRefExpr(Expr *BaseArg,
                                      ObjCPropertyDecl *Property,
                                      SourceLocation PropertyLoc) {
  // The property reference is built as an ordinary (dot) member reference
  // with an empty scope specifier.
  CXXScopeSpec EmptySS;
  DeclarationNameInfo NameInfo(Property->getDeclName(), PropertyLoc);
  return getSema().BuildMemberReferenceExpr(BaseArg, BaseArg->getType(),
                                            /*FIXME:*/PropertyLoc,
                                            /*IsArrow=*/false,
                                            EmptySS, SourceLocation(),
                                            /*FirstQualifierInScope=*/nullptr,
                                            NameInfo,
                                            /*TemplateArgs=*/nullptr,
                                            /*S=*/nullptr);
}
/// Build a new Objective-C property reference expression from explicit
/// getter/setter methods.
///
/// Since these expressions can only be value-dependent, no further
/// semantic analysis is required; the node is created directly.
ExprResult RebuildObjCPropertyRefExpr(Expr *Base, QualType T,
                                      ObjCMethodDecl *Getter,
                                      ObjCMethodDecl *Setter,
                                      SourceLocation PropertyLoc) {
  ObjCPropertyRefExpr *Ref = new (getSema().Context)
      ObjCPropertyRefExpr(Getter, Setter, T, VK_LValue, OK_ObjCProperty,
                          PropertyLoc, Base);
  return Owned(Ref);
}
/// Build a new Objective-C "isa" expression.
///
/// The default implementation performs semantic analysis; subclasses may
/// override this routine to provide different behavior.
ExprResult RebuildObjCIsaExpr(Expr *BaseArg, SourceLocation IsaLoc,
                              SourceLocation OpLoc, bool IsArrow) {
  // 'isa' is resolved as a member reference to the identifier "isa" with
  // an empty scope specifier.
  CXXScopeSpec EmptySS;
  DeclarationNameInfo NameInfo(&getSema().Context.Idents.get("isa"), IsaLoc);
  return getSema().BuildMemberReferenceExpr(BaseArg, BaseArg->getType(),
                                            OpLoc, IsArrow,
                                            EmptySS, SourceLocation(),
                                            /*FirstQualifierInScope=*/nullptr,
                                            NameInfo,
                                            /*TemplateArgs=*/nullptr,
                                            /*S=*/nullptr);
}
/// Build a new shuffle vector expression.
///
/// By default, performs semantic analysis to build the new expression.
/// Subclasses may override this routine to provide different behavior.
///
/// Unlike most Rebuild* methods, there is no dedicated Sema entry point for
/// this builtin, so the call expression is reassembled by hand: look up the
/// builtin, reference it, decay it to a function pointer, build the call,
/// and finally run the builtin-specific type check.
ExprResult RebuildShuffleVectorExpr(SourceLocation BuiltinLoc,
MultiExprArg SubExprs,
SourceLocation RParenLoc) {
// Find the declaration for __builtin_shufflevector
const IdentifierInfo &Name
= SemaRef.Context.Idents.get("__builtin_shufflevector");
// Builtins are registered on the translation unit, so look it up there.
TranslationUnitDecl *TUDecl = SemaRef.Context.getTranslationUnitDecl();
DeclContext::lookup_result Lookup = TUDecl->lookup(DeclarationName(&Name));
assert(!Lookup.empty() && "No __builtin_shufflevector?");
// Build a reference to the __builtin_shufflevector builtin
FunctionDecl *Builtin = cast<FunctionDecl>(Lookup.front());
Expr *Callee = new (SemaRef.Context)
DeclRefExpr(SemaRef.Context, Builtin, false,
SemaRef.Context.BuiltinFnTy, VK_RValue, BuiltinLoc);
// The reference has the special BuiltinFnTy type; decay it to a pointer to
// the builtin's function type so it can be called like a normal function.
QualType CalleePtrTy = SemaRef.Context.getPointerType(Builtin->getType());
Callee = SemaRef.ImpCastExprToType(Callee, CalleePtrTy,
CK_BuiltinFnToFnPtr).get();
// Build the CallExpr
ExprResult TheCall = CallExpr::Create(
SemaRef.Context, Callee, SubExprs, Builtin->getCallResultType(),
Expr::getValueKindForType(Builtin->getReturnType()), RParenLoc);
// Type-check the __builtin_shufflevector expression.
return SemaRef.SemaBuiltinShuffleVector(cast<CallExpr>(TheCall.get()));
}
/// Build a new convert vector expression (__builtin_convertvector).
ExprResult RebuildConvertVectorExpr(SourceLocation BuiltinLoc,
                                    Expr *SrcExpr, TypeSourceInfo *DstTInfo,
                                    SourceLocation RParenLoc) {
  return SemaRef.SemaConvertVectorExpr(SrcExpr, DstTInfo, BuiltinLoc,
                                       RParenLoc);
}
/// Build a new template argument pack expansion.
///
/// By default, performs semantic analysis to build a new pack expansion
/// for a template argument. Subclasses may override this routine to provide
/// different behavior.
///
/// Returns a null TemplateArgumentLoc on failure; callers must check
/// isNull() / validity on the result.
TemplateArgumentLoc RebuildPackExpansion(TemplateArgumentLoc Pattern,
SourceLocation EllipsisLoc,
Optional<unsigned> NumExpansions) {
switch (Pattern.getArgument().getKind()) {
case TemplateArgument::Expression: {
// Expression patterns go through Sema's pack-expansion check, which
// wraps the expression in a PackExpansionExpr.
ExprResult Result
= getSema().CheckPackExpansion(Pattern.getSourceExpression(),
EllipsisLoc, NumExpansions);
if (Result.isInvalid())
return TemplateArgumentLoc();
return TemplateArgumentLoc(Result.get(), Result.get());
}
case TemplateArgument::Template:
// Template patterns are rebuilt directly as a template-expansion
// argument; no semantic checking is needed here.
return TemplateArgumentLoc(TemplateArgument(
Pattern.getArgument().getAsTemplate(),
NumExpansions),
Pattern.getTemplateQualifierLoc(),
Pattern.getTemplateNameLoc(),
EllipsisLoc);
case TemplateArgument::Null:
case TemplateArgument::Integral:
case TemplateArgument::Declaration:
case TemplateArgument::Pack:
case TemplateArgument::TemplateExpansion:
case TemplateArgument::NullPtr:
// None of these argument kinds can contain an unexpanded pack, so they
// can never be the pattern of a pack expansion.
llvm_unreachable("Pack expansion pattern has no parameter packs");
case TemplateArgument::Type:
// Type patterns go through Sema's type-level pack-expansion check.
if (TypeSourceInfo *Expansion
= getSema().CheckPackExpansion(Pattern.getTypeSourceInfo(),
EllipsisLoc,
NumExpansions))
return TemplateArgumentLoc(TemplateArgument(Expansion->getType()),
Expansion);
break;
}
// Reached only when the type-pattern check above failed.
return TemplateArgumentLoc();
}
/// Build a new expression pack expansion.
///
/// The default implementation performs semantic analysis to build a new
/// pack expansion for an expression. Subclasses may override this routine
/// to provide different behavior.
ExprResult RebuildPackExpansion(Expr *Pattern, SourceLocation EllipsisLoc,
                                Optional<unsigned> NumExpansions) {
  Sema &S = getSema();
  return S.CheckPackExpansion(Pattern, EllipsisLoc, NumExpansions);
}
/// Build a new C++1z fold-expression.
///
/// The default implementation performs semantic analysis in order to build
/// a new fold expression.
ExprResult RebuildCXXFoldExpr(SourceLocation LParenLoc, Expr *LHS,
                              BinaryOperatorKind Operator,
                              SourceLocation EllipsisLoc, Expr *RHS,
                              SourceLocation RParenLoc,
                              Optional<unsigned> NumExpansions) {
  Sema &S = getSema();
  return S.BuildCXXFoldExpr(LParenLoc, LHS, Operator, EllipsisLoc, RHS,
                            RParenLoc, NumExpansions);
}
/// Build an empty C++1z fold-expression with the given operator.
///
/// By default this produces the fallback value for the fold-expression,
/// or an error if the operator has no fallback value.
ExprResult RebuildEmptyCXXFoldExpr(SourceLocation EllipsisLoc,
                                   BinaryOperatorKind Operator) {
  Sema &S = getSema();
  return S.BuildEmptyCXXFoldExpr(EllipsisLoc, Operator);
}
/// Build a new atomic operation expression.
///
/// The node is created directly: there is no interesting semantic analysis
/// to perform here because an AtomicExpr cannot be built until it is known
/// to be semantically sound. Subclasses may override this routine to
/// provide different behavior.
ExprResult RebuildAtomicExpr(SourceLocation BuiltinLoc,
                             MultiExprArg SubExprs,
                             QualType RetTy,
                             AtomicExpr::AtomicOp Op,
                             SourceLocation RParenLoc) {
  ASTContext &Ctx = SemaRef.Context;
  return new (Ctx) AtomicExpr(BuiltinLoc, SubExprs, RetTy, Op, RParenLoc);
}
private:
// Transform a type that appears within an object scope (e.g. the "T" in
// "obj.T::member"), threading through the object type and any qualifier
// already found in scope. Defined out of line.
TypeLoc TransformTypeInObjectScope(TypeLoc TL,
QualType ObjectType,
NamedDecl *FirstQualifierInScope,
CXXScopeSpec &SS);
// Overload of the above operating on a TypeSourceInfo.
TypeSourceInfo *TransformTypeInObjectScope(TypeSourceInfo *TSInfo,
QualType ObjectType,
NamedDecl *FirstQualifierInScope,
CXXScopeSpec &SS);
// NOTE(review): presumably the shared worker behind the two overloads
// above — confirm against the out-of-line definitions.
TypeSourceInfo *TransformTSIInObjectScope(TypeLoc TL, QualType ObjectType,
NamedDecl *FirstQualifierInScope,
CXXScopeSpec &SS);
// Transform a dependent name type. DeducibleTSTContext indicates whether
// this occurs in a context where a deduced template specialization type
// is permitted.
QualType TransformDependentNameType(TypeLocBuilder &TLB,
DependentNameTypeLoc TL,
bool DeducibleTSTContext);
};
/// Transform the given statement by dispatching on its statement class.
///
/// Statement nodes are dispatched to the derived class's per-node
/// Transform##Node method via X-macro expansion of StmtNodes.inc;
/// expression nodes fall through to TransformExpr and are then re-wrapped
/// as statements.
template <typename Derived>
StmtResult TreeTransform<Derived>::TransformStmt(Stmt *S, StmtDiscardKind SDK) {
// A null statement transforms to itself.
if (!S)
return S;
switch (S->getStmtClass()) {
case Stmt::NoStmtClass: break;
// Transform individual statement nodes
// Pass SDK into statements that can produce a value
#define STMT(Node, Parent) \
case Stmt::Node##Class: return getDerived().Transform##Node(cast<Node>(S));
#define VALUESTMT(Node, Parent) \
case Stmt::Node##Class: \
return getDerived().Transform##Node(cast<Node>(S), SDK);
#define ABSTRACT_STMT(Node)
#define EXPR(Node, Parent)
#include "clang/AST/StmtNodes.inc"
// Transform expressions by calling TransformExpr.
#define STMT(Node, Parent)
#define ABSTRACT_STMT(Stmt)
#define EXPR(Node, Parent) case Stmt::Node##Class:
#include "clang/AST/StmtNodes.inc"
{
ExprResult E = getDerived().TransformExpr(cast<Expr>(S));
// The discard kind controls how the expression is repackaged: as a
// statement-expression result, or as a (possibly discarded) expression
// statement.
if (SDK == SDK_StmtExprResult)
E = getSema().ActOnStmtExprResult(E);
return getSema().ActOnExprStmt(E, SDK == SDK_Discarded);
}
}
// Only reachable for NoStmtClass: return the statement unchanged.
return S;
}
/// Transform the given OpenMP clause by dispatching on its clause kind.
///
/// Each known clause kind is forwarded to the derived class's
/// Transform##Class method via X-macro expansion of OpenMPKinds.def;
/// unknown kinds are returned unchanged.
template<typename Derived>
OMPClause *TreeTransform<Derived>::TransformOMPClause(OMPClause *S) {
// A null clause transforms to itself.
if (!S)
return S;
switch (S->getClauseKind()) {
default: break;
// Transform individual clause nodes
#define OPENMP_CLAUSE(Name, Class) \
case OMPC_ ## Name : \
return getDerived().Transform ## Class(cast<Class>(S));
#include "clang/Basic/OpenMPKinds.def"
}
// Fall-through for clause kinds without a transform entry.
return S;
}
/// Transform the given expression by dispatching on its statement class.
///
/// Expression nodes are forwarded to the derived class's Transform##Node
/// method via X-macro expansion of StmtNodes.inc; non-expression statement
/// classes break out and the input is returned unchanged.
template<typename Derived>
ExprResult TreeTransform<Derived>::TransformExpr(Expr *E) {
// A null expression transforms to itself.
if (!E)
return E;
switch (E->getStmtClass()) {
case Stmt::NoStmtClass: break;
#define STMT(Node, Parent) case Stmt::Node##Class: break;
#define ABSTRACT_STMT(Stmt)
#define EXPR(Node, Parent) \
case Stmt::Node##Class: return getDerived().Transform##Node(cast<Node>(E));
#include "clang/AST/StmtNodes.inc"
}
// Only reachable for non-expression statement classes.
return E;
}
/// Transform an initializer expression.
///
/// Initializers are instantiated like expressions, except that semantic
/// wrapper nodes added by the previous initialization analysis are first
/// stripped so that Sema can redo the initialization from the syntactic
/// form. \p NotCopyInit is true for direct-initialization contexts.
template<typename Derived>
ExprResult TreeTransform<Derived>::TransformInitializer(Expr *Init,
bool NotCopyInit) {
// Initializers are instantiated like expressions, except that various outer
// layers are stripped.
if (!Init)
return Init;
// Strip, in order: full-expression wrappers, array-init loops, materialized
// temporaries, temporary bindings (possibly several), and implicit casts.
if (auto *FE = dyn_cast<FullExpr>(Init))
Init = FE->getSubExpr();
if (auto *AIL = dyn_cast<ArrayInitLoopExpr>(Init))
Init = AIL->getCommonExpr();
if (MaterializeTemporaryExpr *MTE = dyn_cast<MaterializeTemporaryExpr>(Init))
Init = MTE->GetTemporaryExpr();
while (CXXBindTemporaryExpr *Binder = dyn_cast<CXXBindTemporaryExpr>(Init))
Init = Binder->getSubExpr();
if (ImplicitCastExpr *ICE = dyn_cast<ImplicitCastExpr>(Init))
Init = ICE->getSubExprAsWritten();
// An implicit std::initializer_list conversion is unwrapped by recursing
// on its sub-expression.
if (CXXStdInitializerListExpr *ILE =
dyn_cast<CXXStdInitializerListExpr>(Init))
return TransformInitializer(ILE->getSubExpr(), NotCopyInit);
// If this is copy-initialization, we only need to reconstruct
// InitListExprs. Other forms of copy-initialization will be a no-op if
// the initializer is already the right type.
CXXConstructExpr *Construct = dyn_cast<CXXConstructExpr>(Init);
if (!NotCopyInit && !(Construct && Construct->isListInitialization()))
return getDerived().TransformExpr(Init);
// Revert value-initialization back to empty parens.
if (CXXScalarValueInitExpr *VIE = dyn_cast<CXXScalarValueInitExpr>(Init)) {
SourceRange Parens = VIE->getSourceRange();
return getDerived().RebuildParenListExpr(Parens.getBegin(), None,
Parens.getEnd());
}
// FIXME: We shouldn't build ImplicitValueInitExprs for direct-initialization.
if (isa<ImplicitValueInitExpr>(Init))
return getDerived().RebuildParenListExpr(SourceLocation(), None,
SourceLocation());
// Revert initialization by constructor back to a parenthesized or braced list
// of expressions. Any other form of initializer can just be reused directly.
if (!Construct || isa<CXXTemporaryObjectExpr>(Construct))
return getDerived().TransformExpr(Init);
// If the initialization implicitly converted an initializer list to a
// std::initializer_list object, unwrap the std::initializer_list too.
if (Construct && Construct->isStdInitListInitialization())
return TransformInitializer(Construct->getArg(0), NotCopyInit);
// Enter a list-init context if this was list initialization.
EnterExpressionEvaluationContext Context(
getSema(), EnterExpressionEvaluationContext::InitList,
Construct->isListInitialization());
// Transform the constructor arguments (dropping defaulted trailing
// arguments, as for a call).
SmallVector<Expr*, 8> NewArgs;
bool ArgChanged = false;
if (getDerived().TransformExprs(Construct->getArgs(), Construct->getNumArgs(),
/*IsCall*/true, NewArgs, &ArgChanged))
return ExprError();
// If this was list initialization, revert to syntactic list form.
if (Construct->isListInitialization())
return getDerived().RebuildInitList(Construct->getBeginLoc(), NewArgs,
Construct->getEndLoc());
// Build a ParenListExpr to represent anything else.
SourceRange Parens = Construct->getParenOrBraceRange();
if (Parens.isInvalid()) {
// This was a variable declaration's initialization for which no initializer
// was specified.
assert(NewArgs.empty() &&
"no parens or braces but have direct init with arguments?");
return ExprEmpty();
}
return getDerived().RebuildParenListExpr(Parens.getBegin(), NewArgs,
Parens.getEnd());
}
/// Transform a sequence of expressions, expanding pack expansions where
/// possible.
///
/// \param Inputs the expressions to transform.
/// \param NumInputs the number of input expressions.
/// \param IsCall true if these are call arguments (enables dropping of
///        default arguments via DropCallArgument, and initializer-style
///        transformation of each argument).
/// \param Outputs receives the transformed expressions.
/// \param ArgChanged if non-null, set to true when any output differs from
///        its input.
/// \returns true on error.
template<typename Derived>
bool TreeTransform<Derived>::TransformExprs(Expr *const *Inputs,
unsigned NumInputs,
bool IsCall,
SmallVectorImpl<Expr *> &Outputs,
bool *ArgChanged) {
for (unsigned I = 0; I != NumInputs; ++I) {
// If requested, drop call arguments that need to be dropped.
if (IsCall && getDerived().DropCallArgument(Inputs[I])) {
if (ArgChanged)
*ArgChanged = true;
// Dropping one argument drops all that follow it as well.
break;
}
if (PackExpansionExpr *Expansion = dyn_cast<PackExpansionExpr>(Inputs[I])) {
Expr *Pattern = Expansion->getPattern();
SmallVector<UnexpandedParameterPack, 2> Unexpanded;
getSema().collectUnexpandedParameterPacks(Pattern, Unexpanded);
assert(!Unexpanded.empty() && "Pack expansion without parameter packs?");
// Determine whether the set of unexpanded parameter packs can and should
// be expanded.
bool Expand = true;
bool RetainExpansion = false;
Optional<unsigned> OrigNumExpansions = Expansion->getNumExpansions();
Optional<unsigned> NumExpansions = OrigNumExpansions;
if (getDerived().TryExpandParameterPacks(Expansion->getEllipsisLoc(),
Pattern->getSourceRange(),
Unexpanded,
Expand, RetainExpansion,
NumExpansions))
return true;
if (!Expand) {
// The transform has determined that we should perform a simple
// transformation on the pack expansion, producing another pack
// expansion.
// Index -1 signals "not substituting a particular pack element".
Sema::ArgumentPackSubstitutionIndexRAII SubstIndex(getSema(), -1);
ExprResult OutPattern = getDerived().TransformExpr(Pattern);
if (OutPattern.isInvalid())
return true;
ExprResult Out = getDerived().RebuildPackExpansion(OutPattern.get(),
Expansion->getEllipsisLoc(),
NumExpansions);
if (Out.isInvalid())
return true;
if (ArgChanged)
*ArgChanged = true;
Outputs.push_back(Out.get());
continue;
}
// Record right away that the argument was changed. This needs
// to happen even if the array expands to nothing.
if (ArgChanged) *ArgChanged = true;
// The transform has determined that we should perform an elementwise
// expansion of the pattern. Do so.
for (unsigned I = 0; I != *NumExpansions; ++I) {
// Substitute the I'th element of each pack while transforming.
Sema::ArgumentPackSubstitutionIndexRAII SubstIndex(getSema(), I);
ExprResult Out = getDerived().TransformExpr(Pattern);
if (Out.isInvalid())
return true;
// A still-unexpanded result must be re-wrapped as a pack expansion.
if (Out.get()->containsUnexpandedParameterPack()) {
Out = getDerived().RebuildPackExpansion(
Out.get(), Expansion->getEllipsisLoc(), OrigNumExpansions);
if (Out.isInvalid())
return true;
}
Outputs.push_back(Out.get());
}
// If we're supposed to retain a pack expansion, do so by temporarily
// forgetting the partially-substituted parameter pack.
if (RetainExpansion) {
ForgetPartiallySubstitutedPackRAII Forget(getDerived());
ExprResult Out = getDerived().TransformExpr(Pattern);
if (Out.isInvalid())
return true;
Out = getDerived().RebuildPackExpansion(
Out.get(), Expansion->getEllipsisLoc(), OrigNumExpansions);
if (Out.isInvalid())
return true;
Outputs.push_back(Out.get());
}
continue;
}
// Ordinary (non-pack-expansion) argument: call arguments are transformed
// as initializers, everything else as plain expressions.
ExprResult Result =
IsCall ? getDerived().TransformInitializer(Inputs[I], /*DirectInit*/false)
: getDerived().TransformExpr(Inputs[I]);
if (Result.isInvalid())
return true;
if (Result.get() != Inputs[I] && ArgChanged)
*ArgChanged = true;
Outputs.push_back(Result.get());
}
return false;
}
/// Transform the condition of an if/while/for/switch.
///
/// A condition is either a condition variable declaration or a plain
/// expression; exactly one of \p Var and \p Expr may be non-null. An empty
/// result is returned when both are null.
template <typename Derived>
Sema::ConditionResult TreeTransform<Derived>::TransformCondition(
    SourceLocation Loc, VarDecl *Var, Expr *Expr, Sema::ConditionKind Kind) {
  if (Var) {
    // Condition variable: transform its definition, then re-register it
    // with Sema as a condition variable.
    VarDecl *ConditionVar = cast_or_null<VarDecl>(
        getDerived().TransformDefinition(Var->getLocation(), Var));
    if (!ConditionVar)
      return Sema::ConditionError();
    return getSema().ActOnConditionVariable(ConditionVar, Loc, Kind);
  }

  if (Expr) {
    // Plain expression condition: transform and re-check it.
    ExprResult CondExpr = getDerived().TransformExpr(Expr);
    if (CondExpr.isInvalid())
      return Sema::ConditionError();
    return getSema().ActOnCondition(nullptr, Loc, CondExpr.get(), Kind);
  }

  // No condition at all (e.g. "for (;;)").
  return Sema::ConditionResult();
}
/// Transform a nested-name-specifier (e.g. "A::B::") with source-location
/// information.
///
/// The specifier chain is flattened outermost-first, each component is
/// transformed and re-appended to a CXXScopeSpec, and the result is either
/// the original specifier (when nothing changed) or a freshly allocated
/// one. Returns a null NestedNameSpecifierLoc on error.
template<typename Derived>
NestedNameSpecifierLoc
TreeTransform<Derived>::TransformNestedNameSpecifierLoc(
NestedNameSpecifierLoc NNS,
QualType ObjectType,
NamedDecl *FirstQualifierInScope) {
// Collect the qualifiers innermost-first, then process them in reverse so
// the scope spec is rebuilt left to right.
SmallVector<NestedNameSpecifierLoc, 4> Qualifiers;
for (NestedNameSpecifierLoc Qualifier = NNS; Qualifier;
Qualifier = Qualifier.getPrefix())
Qualifiers.push_back(Qualifier);
CXXScopeSpec SS;
while (!Qualifiers.empty()) {
NestedNameSpecifierLoc Q = Qualifiers.pop_back_val();
NestedNameSpecifier *QNNS = Q.getNestedNameSpecifier();
switch (QNNS->getKind()) {
case NestedNameSpecifier::Identifier: {
// An unresolved identifier component: re-run nested-name-specifier
// lookup, which appends to SS on success.
Sema::NestedNameSpecInfo IdInfo(QNNS->getAsIdentifier(),
Q.getLocalBeginLoc(), Q.getLocalEndLoc(), ObjectType);
if (SemaRef.BuildCXXNestedNameSpecifier(/*Scope=*/nullptr, IdInfo, false,
SS, FirstQualifierInScope, false))
return NestedNameSpecifierLoc();
}
break;
case NestedNameSpecifier::Namespace: {
NamespaceDecl *NS
= cast_or_null<NamespaceDecl>(
getDerived().TransformDecl(
Q.getLocalBeginLoc(),
QNNS->getAsNamespace()));
SS.Extend(SemaRef.Context, NS, Q.getLocalBeginLoc(), Q.getLocalEndLoc());
break;
}
case NestedNameSpecifier::NamespaceAlias: {
NamespaceAliasDecl *Alias
= cast_or_null<NamespaceAliasDecl>(
getDerived().TransformDecl(Q.getLocalBeginLoc(),
QNNS->getAsNamespaceAlias()));
SS.Extend(SemaRef.Context, Alias, Q.getLocalBeginLoc(),
Q.getLocalEndLoc());
break;
}
case NestedNameSpecifier::Global:
// There is no meaningful transformation that one could perform on the
// global scope.
SS.MakeGlobal(SemaRef.Context, Q.getBeginLoc());
break;
case NestedNameSpecifier::Super: {
// "__super::" (MS extension): transform the record it refers to.
CXXRecordDecl *RD =
cast_or_null<CXXRecordDecl>(getDerived().TransformDecl(
SourceLocation(), QNNS->getAsRecordDecl()));
SS.MakeSuper(SemaRef.Context, RD, Q.getBeginLoc(), Q.getEndLoc());
break;
}
case NestedNameSpecifier::TypeSpecWithTemplate:
case NestedNameSpecifier::TypeSpec: {
TypeLoc TL = TransformTypeInObjectScope(Q.getTypeLoc(), ObjectType,
FirstQualifierInScope, SS);
if (!TL)
return NestedNameSpecifierLoc();
// Only dependent types, record types and (since C++11) enumeration
// types may appear in a nested-name-specifier.
if (TL.getType()->isDependentType() || TL.getType()->isRecordType() ||
(SemaRef.getLangOpts().CPlusPlus11 &&
TL.getType()->isEnumeralType())) {
assert(!TL.getType().hasLocalQualifiers() &&
"Can't get cv-qualifiers here");
if (TL.getType()->isEnumeralType())
SemaRef.Diag(TL.getBeginLoc(),
diag::warn_cxx98_compat_enum_nested_name_spec);
SS.Extend(SemaRef.Context, /*FIXME:*/SourceLocation(), TL,
Q.getLocalEndLoc());
break;
}
// If the nested-name-specifier is an invalid type def, don't emit an
// error because a previous error should have already been emitted.
TypedefTypeLoc TTL = TL.getAs<TypedefTypeLoc>();
if (!TTL || !TTL.getTypedefNameDecl()->isInvalidDecl()) {
SemaRef.Diag(TL.getBeginLoc(), diag::err_nested_name_spec_non_tag)
<< TL.getType() << SS.getRange();
}
return NestedNameSpecifierLoc();
}
}
// The qualifier-in-scope and object type only apply to the leftmost entity.
FirstQualifierInScope = nullptr;
ObjectType = QualType();
}
// Don't rebuild the nested-name-specifier if we don't have to.
if (SS.getScopeRep() == NNS.getNestedNameSpecifier() &&
!getDerived().AlwaysRebuild())
return NNS;
// If we can re-use the source-location data from the original
// nested-name-specifier, do so.
if (SS.location_size() == NNS.getDataLength() &&
memcmp(SS.location_data(), NNS.getOpaqueData(), SS.location_size()) == 0)
return NestedNameSpecifierLoc(SS.getScopeRep(), NNS.getOpaqueData());
// Allocate new nested-name-specifier location information.
return SS.getWithLocInContext(SemaRef.Context);
}
/// Transform the given declaration name, rebuilding any type or template
/// that the name embeds (C++ special member names carry a type;
/// deduction-guide names carry a template declaration).
///
/// Returns an empty DeclarationNameInfo on error.
template<typename Derived>
DeclarationNameInfo
TreeTransform<Derived>
::TransformDeclarationNameInfo(const DeclarationNameInfo &NameInfo) {
  DeclarationName Name = NameInfo.getName();
  if (!Name)
    return DeclarationNameInfo();

  switch (Name.getNameKind()) {
  case DeclarationName::Identifier:
  case DeclarationName::ObjCZeroArgSelector:
  case DeclarationName::ObjCOneArgSelector:
  case DeclarationName::ObjCMultiArgSelector:
  case DeclarationName::CXXOperatorName:
  case DeclarationName::CXXLiteralOperatorName:
  case DeclarationName::CXXUsingDirective:
    // These name kinds embed nothing that needs transformation.
    return NameInfo;

  case DeclarationName::CXXDeductionGuideName: {
    // Rebuild the name around the transformed template declaration.
    TemplateDecl *OldTemplate = Name.getCXXDeductionGuideTemplate();
    TemplateDecl *NewTemplate = cast_or_null<TemplateDecl>(
        getDerived().TransformDecl(NameInfo.getLoc(), OldTemplate));
    if (!NewTemplate)
      return DeclarationNameInfo();

    DeclarationNameInfo NewNameInfo(NameInfo);
    NewNameInfo.setName(
        SemaRef.Context.DeclarationNames.getCXXDeductionGuideName(NewTemplate));
    return NewNameInfo;
  }

  case DeclarationName::CXXConstructorName:
  case DeclarationName::CXXDestructorName:
  case DeclarationName::CXXConversionFunctionName: {
    // Transform the type the special name refers to, preferring the
    // written type-source information when available.
    TypeSourceInfo *NewTInfo;
    CanQualType NewCanTy;
    if (TypeSourceInfo *OldTInfo = NameInfo.getNamedTypeInfo()) {
      NewTInfo = getDerived().TransformType(OldTInfo);
      if (!NewTInfo)
        return DeclarationNameInfo();
      NewCanTy = SemaRef.Context.getCanonicalType(NewTInfo->getType());
    }
    else {
      // No type-source info: transform the bare type, using the name's
      // location as the base location for any diagnostics.
      NewTInfo = nullptr;
      TemporaryBase Rebase(*this, NameInfo.getLoc(), Name);
      QualType NewT = getDerived().TransformType(Name.getCXXNameType());
      if (NewT.isNull())
        return DeclarationNameInfo();
      NewCanTy = SemaRef.Context.getCanonicalType(NewT);
    }

    DeclarationName NewName
      = SemaRef.Context.DeclarationNames.getCXXSpecialName(Name.getNameKind(),
                                                           NewCanTy);
    DeclarationNameInfo NewNameInfo(NameInfo);
    NewNameInfo.setName(NewName);
    NewNameInfo.setNamedTypeInfo(NewTInfo);
    return NewNameInfo;
  }
  }

  llvm_unreachable("Unknown name kind.");
}
/// Transform a template name, dispatching on its internal representation:
/// qualified names, dependent names, plain template declarations, and
/// substituted template template parameter packs.  Returns a null
/// TemplateName on error.
template<typename Derived>
TemplateName
TreeTransform<Derived>::TransformTemplateName(CXXScopeSpec &SS,
                                              TemplateName Name,
                                              SourceLocation NameLoc,
                                              QualType ObjectType,
                                              NamedDecl *FirstQualifierInScope,
                                              bool AllowInjectedClassName) {
  if (QualifiedTemplateName *QTN = Name.getAsQualifiedTemplateName()) {
    TemplateDecl *Template = QTN->getTemplateDecl();
    assert(Template && "qualified template name must refer to a template");

    TemplateDecl *TransTemplate
      = cast_or_null<TemplateDecl>(getDerived().TransformDecl(NameLoc,
                                                              Template));
    if (!TransTemplate)
      return TemplateName();

    // Reuse the original name when neither the qualifier nor the
    // referenced template changed (and rebuilding isn't forced).
    if (!getDerived().AlwaysRebuild() &&
        SS.getScopeRep() == QTN->getQualifier() &&
        TransTemplate == Template)
      return Name;

    return getDerived().RebuildTemplateName(SS, QTN->hasTemplateKeyword(),
                                            TransTemplate);
  }

  if (DependentTemplateName *DTN = Name.getAsDependentTemplateName()) {
    if (SS.getScopeRep()) {
      // These apply to the scope specifier, not the template.
      ObjectType = QualType();
      FirstQualifierInScope = nullptr;
    }

    if (!getDerived().AlwaysRebuild() &&
        SS.getScopeRep() == DTN->getQualifier() &&
        ObjectType.isNull())
      return Name;

    // FIXME: Preserve the location of the "template" keyword.
    SourceLocation TemplateKWLoc = NameLoc;

    if (DTN->isIdentifier()) {
      return getDerived().RebuildTemplateName(SS,
                                              TemplateKWLoc,
                                              *DTN->getIdentifier(),
                                              NameLoc,
                                              ObjectType,
                                              FirstQualifierInScope,
                                              AllowInjectedClassName);
    }

    // Dependent operator-function template name (e.g. "T::operator+").
    return getDerived().RebuildTemplateName(SS, TemplateKWLoc,
                                            DTN->getOperator(), NameLoc,
                                            ObjectType, AllowInjectedClassName);
  }

  if (TemplateDecl *Template = Name.getAsTemplateDecl()) {
    TemplateDecl *TransTemplate
      = cast_or_null<TemplateDecl>(getDerived().TransformDecl(NameLoc,
                                                              Template));
    if (!TransTemplate)
      return TemplateName();

    if (!getDerived().AlwaysRebuild() &&
        TransTemplate == Template)
      return Name;

    return TemplateName(TransTemplate);
  }

  if (SubstTemplateTemplateParmPackStorage *SubstPack
      = Name.getAsSubstTemplateTemplateParmPack()) {
    // Transform the parameter pack that was substituted.
    TemplateTemplateParmDecl *TransParam
      = cast_or_null<TemplateTemplateParmDecl>(
            getDerived().TransformDecl(NameLoc, SubstPack->getParameterPack()));
    if (!TransParam)
      return TemplateName();

    if (!getDerived().AlwaysRebuild() &&
        TransParam == SubstPack->getParameterPack())
      return Name;

    return getDerived().RebuildTemplateName(TransParam,
                                            SubstPack->getArgumentPack());
  }

  // These should be getting filtered out before they reach the AST.
  llvm_unreachable("overloaded function decl survived to here");
}
/// Fabricate trivial source-location information for a template argument
/// that has none, anchored at the derived transform's base location.
/// Used when iterating over bare TemplateArgument lists.
template<typename Derived>
void TreeTransform<Derived>::InventTemplateArgumentLoc(
                                         const TemplateArgument &Arg,
                                         TemplateArgumentLoc &Output) {
  SourceLocation Loc = getDerived().getBaseLocation();
  switch (Arg.getKind()) {
  case TemplateArgument::Null:
    llvm_unreachable("null template argument in TreeTransform");
    break;

  case TemplateArgument::Type:
    // Wrap the type in trivial type-source information.
    Output = TemplateArgumentLoc(Arg,
               SemaRef.Context.getTrivialTypeSourceInfo(Arg.getAsType(), Loc));
    break;

  case TemplateArgument::Template:
  case TemplateArgument::TemplateExpansion: {
    // Build trivial qualifier location info when the template name is
    // dependent or qualified; otherwise the builder stays empty.
    NestedNameSpecifierLocBuilder Builder;
    TemplateName Template = Arg.getAsTemplateOrTemplatePattern();
    if (DependentTemplateName *DTN = Template.getAsDependentTemplateName())
      Builder.MakeTrivial(SemaRef.Context, DTN->getQualifier(), Loc);
    else if (QualifiedTemplateName *QTN = Template.getAsQualifiedTemplateName())
      Builder.MakeTrivial(SemaRef.Context, QTN->getQualifier(), Loc);

    if (Arg.getKind() == TemplateArgument::Template)
      Output = TemplateArgumentLoc(Arg,
                                   Builder.getWithLocInContext(SemaRef.Context),
                                   Loc);
    else
      // A pack expansion additionally carries an ellipsis location.
      Output = TemplateArgumentLoc(Arg,
                                   Builder.getWithLocInContext(SemaRef.Context),
                                   Loc, Loc);

    break;
  }

  case TemplateArgument::Expression:
    // The expression itself carries location information.
    Output = TemplateArgumentLoc(Arg, Arg.getAsExpr());
    break;

  case TemplateArgument::Declaration:
  case TemplateArgument::Integral:
  case TemplateArgument::Pack:
  case TemplateArgument::NullPtr:
    // These kinds carry no rewritable location data.
    Output = TemplateArgumentLoc(Arg, TemplateArgumentLocInfo());
    break;
  }
}
/// Transform a single template argument (with source-location info).
///
/// \param Uneval if true, expression arguments are transformed in an
/// unevaluated context rather than a constant-evaluated one.
/// \returns true on error; on success \p Output holds the result.
template<typename Derived>
bool TreeTransform<Derived>::TransformTemplateArgument(
                                         const TemplateArgumentLoc &Input,
                                         TemplateArgumentLoc &Output, bool Uneval) {
  // Template arguments are (at least) constant-evaluated.
  EnterExpressionEvaluationContext EEEC(
      SemaRef, Sema::ExpressionEvaluationContext::ConstantEvaluated,
      /*LambdaContextDecl=*/nullptr, /*ExprContext=*/
      Sema::ExpressionEvaluationContextRecord::EK_TemplateArgument);
  const TemplateArgument &Arg = Input.getArgument();
  switch (Arg.getKind()) {
  case TemplateArgument::Null:
  case TemplateArgument::Integral:
  case TemplateArgument::Pack:
  case TemplateArgument::Declaration:
  case TemplateArgument::NullPtr:
    // Canonical-only kinds should not reach this transformation.
    llvm_unreachable("Unexpected TemplateArgument");

  case TemplateArgument::Type: {
    TypeSourceInfo *DI = Input.getTypeSourceInfo();
    if (!DI)
      DI = InventTypeSourceInfo(Input.getArgument().getAsType());

    DI = getDerived().TransformType(DI);
    if (!DI) return true;

    Output = TemplateArgumentLoc(TemplateArgument(DI->getType()), DI);
    return false;
  }

  case TemplateArgument::Template: {
    // Transform the optional qualifier first, then the template name.
    NestedNameSpecifierLoc QualifierLoc = Input.getTemplateQualifierLoc();
    if (QualifierLoc) {
      QualifierLoc = getDerived().TransformNestedNameSpecifierLoc(QualifierLoc);
      if (!QualifierLoc)
        return true;
    }

    CXXScopeSpec SS;
    SS.Adopt(QualifierLoc);
    TemplateName Template
      = getDerived().TransformTemplateName(SS, Arg.getAsTemplate(),
                                           Input.getTemplateNameLoc());
    if (Template.isNull())
      return true;

    Output = TemplateArgumentLoc(TemplateArgument(Template), QualifierLoc,
                                 Input.getTemplateNameLoc());
    return false;
  }

  case TemplateArgument::TemplateExpansion:
    llvm_unreachable("Caller should expand pack expansions");

  case TemplateArgument::Expression: {
    // Template argument expressions are constant expressions.
    EnterExpressionEvaluationContext Unevaluated(
        getSema(), Uneval
                       ? Sema::ExpressionEvaluationContext::Unevaluated
                       : Sema::ExpressionEvaluationContext::ConstantEvaluated);

    // Prefer the written expression; fall back to the canonical one.
    Expr *InputExpr = Input.getSourceExpression();
    if (!InputExpr) InputExpr = Input.getArgument().getAsExpr();

    ExprResult E = getDerived().TransformExpr(InputExpr);
    E = SemaRef.ActOnConstantExpression(E);
    if (E.isInvalid()) return true;
    Output = TemplateArgumentLoc(TemplateArgument(E.get()), E.get());
    return false;
  }
  }

  // Work around bogus GCC warning
  return true;
}
/// Iterator adaptor that invents template argument location information
/// for each of the template arguments in its underlying iterator.
///
/// Dereferencing calls TreeTransform::InventTemplateArgumentLoc, so each
/// element is produced by value with freshly fabricated location data.
template<typename Derived, typename InputIterator>
class TemplateArgumentLocInventIterator {
  TreeTransform<Derived> &Self;
  InputIterator Iter;

public:
  typedef TemplateArgumentLoc value_type;
  typedef TemplateArgumentLoc reference;
  typedef typename std::iterator_traits<InputIterator>::difference_type
    difference_type;
  typedef std::input_iterator_tag iterator_category;

  // Proxy returned by operator-> so a pointer-like interface works even
  // though elements are produced by value.
  class pointer {
    TemplateArgumentLoc Arg;

  public:
    explicit pointer(TemplateArgumentLoc Arg) : Arg(Arg) { }

    const TemplateArgumentLoc *operator->() const { return &Arg; }
  };

  TemplateArgumentLocInventIterator() { }

  explicit TemplateArgumentLocInventIterator(TreeTransform<Derived> &Self,
                                             InputIterator Iter)
    : Self(Self), Iter(Iter) { }

  TemplateArgumentLocInventIterator &operator++() {
    ++Iter;
    return *this;
  }

  TemplateArgumentLocInventIterator operator++(int) {
    TemplateArgumentLocInventIterator Old(*this);
    ++(*this);
    return Old;
  }

  reference operator*() const {
    // Invent location info on the fly for the current argument.
    TemplateArgumentLoc Result;
    Self.InventTemplateArgumentLoc(*Iter, Result);
    return Result;
  }

  pointer operator->() const { return pointer(**this); }

  friend bool operator==(const TemplateArgumentLocInventIterator &X,
                         const TemplateArgumentLocInventIterator &Y) {
    return X.Iter == Y.Iter;
  }

  friend bool operator!=(const TemplateArgumentLocInventIterator &X,
                         const TemplateArgumentLocInventIterator &Y) {
    return X.Iter != Y.Iter;
  }
};
/// Transform a sequence of template arguments into \p Outputs,
/// flattening argument packs and expanding (or retaining) pack
/// expansions as directed by TryExpandParameterPacks.
/// \returns true on error.
template<typename Derived>
template<typename InputIterator>
bool TreeTransform<Derived>::TransformTemplateArguments(
    InputIterator First, InputIterator Last, TemplateArgumentListInfo &Outputs,
    bool Uneval) {
  for (; First != Last; ++First) {
    TemplateArgumentLoc Out;
    TemplateArgumentLoc In = *First;

    if (In.getArgument().getKind() == TemplateArgument::Pack) {
      // Unpack argument packs, which we translate them into separate
      // arguments.
      // FIXME: We could do much better if we could guarantee that the
      // TemplateArgumentLocInfo for the pack expansion would be usable for
      // all of the template arguments in the argument pack.
      typedef TemplateArgumentLocInventIterator<Derived,
                                                TemplateArgument::pack_iterator>
        PackLocIterator;
      if (TransformTemplateArguments(PackLocIterator(*this,
                                                 In.getArgument().pack_begin()),
                                     PackLocIterator(*this,
                                                   In.getArgument().pack_end()),
                                     Outputs, Uneval))
        return true;

      continue;
    }

    if (In.getArgument().isPackExpansion()) {
      // We have a pack expansion, for which we will be substituting into
      // the pattern.
      SourceLocation Ellipsis;
      Optional<unsigned> OrigNumExpansions;
      TemplateArgumentLoc Pattern
        = getSema().getTemplateArgumentPackExpansionPattern(
              In, Ellipsis, OrigNumExpansions);

      SmallVector<UnexpandedParameterPack, 2> Unexpanded;
      getSema().collectUnexpandedParameterPacks(Pattern, Unexpanded);
      assert(!Unexpanded.empty() && "Pack expansion without parameter packs?");

      // Determine whether the set of unexpanded parameter packs can and should
      // be expanded.
      bool Expand = true;
      bool RetainExpansion = false;
      Optional<unsigned> NumExpansions = OrigNumExpansions;
      if (getDerived().TryExpandParameterPacks(Ellipsis,
                                               Pattern.getSourceRange(),
                                               Unexpanded,
                                               Expand,
                                               RetainExpansion,
                                               NumExpansions))
        return true;

      if (!Expand) {
        // The transform has determined that we should perform a simple
        // transformation on the pack expansion, producing another pack
        // expansion.
        TemplateArgumentLoc OutPattern;
        // Index -1 means "transform the pattern itself, not an element".
        Sema::ArgumentPackSubstitutionIndexRAII SubstIndex(getSema(), -1);
        if (getDerived().TransformTemplateArgument(Pattern, OutPattern, Uneval))
          return true;

        Out = getDerived().RebuildPackExpansion(OutPattern, Ellipsis,
                                                NumExpansions);
        if (Out.getArgument().isNull())
          return true;

        Outputs.addArgument(Out);
        continue;
      }

      // The transform has determined that we should perform an elementwise
      // expansion of the pattern. Do so.
      for (unsigned I = 0; I != *NumExpansions; ++I) {
        Sema::ArgumentPackSubstitutionIndexRAII SubstIndex(getSema(), I);

        if (getDerived().TransformTemplateArgument(Pattern, Out, Uneval))
          return true;

        if (Out.getArgument().containsUnexpandedParameterPack()) {
          // An element that still contains unexpanded packs is itself
          // re-wrapped as a pack expansion.
          Out = getDerived().RebuildPackExpansion(Out, Ellipsis,
                                                  OrigNumExpansions);
          if (Out.getArgument().isNull())
            return true;
        }

        Outputs.addArgument(Out);
      }

      // If we're supposed to retain a pack expansion, do so by temporarily
      // forgetting the partially-substituted parameter pack.
      if (RetainExpansion) {
        ForgetPartiallySubstitutedPackRAII Forget(getDerived());

        if (getDerived().TransformTemplateArgument(Pattern, Out, Uneval))
          return true;

        Out = getDerived().RebuildPackExpansion(Out, Ellipsis,
                                                OrigNumExpansions);
        if (Out.getArgument().isNull())
          return true;

        Outputs.addArgument(Out);
      }

      continue;
    }

    // The simple case:
    if (getDerived().TransformTemplateArgument(In, Out, Uneval))
      return true;

    Outputs.addArgument(Out);
  }

  return false;
}
//===----------------------------------------------------------------------===//
// Type transformation
//===----------------------------------------------------------------------===//
/// Transform a bare QualType by wrapping it in trivial type-source
/// information and delegating to the TypeSourceInfo overload.
template<typename Derived>
QualType TreeTransform<Derived>::TransformType(QualType T) {
  if (getDerived().AlreadyTransformed(T))
    return T;

  // Temporary workaround. All of these transformations should
  // eventually turn into transformations on TypeLocs.
  TypeSourceInfo *DI = getSema().Context.getTrivialTypeSourceInfo(T,
                                                getDerived().getBaseLocation());

  TypeSourceInfo *NewDI = getDerived().TransformType(DI);

  if (!NewDI)
    return QualType();

  return NewDI->getType();
}
/// Transform the type carried by \p DI, rebuilding its TypeLoc via a
/// TypeLocBuilder.  Returns null on error.
template<typename Derived>
TypeSourceInfo *TreeTransform<Derived>::TransformType(TypeSourceInfo *DI) {
  // Refine the base location to the type's location.
  TemporaryBase Rebase(*this, DI->getTypeLoc().getBeginLoc(),
                       getDerived().getBaseEntity());
  if (getDerived().AlreadyTransformed(DI->getType()))
    return DI;

  TypeLocBuilder TLB;

  TypeLoc TL = DI->getTypeLoc();
  // Pre-reserve capacity so pushes below don't reallocate.
  TLB.reserve(TL.getFullDataSize());

  QualType Result = getDerived().TransformType(TLB, TL);
  if (Result.isNull())
    return nullptr;

  return TLB.getTypeSourceInfo(SemaRef.Context, Result);
}
/// Dispatch to the Transform*Type member corresponding to the TypeLoc's
/// dynamic class, via the generated TypeLocNodes.def table.
template<typename Derived>
QualType
TreeTransform<Derived>::TransformType(TypeLocBuilder &TLB, TypeLoc T) {
  switch (T.getTypeLocClass()) {
#define ABSTRACT_TYPELOC(CLASS, PARENT)
#define TYPELOC(CLASS, PARENT)                                                 \
  case TypeLoc::CLASS:                                                         \
    return getDerived().Transform##CLASS##Type(TLB,                            \
                                               T.castAs<CLASS##TypeLoc>());
#include "clang/AST/TypeLocNodes.def"
  }

  llvm_unreachable("unhandled type loc!");
}
/// Like TransformType, but allows a DependentNameType to resolve to a
/// deduced template specialization; non-dependent-name types take the
/// normal path.
template<typename Derived>
QualType TreeTransform<Derived>::TransformTypeWithDeducedTST(QualType T) {
  if (!isa<DependentNameType>(T))
    return TransformType(T);

  if (getDerived().AlreadyTransformed(T))
    return T;
  TypeSourceInfo *DI = getSema().Context.getTrivialTypeSourceInfo(T,
                                                getDerived().getBaseLocation());
  TypeSourceInfo *NewDI = getDerived().TransformTypeWithDeducedTST(DI);
  return NewDI ? NewDI->getType() : QualType();
}
/// TypeSourceInfo form of TransformTypeWithDeducedTST: peels an optional
/// qualifier layer, transforms the dependent-name type in deduced-TST
/// context, then re-applies the qualifiers.  Returns null on error.
template<typename Derived>
TypeSourceInfo *
TreeTransform<Derived>::TransformTypeWithDeducedTST(TypeSourceInfo *DI) {
  if (!isa<DependentNameType>(DI->getType()))
    return TransformType(DI);

  // Refine the base location to the type's location.
  TemporaryBase Rebase(*this, DI->getTypeLoc().getBeginLoc(),
                       getDerived().getBaseEntity());
  if (getDerived().AlreadyTransformed(DI->getType()))
    return DI;

  TypeLocBuilder TLB;

  TypeLoc TL = DI->getTypeLoc();
  TLB.reserve(TL.getFullDataSize());

  // Strip qualifiers so we can reach the DependentNameTypeLoc beneath.
  auto QTL = TL.getAs<QualifiedTypeLoc>();
  if (QTL)
    TL = QTL.getUnqualifiedLoc();

  auto DNTL = TL.castAs<DependentNameTypeLoc>();

  QualType Result = getDerived().TransformDependentNameType(
      TLB, DNTL, /*DeducedTSTContext*/true);
  if (Result.isNull())
    return nullptr;

  if (QTL) {
    // Re-apply the stripped qualifiers to the transformed type.
    Result = getDerived().RebuildQualifiedType(Result, QTL);
    if (Result.isNull())
      return nullptr;
    TLB.TypeWasModifiedSafely(Result);
  }

  return TLB.getTypeSourceInfo(SemaRef.Context, Result);
}
/// Transform a qualified type by transforming the underlying type and
/// then re-applying the local qualifiers via RebuildQualifiedType.
template<typename Derived>
QualType
TreeTransform<Derived>::TransformQualifiedType(TypeLocBuilder &TLB,
                                               QualifiedTypeLoc T) {
  QualType Result = getDerived().TransformType(TLB, T.getUnqualifiedLoc());
  if (Result.isNull())
    return QualType();

  Result = getDerived().RebuildQualifiedType(Result, T);

  if (Result.isNull())
    return QualType();

  // RebuildQualifiedType might have updated the type, but not in a way
  // that invalidates the TypeLoc. (There's no location information for
  // qualifiers.)
  TLB.TypeWasModifiedSafely(Result);

  return Result;
}
/// Re-apply the local qualifiers recorded in \p TL onto the transformed
/// type \p T, enforcing the language rules that make certain qualifiers
/// inapplicable (function types, reference types, ObjC lifetime).
/// Returns a null type (after diagnosing) on error.
template <typename Derived>
QualType TreeTransform<Derived>::RebuildQualifiedType(QualType T,
                                                      QualifiedTypeLoc TL) {

  SourceLocation Loc = TL.getBeginLoc();
  Qualifiers Quals = TL.getType().getLocalQualifiers();

  // Conflicting non-default address spaces on the substituted type and
  // the written qualifiers is an error.
  if (((T.getAddressSpace() != LangAS::Default &&
        Quals.getAddressSpace() != LangAS::Default)) &&
      T.getAddressSpace() != Quals.getAddressSpace()) {
    SemaRef.Diag(Loc, diag::err_address_space_mismatch_templ_inst)
        << TL.getType() << T;
    return QualType();
  }

  // C++ [dcl.fct]p7:
  //   [When] adding cv-qualifications on top of the function type [...] the
  //   cv-qualifiers are ignored.
  if (T->isFunctionType()) {
    T = SemaRef.getASTContext().getAddrSpaceQualType(T,
                                                     Quals.getAddressSpace());
    return T;
  }

  // C++ [dcl.ref]p1:
  //   when the cv-qualifiers are introduced through the use of a typedef-name
  //   or decltype-specifier [...] the cv-qualifiers are ignored.
  // Note that [dcl.ref]p1 lists all cases in which cv-qualifiers can be
  // applied to a reference type.
  if (T->isReferenceType()) {
    // The only qualifier that applies to a reference type is restrict.
    if (!Quals.hasRestrict())
      return T;
    Quals = Qualifiers::fromCVRMask(Qualifiers::Restrict);
  }

  // Suppress Objective-C lifetime qualifiers if they don't make sense for the
  // resulting type.
  if (Quals.hasObjCLifetime()) {
    if (!T->isObjCLifetimeType() && !T->isDependentType())
      Quals.removeObjCLifetime();
    else if (T.getObjCLifetime()) {
      // Objective-C ARC:
      //   A lifetime qualifier applied to a substituted template parameter
      //   overrides the lifetime qualifier from the template argument.
      const AutoType *AutoTy;
      if (const SubstTemplateTypeParmType *SubstTypeParam
                                = dyn_cast<SubstTemplateTypeParmType>(T)) {
        // Strip the lifetime from the replacement type, then rebuild the
        // substituted-parameter wrapper around the stripped type.
        QualType Replacement = SubstTypeParam->getReplacementType();
        Qualifiers Qs = Replacement.getQualifiers();
        Qs.removeObjCLifetime();
        Replacement = SemaRef.Context.getQualifiedType(
            Replacement.getUnqualifiedType(), Qs);
        T = SemaRef.Context.getSubstTemplateTypeParmType(
            SubstTypeParam->getReplacedParameter(), Replacement);
      } else if ((AutoTy = dyn_cast<AutoType>(T)) && AutoTy->isDeduced()) {
        // 'auto' types behave the same way as template parameters.
        QualType Deduced = AutoTy->getDeducedType();
        Qualifiers Qs = Deduced.getQualifiers();
        Qs.removeObjCLifetime();
        Deduced =
            SemaRef.Context.getQualifiedType(Deduced.getUnqualifiedType(), Qs);
        T = SemaRef.Context.getAutoType(Deduced, AutoTy->getKeyword(),
                                        AutoTy->isDependentType());
      } else {
        // Otherwise, complain about the addition of a qualifier to an
        // already-qualified type.
        // FIXME: Why is this check not in Sema::BuildQualifiedType?
        SemaRef.Diag(Loc, diag::err_attr_objc_ownership_redundant) << T;
        Quals.removeObjCLifetime();
      }
    }
  }

  return SemaRef.BuildQualifiedType(T, Loc, Quals);
}
/// Transform a type written in the scope of an object expression
/// (e.g. the T in "x.T::f()"), returning its new TypeLoc or an empty
/// TypeLoc on error.
template<typename Derived>
TypeLoc
TreeTransform<Derived>::TransformTypeInObjectScope(TypeLoc TL,
                                                   QualType ObjectType,
                                                   NamedDecl *UnqualLookup,
                                                   CXXScopeSpec &SS) {
  if (getDerived().AlreadyTransformed(TL.getType()))
    return TL;

  TypeSourceInfo *TSI =
      TransformTSIInObjectScope(TL, ObjectType, UnqualLookup, SS);
  if (TSI)
    return TSI->getTypeLoc();
  return TypeLoc();
}
/// TypeSourceInfo overload of TransformTypeInObjectScope; returns the
/// original info unchanged when no transformation is needed.
template<typename Derived>
TypeSourceInfo *
TreeTransform<Derived>::TransformTypeInObjectScope(TypeSourceInfo *TSInfo,
                                                   QualType ObjectType,
                                                   NamedDecl *UnqualLookup,
                                                   CXXScopeSpec &SS) {
  if (getDerived().AlreadyTransformed(TSInfo->getType()))
    return TSInfo;

  return TransformTSIInObjectScope(TSInfo->getTypeLoc(), ObjectType,
                                   UnqualLookup, SS);
}
/// Shared worker for the TransformTypeInObjectScope overloads: template
/// specializations and dependent template specializations need their
/// template name resolved relative to the object type/scope; everything
/// else takes the ordinary TransformType path.  Returns null on error.
template <typename Derived>
TypeSourceInfo *TreeTransform<Derived>::TransformTSIInObjectScope(
    TypeLoc TL, QualType ObjectType, NamedDecl *UnqualLookup,
    CXXScopeSpec &SS) {
  QualType T = TL.getType();
  assert(!getDerived().AlreadyTransformed(T));

  TypeLocBuilder TLB;
  QualType Result;

  if (isa<TemplateSpecializationType>(T)) {
    TemplateSpecializationTypeLoc SpecTL =
        TL.castAs<TemplateSpecializationTypeLoc>();

    // Resolve the template name in the object/member-access scope.
    TemplateName Template = getDerived().TransformTemplateName(
        SS, SpecTL.getTypePtr()->getTemplateName(), SpecTL.getTemplateNameLoc(),
        ObjectType, UnqualLookup, /*AllowInjectedClassName*/true);
    if (Template.isNull())
      return nullptr;

    Result = getDerived().TransformTemplateSpecializationType(TLB, SpecTL,
                                                              Template);
  } else if (isa<DependentTemplateSpecializationType>(T)) {
    DependentTemplateSpecializationTypeLoc SpecTL =
        TL.castAs<DependentTemplateSpecializationTypeLoc>();

    TemplateName Template
      = getDerived().RebuildTemplateName(SS,
                                         SpecTL.getTemplateKeywordLoc(),
                                         *SpecTL.getTypePtr()->getIdentifier(),
                                         SpecTL.getTemplateNameLoc(),
                                         ObjectType, UnqualLookup,
                                         /*AllowInjectedClassName*/true);
    if (Template.isNull())
      return nullptr;

    Result = getDerived().TransformDependentTemplateSpecializationType(TLB,
                                                                       SpecTL,
                                                                       Template,
                                                                       SS);
  } else {
    // Nothing special needs to be done for these.
    Result = getDerived().TransformType(TLB, TL);
  }

  if (Result.isNull())
    return nullptr;

  return TLB.getTypeSourceInfo(SemaRef.Context, Result);
}
/// Copy a simple "type-spec" TypeLoc (one carrying only a name location)
/// onto the builder unchanged and return its type.
template <class TyLoc> static inline
QualType TransformTypeSpecType(TypeLocBuilder &TLB, TyLoc T) {
  QualType Ty = T.getType();
  TyLoc Copy = TLB.push<TyLoc>(Ty);
  Copy.setNameLoc(T.getNameLoc());
  return Ty;
}
/// Builtin types are never transformed; just replicate the original
/// location information (including the extra written-specifier data,
/// when present) onto the builder.
template<typename Derived>
QualType TreeTransform<Derived>::TransformBuiltinType(TypeLocBuilder &TLB,
                                                      BuiltinTypeLoc T) {
  QualType Ty = T.getType();
  BuiltinTypeLoc Copy = TLB.push<BuiltinTypeLoc>(Ty);
  Copy.setBuiltinLoc(T.getBuiltinLoc());
  if (T.needsExtraLocalData())
    Copy.getWrittenBuiltinSpecs() = T.getWrittenBuiltinSpecs();
  return Ty;
}
/// Transform a _Complex type.  Currently treated as a leaf type-spec.
template<typename Derived>
QualType TreeTransform<Derived>::TransformComplexType(TypeLocBuilder &TLB,
                                                      ComplexTypeLoc T) {
  // FIXME: recurse?
  return TransformTypeSpecType(TLB, T);
}
/// Transform an adjusted type by transforming only its original
/// (as-written) form; the adjustment itself is recomputed elsewhere.
template <typename Derived>
QualType TreeTransform<Derived>::TransformAdjustedType(TypeLocBuilder &TLB,
                                                       AdjustedTypeLoc TL) {
  // Adjustments applied during transformation are handled elsewhere.
  return getDerived().TransformType(TLB, TL.getOriginalLoc());
}
/// Transform a decayed (array-to-pointer / function-to-pointer) type by
/// transforming the type that decayed and re-deriving the decayed form.
template<typename Derived>
QualType TreeTransform<Derived>::TransformDecayedType(TypeLocBuilder &TLB,
                                                      DecayedTypeLoc TL) {
  QualType NewOriginal = getDerived().TransformType(TLB, TL.getOriginalLoc());
  if (NewOriginal.isNull())
    return QualType();

  // Only recompute the decayed form when the underlying type changed
  // (or the derived transform forces a rebuild).
  QualType Result = TL.getType();
  bool Changed = NewOriginal != TL.getOriginalLoc().getType();
  if (Changed || getDerived().AlwaysRebuild())
    Result = SemaRef.Context.getDecayedType(NewOriginal);

  // DecayedTypeLoc carries no location data of its own.
  TLB.push<DecayedTypeLoc>(Result);
  return Result;
}
/// Transform a pointer type.  If substitution turned the pointee into an
/// Objective-C object type, the result becomes an ObjCObjectPointerType
/// rather than a plain PointerType.
template<typename Derived>
QualType TreeTransform<Derived>::TransformPointerType(TypeLocBuilder &TLB,
                                                      PointerTypeLoc TL) {
  QualType PointeeType
    = getDerived().TransformType(TLB, TL.getPointeeLoc());
  if (PointeeType.isNull())
    return QualType();

  QualType Result = TL.getType();
  if (PointeeType->getAs<ObjCObjectType>()) {
    // A dependent pointer type 'T *' is being transformed such
    // that an Objective-C class type is being replaced for 'T'. The
    // resulting pointer type is an ObjCObjectPointerType, not a
    // PointerType.
    Result = SemaRef.Context.getObjCObjectPointerType(PointeeType);

    ObjCObjectPointerTypeLoc NewT = TLB.push<ObjCObjectPointerTypeLoc>(Result);
    NewT.setStarLoc(TL.getStarLoc());
    return Result;
  }

  if (getDerived().AlwaysRebuild() ||
      PointeeType != TL.getPointeeLoc().getType()) {
    Result = getDerived().RebuildPointerType(PointeeType, TL.getSigilLoc());
    if (Result.isNull())
      return QualType();
  }

  // Objective-C ARC can add lifetime qualifiers to the type that we're
  // pointing to.
  TLB.TypeWasModifiedSafely(Result->getPointeeType());

  PointerTypeLoc NewT = TLB.push<PointerTypeLoc>(Result);
  NewT.setSigilLoc(TL.getSigilLoc());
  return Result;
}
/// Transform a block pointer type ('T (^)(...)') by transforming its
/// pointee and rebuilding when the pointee changed.
template<typename Derived>
QualType
TreeTransform<Derived>::TransformBlockPointerType(TypeLocBuilder &TLB,
                                                  BlockPointerTypeLoc TL) {
  QualType PointeeType
    = getDerived().TransformType(TLB, TL.getPointeeLoc());
  if (PointeeType.isNull())
    return QualType();

  QualType Result = TL.getType();
  if (getDerived().AlwaysRebuild() ||
      PointeeType != TL.getPointeeLoc().getType()) {
    Result = getDerived().RebuildBlockPointerType(PointeeType,
                                                  TL.getSigilLoc());
    if (Result.isNull())
      return QualType();
  }

  BlockPointerTypeLoc NewT = TLB.push<BlockPointerTypeLoc>(Result);
  NewT.setSigilLoc(TL.getSigilLoc());
  return Result;
}
/// Transforms a reference type. Note that somewhat paradoxically we
/// don't care whether the type itself is an l-value type or an r-value
/// type; we only care if the type was *written* as an l-value type
/// or an r-value type.
template<typename Derived>
QualType
TreeTransform<Derived>::TransformReferenceType(TypeLocBuilder &TLB,
                                               ReferenceTypeLoc TL) {
  const ReferenceType *T = TL.getTypePtr();

  // Note that this works with the pointee-as-written.
  QualType PointeeType = getDerived().TransformType(TLB, TL.getPointeeLoc());
  if (PointeeType.isNull())
    return QualType();

  QualType Result = TL.getType();
  if (getDerived().AlwaysRebuild() ||
      PointeeType != T->getPointeeTypeAsWritten()) {
    Result = getDerived().RebuildReferenceType(PointeeType,
                                               T->isSpelledAsLValue(),
                                               TL.getSigilLoc());
    if (Result.isNull())
      return QualType();
  }

  // Objective-C ARC can add lifetime qualifiers to the type that we're
  // referring to.
  TLB.TypeWasModifiedSafely(
      Result->getAs<ReferenceType>()->getPointeeTypeAsWritten());

  // r-value references can be rebuilt as l-value references.
  ReferenceTypeLoc NewTL;
  if (isa<LValueReferenceType>(Result))
    NewTL = TLB.push<LValueReferenceTypeLoc>(Result);
  else
    NewTL = TLB.push<RValueReferenceTypeLoc>(Result);
  NewTL.setSigilLoc(TL.getSigilLoc());

  return Result;
}
/// Transform an l-value reference type; delegates to the shared
/// reference-type logic above.
template<typename Derived>
QualType
TreeTransform<Derived>::TransformLValueReferenceType(TypeLocBuilder &TLB,
                                                 LValueReferenceTypeLoc TL) {
  return TransformReferenceType(TLB, TL);
}
/// Transform an r-value reference type; delegates to the shared
/// reference-type logic above.
template<typename Derived>
QualType
TreeTransform<Derived>::TransformRValueReferenceType(TypeLocBuilder &TLB,
                                                 RValueReferenceTypeLoc TL) {
  return TransformReferenceType(TLB, TL);
}
/// Transform a pointer-to-member type, transforming both the pointee and
/// the class the member belongs to.
template<typename Derived>
QualType
TreeTransform<Derived>::TransformMemberPointerType(TypeLocBuilder &TLB,
                                                   MemberPointerTypeLoc TL) {
  QualType PointeeType = getDerived().TransformType(TLB, TL.getPointeeLoc());
  if (PointeeType.isNull())
    return QualType();

  // Transform the class type via its written type-source info when we
  // have it; otherwise fall back to the bare type.
  TypeSourceInfo* OldClsTInfo = TL.getClassTInfo();
  TypeSourceInfo *NewClsTInfo = nullptr;
  if (OldClsTInfo) {
    NewClsTInfo = getDerived().TransformType(OldClsTInfo);
    if (!NewClsTInfo)
      return QualType();
  }

  const MemberPointerType *T = TL.getTypePtr();
  QualType OldClsType = QualType(T->getClass(), 0);
  QualType NewClsType;
  if (NewClsTInfo)
    NewClsType = NewClsTInfo->getType();
  else {
    NewClsType = getDerived().TransformType(OldClsType);
    if (NewClsType.isNull())
      return QualType();
  }

  QualType Result = TL.getType();
  if (getDerived().AlwaysRebuild() ||
      PointeeType != T->getPointeeType() ||
      NewClsType != OldClsType) {
    Result = getDerived().RebuildMemberPointerType(PointeeType, NewClsType,
                                                   TL.getStarLoc());
    if (Result.isNull())
      return QualType();
  }

  // If we had to adjust the pointee type when building a member pointer, make
  // sure to push TypeLoc info for it.
  const MemberPointerType *MPT = Result->getAs<MemberPointerType>();
  if (MPT && PointeeType != MPT->getPointeeType()) {
    assert(isa<AdjustedType>(MPT->getPointeeType()));
    TLB.push<AdjustedTypeLoc>(MPT->getPointeeType());
  }

  MemberPointerTypeLoc NewTL = TLB.push<MemberPointerTypeLoc>(Result);
  NewTL.setSigilLoc(TL.getSigilLoc());
  NewTL.setClassTInfo(NewClsTInfo);

  return Result;
}
/// Transform a constant-sized array type, rebuilding it when the element
/// type changed and re-deriving the written size expression.
template<typename Derived>
QualType
TreeTransform<Derived>::TransformConstantArrayType(TypeLocBuilder &TLB,
                                                   ConstantArrayTypeLoc TL) {
  const ConstantArrayType *T = TL.getTypePtr();
  QualType ElementType = getDerived().TransformType(TLB, TL.getElementLoc());
  if (ElementType.isNull())
    return QualType();

  QualType Result = TL.getType();
  if (getDerived().AlwaysRebuild() ||
      ElementType != T->getElementType()) {
    Result = getDerived().RebuildConstantArrayType(ElementType,
                                                   T->getSizeModifier(),
                                                   T->getSize(),
                                             T->getIndexTypeCVRQualifiers(),
                                                   TL.getBracketsRange());
    if (Result.isNull())
      return QualType();
  }

  // We might have either a ConstantArrayType or a VariableArrayType now:
  // a ConstantArrayType is allowed to have an element type which is a
  // VariableArrayType if the type is dependent.  Fortunately, all array
  // types have the same location layout.
  ArrayTypeLoc NewTL = TLB.push<ArrayTypeLoc>(Result);
  NewTL.setLBracketLoc(TL.getLBracketLoc());
  NewTL.setRBracketLoc(TL.getRBracketLoc());

  Expr *Size = TL.getSizeExpr();
  if (Size) {
    // Array bounds are constant expressions.
    EnterExpressionEvaluationContext Unevaluated(
        SemaRef, Sema::ExpressionEvaluationContext::ConstantEvaluated);
    Size = getDerived().TransformExpr(Size).template getAs<Expr>();
    Size = SemaRef.ActOnConstantExpression(Size).get();
  }
  NewTL.setSizeExpr(Size);

  return Result;
}
/// Transform an incomplete array type ('T[]'); there is no size
/// expression to transform.
template<typename Derived>
QualType TreeTransform<Derived>::TransformIncompleteArrayType(
                                              TypeLocBuilder &TLB,
                                              IncompleteArrayTypeLoc TL) {
  const IncompleteArrayType *T = TL.getTypePtr();
  QualType ElementType = getDerived().TransformType(TLB, TL.getElementLoc());
  if (ElementType.isNull())
    return QualType();

  QualType Result = TL.getType();
  if (getDerived().AlwaysRebuild() ||
      ElementType != T->getElementType()) {
    Result = getDerived().RebuildIncompleteArrayType(ElementType,
                                                     T->getSizeModifier(),
                                           T->getIndexTypeCVRQualifiers(),
                                                     TL.getBracketsRange());
    if (Result.isNull())
      return QualType();
  }

  IncompleteArrayTypeLoc NewTL = TLB.push<IncompleteArrayTypeLoc>(Result);
  NewTL.setLBracketLoc(TL.getLBracketLoc());
  NewTL.setRBracketLoc(TL.getRBracketLoc());
  NewTL.setSizeExpr(nullptr);

  return Result;
}
/// Transform a variable-length array type; the size expression is
/// transformed in a potentially-evaluated context since VLA bounds are
/// evaluated at runtime.
template<typename Derived>
QualType
TreeTransform<Derived>::TransformVariableArrayType(TypeLocBuilder &TLB,
                                                   VariableArrayTypeLoc TL) {
  const VariableArrayType *T = TL.getTypePtr();
  QualType ElementType = getDerived().TransformType(TLB, TL.getElementLoc());
  if (ElementType.isNull())
    return QualType();

  ExprResult SizeResult;
  {
    EnterExpressionEvaluationContext Context(
        SemaRef, Sema::ExpressionEvaluationContext::PotentiallyEvaluated);
    SizeResult = getDerived().TransformExpr(T->getSizeExpr());
  }
  if (SizeResult.isInvalid())
    return QualType();
  SizeResult =
      SemaRef.ActOnFinishFullExpr(SizeResult.get(), /*DiscardedValue*/ false);
  if (SizeResult.isInvalid())
    return QualType();

  Expr *Size = SizeResult.get();

  QualType Result = TL.getType();
  if (getDerived().AlwaysRebuild() ||
      ElementType != T->getElementType() ||
      Size != T->getSizeExpr()) {
    Result = getDerived().RebuildVariableArrayType(ElementType,
                                                   T->getSizeModifier(),
                                                   Size,
                                             T->getIndexTypeCVRQualifiers(),
                                                   TL.getBracketsRange());
    if (Result.isNull())
      return QualType();
  }

  // We might have constant size array now, but fortunately it has the same
  // location layout.
  ArrayTypeLoc NewTL = TLB.push<ArrayTypeLoc>(Result);
  NewTL.setLBracketLoc(TL.getLBracketLoc());
  NewTL.setRBracketLoc(TL.getRBracketLoc());
  NewTL.setSizeExpr(Size);

  return Result;
}
/// Transform a dependently-sized array type; the size is a constant
/// expression transformed in a constant-evaluated context.
template<typename Derived>
QualType
TreeTransform<Derived>::TransformDependentSizedArrayType(TypeLocBuilder &TLB,
                                             DependentSizedArrayTypeLoc TL) {
  const DependentSizedArrayType *T = TL.getTypePtr();
  QualType ElementType = getDerived().TransformType(TLB, TL.getElementLoc());
  if (ElementType.isNull())
    return QualType();

  // Array bounds are constant expressions.
  EnterExpressionEvaluationContext Unevaluated(
      SemaRef, Sema::ExpressionEvaluationContext::ConstantEvaluated);

  // Prefer the expression from the TypeLoc;  the other may have been uniqued.
  Expr *origSize = TL.getSizeExpr();
  if (!origSize) origSize = T->getSizeExpr();

  ExprResult sizeResult
    = getDerived().TransformExpr(origSize);
  sizeResult = SemaRef.ActOnConstantExpression(sizeResult);
  if (sizeResult.isInvalid())
    return QualType();

  Expr *size = sizeResult.get();

  QualType Result = TL.getType();
  if (getDerived().AlwaysRebuild() ||
      ElementType != T->getElementType() ||
      size != origSize) {
    Result = getDerived().RebuildDependentSizedArrayType(ElementType,
                                                         T->getSizeModifier(),
                                                         size,
                                                T->getIndexTypeCVRQualifiers(),
                                                        TL.getBracketsRange());
    if (Result.isNull())
      return QualType();
  }

  // We might have any sort of array type now, but fortunately they
  // all have the same location layout.
  ArrayTypeLoc NewTL = TLB.push<ArrayTypeLoc>(Result);
  NewTL.setLBracketLoc(TL.getLBracketLoc());
  NewTL.setRBracketLoc(TL.getRBracketLoc());
  NewTL.setSizeExpr(size);

  return Result;
}
/// Transform a vector type whose element count is a dependent expression.
///
/// The size expression is transformed as a constant expression. After the
/// rebuild the result may or may not still be dependent, so the type-location
/// pushed onto \p TLB is chosen accordingly.
template <typename Derived>
QualType TreeTransform<Derived>::TransformDependentVectorType(
    TypeLocBuilder &TLB, DependentVectorTypeLoc TL) {
  const DependentVectorType *T = TL.getTypePtr();
  QualType ElementType = getDerived().TransformType(T->getElementType());
  if (ElementType.isNull())
    return QualType();

  // Vector sizes are constant expressions.
  EnterExpressionEvaluationContext Unevaluated(
      SemaRef, Sema::ExpressionEvaluationContext::ConstantEvaluated);

  ExprResult Size = getDerived().TransformExpr(T->getSizeExpr());
  Size = SemaRef.ActOnConstantExpression(Size);
  if (Size.isInvalid())
    return QualType();

  QualType Result = TL.getType();
  if (getDerived().AlwaysRebuild() || ElementType != T->getElementType() ||
      Size.get() != T->getSizeExpr()) {
    Result = getDerived().RebuildDependentVectorType(
        ElementType, Size.get(), T->getAttributeLoc(), T->getVectorKind());
    if (Result.isNull())
      return QualType();
  }

  // Result might be dependent or not.
  if (isa<DependentVectorType>(Result)) {
    DependentVectorTypeLoc NewTL =
        TLB.push<DependentVectorTypeLoc>(Result);
    NewTL.setNameLoc(TL.getNameLoc());
  } else {
    VectorTypeLoc NewTL = TLB.push<VectorTypeLoc>(Result);
    NewTL.setNameLoc(TL.getNameLoc());
  }

  return Result;
}
/// Transform an ext_vector type whose size is a dependent expression.
///
/// The size expression is transformed as a constant expression; the result may
/// collapse to a non-dependent ExtVectorType, so the appropriate type-location
/// kind is pushed onto \p TLB.
template<typename Derived>
QualType TreeTransform<Derived>::TransformDependentSizedExtVectorType(
                                      TypeLocBuilder &TLB,
                                      DependentSizedExtVectorTypeLoc TL) {
  const DependentSizedExtVectorType *T = TL.getTypePtr();

  // FIXME: ext vector locs should be nested
  QualType ElementType = getDerived().TransformType(T->getElementType());
  if (ElementType.isNull())
    return QualType();

  // Vector sizes are constant expressions.
  EnterExpressionEvaluationContext Unevaluated(
      SemaRef, Sema::ExpressionEvaluationContext::ConstantEvaluated);

  ExprResult Size = getDerived().TransformExpr(T->getSizeExpr());
  Size = SemaRef.ActOnConstantExpression(Size);
  if (Size.isInvalid())
    return QualType();

  QualType Result = TL.getType();
  if (getDerived().AlwaysRebuild() ||
      ElementType != T->getElementType() ||
      Size.get() != T->getSizeExpr()) {
    Result = getDerived().RebuildDependentSizedExtVectorType(ElementType,
                                                             Size.get(),
                                                         T->getAttributeLoc());
    if (Result.isNull())
      return QualType();
  }

  // Result might be dependent or not.
  if (isa<DependentSizedExtVectorType>(Result)) {
    DependentSizedExtVectorTypeLoc NewTL
      = TLB.push<DependentSizedExtVectorTypeLoc>(Result);
    NewTL.setNameLoc(TL.getNameLoc());
  } else {
    ExtVectorTypeLoc NewTL = TLB.push<ExtVectorTypeLoc>(Result);
    NewTL.setNameLoc(TL.getNameLoc());
  }

  return Result;
}
/// Transform a type with a dependent address-space attribute.
///
/// Transforms the pointee type and the address-space expression (a constant
/// expression). If the rebuilt type is no longer dependent, trivial
/// type-source information is synthesized for it and pushed onto \p TLB by
/// recursively transforming that trivial TypeLoc.
template <typename Derived>
QualType TreeTransform<Derived>::TransformDependentAddressSpaceType(
    TypeLocBuilder &TLB, DependentAddressSpaceTypeLoc TL) {
  const DependentAddressSpaceType *T = TL.getTypePtr();

  QualType pointeeType = getDerived().TransformType(T->getPointeeType());
  if (pointeeType.isNull())
    return QualType();

  // Address spaces are constant expressions.
  EnterExpressionEvaluationContext Unevaluated(
      SemaRef, Sema::ExpressionEvaluationContext::ConstantEvaluated);

  ExprResult AddrSpace = getDerived().TransformExpr(T->getAddrSpaceExpr());
  AddrSpace = SemaRef.ActOnConstantExpression(AddrSpace);
  if (AddrSpace.isInvalid())
    return QualType();

  QualType Result = TL.getType();
  if (getDerived().AlwaysRebuild() || pointeeType != T->getPointeeType() ||
      AddrSpace.get() != T->getAddrSpaceExpr()) {
    Result = getDerived().RebuildDependentAddressSpaceType(
        pointeeType, AddrSpace.get(), T->getAttributeLoc());
    if (Result.isNull())
      return QualType();
  }

  // Result might be dependent or not.
  if (isa<DependentAddressSpaceType>(Result)) {
    DependentAddressSpaceTypeLoc NewTL =
        TLB.push<DependentAddressSpaceTypeLoc>(Result);

    NewTL.setAttrOperandParensRange(TL.getAttrOperandParensRange());
    NewTL.setAttrExprOperand(TL.getAttrExprOperand());
    NewTL.setAttrNameLoc(TL.getAttrNameLoc());
  } else {
    // No longer dependent: fabricate trivial source info for the resolved
    // type and let the recursive transform push the matching TypeLoc.
    TypeSourceInfo *DI = getSema().Context.getTrivialTypeSourceInfo(
        Result, getDerived().getBaseLocation());
    TransformType(TLB, DI->getTypeLoc());
  }

  return Result;
}
/// Transform a (non-dependent) vector type.
///
/// Only the element type needs transforming; the element count and vector
/// kind are carried over unchanged from the original type.
template <typename Derived>
QualType TreeTransform<Derived>::TransformVectorType(TypeLocBuilder &TLB,
                                                     VectorTypeLoc TL) {
  const VectorType *OldVec = TL.getTypePtr();

  QualType NewElt = getDerived().TransformType(OldVec->getElementType());
  if (NewElt.isNull())
    return QualType();

  QualType NewType = TL.getType();
  bool MustRebuild =
      getDerived().AlwaysRebuild() || NewElt != OldVec->getElementType();
  if (MustRebuild) {
    NewType = getDerived().RebuildVectorType(NewElt, OldVec->getNumElements(),
                                             OldVec->getVectorKind());
    if (NewType.isNull())
      return QualType();
  }

  TLB.push<VectorTypeLoc>(NewType).setNameLoc(TL.getNameLoc());
  return NewType;
}
/// Transform an ext_vector type by transforming its element type.
///
/// The element count is preserved; the rebuild currently passes an invalid
/// attribute location (see FIXME below).
template<typename Derived>
QualType TreeTransform<Derived>::TransformExtVectorType(TypeLocBuilder &TLB,
                                                        ExtVectorTypeLoc TL) {
  const VectorType *T = TL.getTypePtr();
  QualType ElementType = getDerived().TransformType(T->getElementType());
  if (ElementType.isNull())
    return QualType();

  QualType Result = TL.getType();
  if (getDerived().AlwaysRebuild() ||
      ElementType != T->getElementType()) {
    Result = getDerived().RebuildExtVectorType(ElementType,
                                               T->getNumElements(),
                                               /*FIXME*/ SourceLocation());
    if (Result.isNull())
      return QualType();
  }

  ExtVectorTypeLoc NewTL = TLB.push<ExtVectorTypeLoc>(Result);
  NewTL.setNameLoc(TL.getNameLoc());

  return Result;
}
/// Transform a single function parameter declaration.
///
/// \param OldParm the parameter to transform.
/// \param indexAdjustment delta applied to the parameter's function-scope
///        index, so that earlier pack expansions in the same parameter list
///        are accounted for.
/// \param NumExpansions when substituting into a pack expansion of known
///        length, the number of expansions to produce.
/// \param ExpectParameterPack unused by this default implementation; derived
///        transformers may consult it.
/// \returns the new parameter declaration, \p OldParm itself when nothing
///          changed, or null on error.
template <typename Derived>
ParmVarDecl *TreeTransform<Derived>::TransformFunctionTypeParam(
    ParmVarDecl *OldParm, int indexAdjustment, Optional<unsigned> NumExpansions,
    bool ExpectParameterPack) {
  TypeSourceInfo *OldDI = OldParm->getTypeSourceInfo();
  TypeSourceInfo *NewDI = nullptr;

  if (NumExpansions && isa<PackExpansionType>(OldDI->getType())) {
    // If we're substituting into a pack expansion type and we know the
    // length we want to expand to, just substitute for the pattern.
    TypeLoc OldTL = OldDI->getTypeLoc();
    PackExpansionTypeLoc OldExpansionTL = OldTL.castAs<PackExpansionTypeLoc>();

    TypeLocBuilder TLB;
    TypeLoc NewTL = OldDI->getTypeLoc();
    TLB.reserve(NewTL.getFullDataSize());

    QualType Result = getDerived().TransformType(TLB,
                                               OldExpansionTL.getPatternLoc());
    if (Result.isNull())
      return nullptr;

    // Re-wrap the transformed pattern in a pack expansion of the known size.
    Result = RebuildPackExpansionType(Result,
                                OldExpansionTL.getPatternLoc().getSourceRange(),
                                      OldExpansionTL.getEllipsisLoc(),
                                      NumExpansions);
    if (Result.isNull())
      return nullptr;

    PackExpansionTypeLoc NewExpansionTL
      = TLB.push<PackExpansionTypeLoc>(Result);
    NewExpansionTL.setEllipsisLoc(OldExpansionTL.getEllipsisLoc());
    NewDI = TLB.getTypeSourceInfo(SemaRef.Context, Result);
  } else
    NewDI = getDerived().TransformType(OldDI);
  if (!NewDI)
    return nullptr;

  // Nothing changed and no index shift: reuse the original declaration.
  if (NewDI == OldDI && indexAdjustment == 0)
    return OldParm;

  ParmVarDecl *newParm = ParmVarDecl::Create(SemaRef.Context,
                                             OldParm->getDeclContext(),
                                             OldParm->getInnerLocStart(),
                                             OldParm->getLocation(),
                                             OldParm->getIdentifier(),
                                             NewDI->getType(),
                                             NewDI,
                                             OldParm->getStorageClass(),
                                             /* DefArg */ nullptr);
  newParm->setScopeInfo(OldParm->getFunctionScopeDepth(),
                        OldParm->getFunctionScopeIndex() + indexAdjustment);
  return newParm;
}
/// Transform the parameters of a function type, expanding parameter packs
/// where required.
///
/// \param Loc position used when pack-expansion decisions need a location.
/// \param Params parameter declarations, parallel to \p ParamTypes; an entry
///        may be null when only the type is known.
/// \param ParamTypes the semantic parameter types.
/// \param ParamInfos extended parameter information, or null.
/// \param OutParamTypes receives the transformed parameter types.
/// \param PVars if non-null, receives the transformed parameter declarations
///        (null entries for type-only parameters).
/// \param PInfos receives transformed extended parameter information.
/// \returns true on error.
template <typename Derived>
bool TreeTransform<Derived>::TransformFunctionTypeParams(
    SourceLocation Loc, ArrayRef<ParmVarDecl *> Params,
    const QualType *ParamTypes,
    const FunctionProtoType::ExtParameterInfo *ParamInfos,
    SmallVectorImpl<QualType> &OutParamTypes,
    SmallVectorImpl<ParmVarDecl *> *PVars,
    Sema::ExtParameterInfoBuilder &PInfos) {
  int indexAdjustment = 0;

  unsigned NumParams = Params.size();
  for (unsigned i = 0; i != NumParams; ++i) {
    if (ParmVarDecl *OldParm = Params[i]) {
      assert(OldParm->getFunctionScopeIndex() == i);

      Optional<unsigned> NumExpansions;
      ParmVarDecl *NewParm = nullptr;
      if (OldParm->isParameterPack()) {
        // We have a function parameter pack that may need to be expanded.
        SmallVector<UnexpandedParameterPack, 2> Unexpanded;

        // Find the parameter packs that could be expanded.
        TypeLoc TL = OldParm->getTypeSourceInfo()->getTypeLoc();
        PackExpansionTypeLoc ExpansionTL = TL.castAs<PackExpansionTypeLoc>();
        TypeLoc Pattern = ExpansionTL.getPatternLoc();
        SemaRef.collectUnexpandedParameterPacks(Pattern, Unexpanded);
        assert(Unexpanded.size() > 0 && "Could not find parameter packs!");

        // Determine whether we should expand the parameter packs.
        bool ShouldExpand = false;
        bool RetainExpansion = false;
        Optional<unsigned> OrigNumExpansions =
            ExpansionTL.getTypePtr()->getNumExpansions();
        NumExpansions = OrigNumExpansions;
        if (getDerived().TryExpandParameterPacks(ExpansionTL.getEllipsisLoc(),
                                                 Pattern.getSourceRange(),
                                                 Unexpanded,
                                                 ShouldExpand,
                                                 RetainExpansion,
                                                 NumExpansions)) {
          return true;
        }

        if (ShouldExpand) {
          // Expand the function parameter pack into multiple, separate
          // parameters.
          getDerived().ExpandingFunctionParameterPack(OldParm);
          for (unsigned I = 0; I != *NumExpansions; ++I) {
            Sema::ArgumentPackSubstitutionIndexRAII SubstIndex(getSema(), I);
            ParmVarDecl *NewParm
              = getDerived().TransformFunctionTypeParam(OldParm,
                                                        indexAdjustment++,
                                                        OrigNumExpansions,
                                                /*ExpectParameterPack=*/false);
            if (!NewParm)
              return true;

            if (ParamInfos)
              PInfos.set(OutParamTypes.size(), ParamInfos[i]);
            OutParamTypes.push_back(NewParm->getType());
            if (PVars)
              PVars->push_back(NewParm);
          }

          // If we're supposed to retain a pack expansion, do so by temporarily
          // forgetting the partially-substituted parameter pack.
          if (RetainExpansion) {
            ForgetPartiallySubstitutedPackRAII Forget(getDerived());
            ParmVarDecl *NewParm
              = getDerived().TransformFunctionTypeParam(OldParm,
                                                        indexAdjustment++,
                                                        OrigNumExpansions,
                                                /*ExpectParameterPack=*/false);
            if (!NewParm)
              return true;

            if (ParamInfos)
              PInfos.set(OutParamTypes.size(), ParamInfos[i]);
            OutParamTypes.push_back(NewParm->getType());
            if (PVars)
              PVars->push_back(NewParm);
          }

          // The next parameter should have the same adjustment as the
          // last thing we pushed, but we post-incremented indexAdjustment
          // on every push.  Also, if we push nothing, the adjustment should
          // go down by one.
          indexAdjustment--;

          // We're done with the pack expansion.
          continue;
        }

        // We'll substitute the parameter now without expanding the pack
        // expansion.
        Sema::ArgumentPackSubstitutionIndexRAII SubstIndex(getSema(), -1);
        NewParm = getDerived().TransformFunctionTypeParam(OldParm,
                                                          indexAdjustment,
                                                          NumExpansions,
                                                  /*ExpectParameterPack=*/true);
      } else {
        NewParm = getDerived().TransformFunctionTypeParam(
            OldParm, indexAdjustment, None, /*ExpectParameterPack=*/ false);
      }

      if (!NewParm)
        return true;

      if (ParamInfos)
        PInfos.set(OutParamTypes.size(), ParamInfos[i]);
      OutParamTypes.push_back(NewParm->getType());
      if (PVars)
        PVars->push_back(NewParm);
      continue;
    }

    // Deal with the possibility that we don't have a parameter
    // declaration for this parameter.
    QualType OldType = ParamTypes[i];
    bool IsPackExpansion = false;
    Optional<unsigned> NumExpansions;
    QualType NewType;
    if (const PackExpansionType *Expansion
                                       = dyn_cast<PackExpansionType>(OldType)) {
      // We have a function parameter pack that may need to be expanded.
      QualType Pattern = Expansion->getPattern();
      SmallVector<UnexpandedParameterPack, 2> Unexpanded;
      getSema().collectUnexpandedParameterPacks(Pattern, Unexpanded);

      // Determine whether we should expand the parameter packs.
      bool ShouldExpand = false;
      bool RetainExpansion = false;
      if (getDerived().TryExpandParameterPacks(Loc, SourceRange(),
                                               Unexpanded,
                                               ShouldExpand,
                                               RetainExpansion,
                                               NumExpansions)) {
        return true;
      }

      if (ShouldExpand) {
        // Expand the function parameter pack into multiple, separate
        // parameters.
        for (unsigned I = 0; I != *NumExpansions; ++I) {
          Sema::ArgumentPackSubstitutionIndexRAII SubstIndex(getSema(), I);
          QualType NewType = getDerived().TransformType(Pattern);
          if (NewType.isNull())
            return true;

          // A slice that still contains an unexpanded pack stays wrapped in
          // a pack expansion type.
          if (NewType->containsUnexpandedParameterPack()) {
            NewType =
                getSema().getASTContext().getPackExpansionType(NewType, None);

            if (NewType.isNull())
              return true;
          }

          if (ParamInfos)
            PInfos.set(OutParamTypes.size(), ParamInfos[i]);
          OutParamTypes.push_back(NewType);
          if (PVars)
            PVars->push_back(nullptr);
        }

        // We're done with the pack expansion.
        continue;
      }

      // If we're supposed to retain a pack expansion, do so by temporarily
      // forgetting the partially-substituted parameter pack.
      if (RetainExpansion) {
        ForgetPartiallySubstitutedPackRAII Forget(getDerived());
        QualType NewType = getDerived().TransformType(Pattern);
        if (NewType.isNull())
          return true;

        if (ParamInfos)
          PInfos.set(OutParamTypes.size(), ParamInfos[i]);
        OutParamTypes.push_back(NewType);
        if (PVars)
          PVars->push_back(nullptr);
      }

      // We'll substitute the parameter now without expanding the pack
      // expansion.
      OldType = Expansion->getPattern();
      IsPackExpansion = true;
      Sema::ArgumentPackSubstitutionIndexRAII SubstIndex(getSema(), -1);
      NewType = getDerived().TransformType(OldType);
    } else {
      NewType = getDerived().TransformType(OldType);
    }

    if (NewType.isNull())
      return true;

    if (IsPackExpansion)
      NewType = getSema().Context.getPackExpansionType(NewType,
                                                       NumExpansions);

    if (ParamInfos)
      PInfos.set(OutParamTypes.size(), ParamInfos[i]);
    OutParamTypes.push_back(NewType);
    if (PVars)
      PVars->push_back(nullptr);
  }

#ifndef NDEBUG
  // Sanity-check that every surviving parameter declaration ended up at the
  // function-scope index matching its position in the output.
  if (PVars) {
    for (unsigned i = 0, e = PVars->size(); i != e; ++i)
      if (ParmVarDecl *parm = (*PVars)[i])
        assert(parm->getFunctionScopeIndex() == i);
  }
#endif

  return false;
}
/// Public entry point for transforming a function prototype type.
///
/// Delegates to the callback-taking overload, supplying the default
/// exception-specification transformation (TransformExceptionSpec) with
/// locally-owned storage for any rewritten exception types.
template<typename Derived>
QualType
TreeTransform<Derived>::TransformFunctionProtoType(TypeLocBuilder &TLB,
                                                   FunctionProtoTypeLoc TL) {
  SmallVector<QualType, 4> ExceptionStorage;
  TreeTransform *This = this; // Work around gcc.gnu.org/PR56135.
  return getDerived().TransformFunctionProtoType(
      TLB, TL, nullptr, Qualifiers(),
      [&](FunctionProtoType::ExceptionSpecInfo &ESI, bool &Changed) {
        return This->TransformExceptionSpec(TL.getBeginLoc(), ESI,
                                            ExceptionStorage, Changed);
      });
}
/// Transform a function prototype type: parameters, return type, and
/// extended prototype information.
///
/// \param ThisContext the class whose \c this should be in scope while the
///        trailing return type is transformed, or null.
/// \param ThisTypeQuals cv-qualifiers for that \c this.
/// \param TransformExceptionSpec callback that transforms the exception
///        specification and reports whether it changed.
template<typename Derived> template<typename Fn>
QualType TreeTransform<Derived>::TransformFunctionProtoType(
    TypeLocBuilder &TLB, FunctionProtoTypeLoc TL, CXXRecordDecl *ThisContext,
    Qualifiers ThisTypeQuals, Fn TransformExceptionSpec) {
  // Transform the parameters and return type.
  //
  // We are required to instantiate the params and return type in source order.
  // When the function has a trailing return type, we instantiate the
  // parameters before the return type, since the return type can then refer
  // to the parameters themselves (via decltype, sizeof, etc.).
  //
  SmallVector<QualType, 4> ParamTypes;
  SmallVector<ParmVarDecl*, 4> ParamDecls;
  Sema::ExtParameterInfoBuilder ExtParamInfos;
  const FunctionProtoType *T = TL.getTypePtr();

  QualType ResultType;

  if (T->hasTrailingReturn()) {
    if (getDerived().TransformFunctionTypeParams(
            TL.getBeginLoc(), TL.getParams(),
            TL.getTypePtr()->param_type_begin(),
            T->getExtParameterInfosOrNull(),
            ParamTypes, &ParamDecls, ExtParamInfos))
      return QualType();

    {
      // C++11 [expr.prim.general]p3:
      //   If a declaration declares a member function or member function
      //   template of a class X, the expression this is a prvalue of type
      //   "pointer to cv-qualifier-seq X" between the optional cv-qualifer-seq
      //   and the end of the function-definition, member-declarator, or
      //   declarator.
      Sema::CXXThisScopeRAII ThisScope(SemaRef, ThisContext, ThisTypeQuals);

      ResultType = getDerived().TransformType(TLB, TL.getReturnLoc());
      if (ResultType.isNull())
        return QualType();
    }
  }
  else {
    ResultType = getDerived().TransformType(TLB, TL.getReturnLoc());
    if (ResultType.isNull())
      return QualType();

    // Return type can not be qualified with an address space.
    if (ResultType.getAddressSpace() != LangAS::Default) {
      SemaRef.Diag(TL.getReturnLoc().getBeginLoc(),
                   diag::err_attribute_address_function_type);
      return QualType();
    }

    if (getDerived().TransformFunctionTypeParams(
            TL.getBeginLoc(), TL.getParams(),
            TL.getTypePtr()->param_type_begin(),
            T->getExtParameterInfosOrNull(),
            ParamTypes, &ParamDecls, ExtParamInfos))
      return QualType();
  }

  FunctionProtoType::ExtProtoInfo EPI = T->getExtProtoInfo();

  bool EPIChanged = false;
  if (TransformExceptionSpec(EPI.ExceptionSpec, EPIChanged))
    return QualType();

  // Handle extended parameter information.
  if (auto NewExtParamInfos =
        ExtParamInfos.getPointerOrNull(ParamTypes.size())) {
    if (!EPI.ExtParameterInfos ||
        llvm::makeArrayRef(EPI.ExtParameterInfos, TL.getNumParams())
          != llvm::makeArrayRef(NewExtParamInfos, ParamTypes.size())) {
      EPIChanged = true;
    }
    EPI.ExtParameterInfos = NewExtParamInfos;
  } else if (EPI.ExtParameterInfos) {
    EPIChanged = true;
    EPI.ExtParameterInfos = nullptr;
  }

  QualType Result = TL.getType();
  if (getDerived().AlwaysRebuild() || ResultType != T->getReturnType() ||
      T->getParamTypes() != llvm::makeArrayRef(ParamTypes) || EPIChanged) {
    Result = getDerived().RebuildFunctionProtoType(ResultType, ParamTypes, EPI);
    if (Result.isNull())
      return QualType();
  }

  FunctionProtoTypeLoc NewTL = TLB.push<FunctionProtoTypeLoc>(Result);
  NewTL.setLocalRangeBegin(TL.getLocalRangeBegin());
  NewTL.setLParenLoc(TL.getLParenLoc());
  NewTL.setRParenLoc(TL.getRParenLoc());
  NewTL.setExceptionSpecRange(TL.getExceptionSpecRange());
  NewTL.setLocalRangeEnd(TL.getLocalRangeEnd());
  for (unsigned i = 0, e = NewTL.getNumParams(); i != e; ++i)
    NewTL.setParam(i, ParamDecls[i]);

  return Result;
}
/// Transform a function type's exception specification.
///
/// Handles computed noexcept expressions and dynamic exception
/// specifications, expanding any pack expansions among the listed exception
/// types. \p Exceptions provides caller-owned storage that \p ESI will point
/// into on return. Sets \p Changed when the specification was altered.
/// \returns true on error.
template<typename Derived>
bool TreeTransform<Derived>::TransformExceptionSpec(
    SourceLocation Loc, FunctionProtoType::ExceptionSpecInfo &ESI,
    SmallVectorImpl<QualType> &Exceptions, bool &Changed) {
  assert(ESI.Type != EST_Uninstantiated && ESI.Type != EST_Unevaluated);

  // Instantiate a dynamic noexcept expression, if any.
  if (isComputedNoexcept(ESI.Type)) {
    EnterExpressionEvaluationContext Unevaluated(
        getSema(), Sema::ExpressionEvaluationContext::ConstantEvaluated);
    ExprResult NoexceptExpr = getDerived().TransformExpr(ESI.NoexceptExpr);
    if (NoexceptExpr.isInvalid())
      return true;

    ExceptionSpecificationType EST = ESI.Type;
    NoexceptExpr =
        getSema().ActOnNoexceptSpec(Loc, NoexceptExpr.get(), EST);
    if (NoexceptExpr.isInvalid())
      return true;

    if (ESI.NoexceptExpr != NoexceptExpr.get() || EST != ESI.Type)
      Changed = true;
    ESI.NoexceptExpr = NoexceptExpr.get();
    ESI.Type = EST;
  }

  if (ESI.Type != EST_Dynamic)
    return false;

  // Instantiate a dynamic exception specification's type.
  for (QualType T : ESI.Exceptions) {
    if (const PackExpansionType *PackExpansion =
            T->getAs<PackExpansionType>()) {
      Changed = true;

      // We have a pack expansion. Instantiate it.
      SmallVector<UnexpandedParameterPack, 2> Unexpanded;
      SemaRef.collectUnexpandedParameterPacks(PackExpansion->getPattern(),
                                              Unexpanded);
      assert(!Unexpanded.empty() && "Pack expansion without parameter packs?");

      // Determine whether the set of unexpanded parameter packs can and
      // should be expanded.
      bool Expand = false;
      bool RetainExpansion = false;
      Optional<unsigned> NumExpansions = PackExpansion->getNumExpansions();
      // FIXME: Track the location of the ellipsis (and track source location
      // information for the types in the exception specification in general).
      if (getDerived().TryExpandParameterPacks(
              Loc, SourceRange(), Unexpanded, Expand,
              RetainExpansion, NumExpansions))
        return true;

      if (!Expand) {
        // We can't expand this pack expansion into separate arguments yet;
        // just substitute into the pattern and create a new pack expansion
        // type.
        Sema::ArgumentPackSubstitutionIndexRAII SubstIndex(getSema(), -1);
        QualType U = getDerived().TransformType(PackExpansion->getPattern());
        if (U.isNull())
          return true;

        U = SemaRef.Context.getPackExpansionType(U, NumExpansions);
        Exceptions.push_back(U);
        continue;
      }

      // Substitute into the pack expansion pattern for each slice of the
      // pack.
      for (unsigned ArgIdx = 0; ArgIdx != *NumExpansions; ++ArgIdx) {
        Sema::ArgumentPackSubstitutionIndexRAII SubstIndex(getSema(), ArgIdx);

        QualType U = getDerived().TransformType(PackExpansion->getPattern());
        if (U.isNull() || SemaRef.CheckSpecifiedExceptionType(U, Loc))
          return true;

        Exceptions.push_back(U);
      }
    } else {
      QualType U = getDerived().TransformType(T);
      if (U.isNull() || SemaRef.CheckSpecifiedExceptionType(U, Loc))
        return true;
      if (T != U)
        Changed = true;

      Exceptions.push_back(U);
    }
  }

  ESI.Exceptions = Exceptions;
  // A dynamic specification that ends up listing no types is throw().
  if (ESI.Exceptions.empty())
    ESI.Type = EST_DynamicNone;
  return false;
}
/// Transform a function type without a prototype (K&R style).
///
/// Only the return type needs transforming; there is no parameter list and
/// no exception specification to consider.
template<typename Derived>
QualType TreeTransform<Derived>::TransformFunctionNoProtoType(
    TypeLocBuilder &TLB,
    FunctionNoProtoTypeLoc TL) {
  const FunctionNoProtoType *OldFn = TL.getTypePtr();

  QualType NewReturn = getDerived().TransformType(TLB, TL.getReturnLoc());
  if (NewReturn.isNull())
    return QualType();

  QualType NewType = TL.getType();
  if (getDerived().AlwaysRebuild() || NewReturn != OldFn->getReturnType())
    NewType = getDerived().RebuildFunctionNoProtoType(NewReturn);

  FunctionNoProtoTypeLoc NewTL = TLB.push<FunctionNoProtoTypeLoc>(NewType);
  NewTL.setLocalRangeBegin(TL.getLocalRangeBegin());
  NewTL.setLParenLoc(TL.getLParenLoc());
  NewTL.setRParenLoc(TL.getRParenLoc());
  NewTL.setLocalRangeEnd(TL.getLocalRangeEnd());

  return NewType;
}
/// Transform a type named via a dependent using declaration by transforming
/// the referenced declaration and rebuilding the type from it.
template<typename Derived> QualType
TreeTransform<Derived>::TransformUnresolvedUsingType(TypeLocBuilder &TLB,
                                                 UnresolvedUsingTypeLoc TL) {
  const UnresolvedUsingType *T = TL.getTypePtr();
  Decl *D = getDerived().TransformDecl(TL.getNameLoc(), T->getDecl());
  if (!D)
    return QualType();

  QualType Result = TL.getType();
  if (getDerived().AlwaysRebuild() || D != T->getDecl()) {
    Result = getDerived().RebuildUnresolvedUsingType(TL.getNameLoc(), D);
    if (Result.isNull())
      return QualType();
  }

  // We might get an arbitrary type spec type back.  We should at
  // least always get a type spec type, though.
  TypeSpecTypeLoc NewTL = TLB.pushTypeSpec(Result);
  NewTL.setNameLoc(TL.getNameLoc());

  return Result;
}
/// Transform a typedef type by transforming the typedef-name declaration it
/// refers to and rebuilding the type from the (possibly new) declaration.
template<typename Derived>
QualType TreeTransform<Derived>::TransformTypedefType(TypeLocBuilder &TLB,
                                                      TypedefTypeLoc TL) {
  const TypedefType *T = TL.getTypePtr();
  TypedefNameDecl *Typedef
    = cast_or_null<TypedefNameDecl>(getDerived().TransformDecl(TL.getNameLoc(),
                                                               T->getDecl()));
  if (!Typedef)
    return QualType();

  QualType Result = TL.getType();
  if (getDerived().AlwaysRebuild() ||
      Typedef != T->getDecl()) {
    Result = getDerived().RebuildTypedefType(Typedef);
    if (Result.isNull())
      return QualType();
  }

  TypedefTypeLoc NewTL = TLB.push<TypedefTypeLoc>(Result);
  NewTL.setNameLoc(TL.getNameLoc());

  return Result;
}
/// Transform a GNU typeof(expression) type by transforming the underlying
/// expression in an unevaluated context.
template<typename Derived>
QualType TreeTransform<Derived>::TransformTypeOfExprType(TypeLocBuilder &TLB,
                                                      TypeOfExprTypeLoc TL) {
  // typeof expressions are not potentially evaluated contexts
  EnterExpressionEvaluationContext Unevaluated(
      SemaRef, Sema::ExpressionEvaluationContext::Unevaluated,
      Sema::ReuseLambdaContextDecl);

  ExprResult E = getDerived().TransformExpr(TL.getUnderlyingExpr());
  if (E.isInvalid())
    return QualType();

  E = SemaRef.HandleExprEvaluationContextForTypeof(E.get());
  if (E.isInvalid())
    return QualType();

  QualType Result = TL.getType();
  if (getDerived().AlwaysRebuild() ||
      E.get() != TL.getUnderlyingExpr()) {
    Result = getDerived().RebuildTypeOfExprType(E.get(), TL.getTypeofLoc());
    if (Result.isNull())
      return QualType();
  }
  else E.get(); // NOTE(review): appears to be a no-op; result is discarded.

  TypeOfExprTypeLoc NewTL = TLB.push<TypeOfExprTypeLoc>(Result);
  NewTL.setTypeofLoc(TL.getTypeofLoc());
  NewTL.setLParenLoc(TL.getLParenLoc());
  NewTL.setRParenLoc(TL.getRParenLoc());

  return Result;
}
/// Transform a GNU typeof(type) type by transforming the underlying
/// type-source information.
template<typename Derived>
QualType TreeTransform<Derived>::TransformTypeOfType(TypeLocBuilder &TLB,
                                                     TypeOfTypeLoc TL) {
  TypeSourceInfo *OldUnderlying = TL.getUnderlyingTInfo();
  TypeSourceInfo *NewUnderlying = getDerived().TransformType(OldUnderlying);
  if (!NewUnderlying)
    return QualType();

  QualType NewType = TL.getType();
  if (getDerived().AlwaysRebuild() || NewUnderlying != OldUnderlying) {
    NewType = getDerived().RebuildTypeOfType(NewUnderlying->getType());
    if (NewType.isNull())
      return QualType();
  }

  TypeOfTypeLoc NewTL = TLB.push<TypeOfTypeLoc>(NewType);
  NewTL.setTypeofLoc(TL.getTypeofLoc());
  NewTL.setLParenLoc(TL.getLParenLoc());
  NewTL.setRParenLoc(TL.getRParenLoc());
  NewTL.setUnderlyingTInfo(NewUnderlying);

  return NewType;
}
/// Transform a decltype type by transforming its operand expression in an
/// unevaluated (EK_Decltype) context.
template<typename Derived>
QualType TreeTransform<Derived>::TransformDecltypeType(TypeLocBuilder &TLB,
                                                       DecltypeTypeLoc TL) {
  const DecltypeType *T = TL.getTypePtr();

  // decltype expressions are not potentially evaluated contexts
  EnterExpressionEvaluationContext Unevaluated(
      SemaRef, Sema::ExpressionEvaluationContext::Unevaluated, nullptr,
      Sema::ExpressionEvaluationContextRecord::EK_Decltype);

  ExprResult E = getDerived().TransformExpr(T->getUnderlyingExpr());
  if (E.isInvalid())
    return QualType();

  E = getSema().ActOnDecltypeExpression(E.get());
  if (E.isInvalid())
    return QualType();

  QualType Result = TL.getType();
  if (getDerived().AlwaysRebuild() ||
      E.get() != T->getUnderlyingExpr()) {
    Result = getDerived().RebuildDecltypeType(E.get(), TL.getNameLoc());
    if (Result.isNull())
      return QualType();
  }
  else E.get(); // NOTE(review): appears to be a no-op; result is discarded.

  DecltypeTypeLoc NewTL = TLB.push<DecltypeTypeLoc>(Result);
  NewTL.setNameLoc(TL.getNameLoc());

  return Result;
}
/// Transform a unary type transformation (e.g. __underlying_type).
///
/// Only rebuilds when the type is still dependent; otherwise the already
/// computed type is reused as-is. The original underlying type-source
/// information is propagated either way.
template<typename Derived>
QualType TreeTransform<Derived>::TransformUnaryTransformType(
                                                            TypeLocBuilder &TLB,
                                                     UnaryTransformTypeLoc TL) {
  QualType Result = TL.getType();
  if (Result->isDependentType()) {
    const UnaryTransformType *T = TL.getTypePtr();
    QualType NewBase =
      getDerived().TransformType(TL.getUnderlyingTInfo())->getType();
    Result = getDerived().RebuildUnaryTransformType(NewBase,
                                                    T->getUTTKind(),
                                                    TL.getKWLoc());
    if (Result.isNull())
      return QualType();
  }

  UnaryTransformTypeLoc NewTL = TLB.push<UnaryTransformTypeLoc>(Result);
  NewTL.setKWLoc(TL.getKWLoc());
  NewTL.setParensRange(TL.getParensRange());
  NewTL.setUnderlyingTInfo(TL.getUnderlyingTInfo());
  return Result;
}
/// Transform an 'auto' (or other deduced placeholder) type.
///
/// Transforms the deduced type, if deduction has already happened; rebuilds
/// when the deduced type changed or the type is still dependent.
template<typename Derived>
QualType TreeTransform<Derived>::TransformAutoType(TypeLocBuilder &TLB,
                                                   AutoTypeLoc TL) {
  const AutoType *T = TL.getTypePtr();
  QualType OldDeduced = T->getDeducedType();
  QualType NewDeduced;
  if (!OldDeduced.isNull()) {
    NewDeduced = getDerived().TransformType(OldDeduced);
    if (NewDeduced.isNull())
      return QualType();
  }

  QualType Result = TL.getType();
  if (getDerived().AlwaysRebuild() || NewDeduced != OldDeduced ||
      T->isDependentType()) {
    Result = getDerived().RebuildAutoType(NewDeduced, T->getKeyword());
    if (Result.isNull())
      return QualType();
  }

  AutoTypeLoc NewTL = TLB.push<AutoTypeLoc>(Result);
  NewTL.setNameLoc(TL.getNameLoc());

  return Result;
}
/// Transform a deduced template-specialization type (CTAD placeholder).
///
/// Transforms the template name and any already-deduced type, then always
/// rebuilds the type from the transformed pieces.
template<typename Derived>
QualType TreeTransform<Derived>::TransformDeducedTemplateSpecializationType(
    TypeLocBuilder &TLB, DeducedTemplateSpecializationTypeLoc TL) {
  const DeducedTemplateSpecializationType *T = TL.getTypePtr();

  CXXScopeSpec SS;
  TemplateName TemplateName = getDerived().TransformTemplateName(
      SS, T->getTemplateName(), TL.getTemplateNameLoc());
  if (TemplateName.isNull())
    return QualType();

  QualType OldDeduced = T->getDeducedType();
  QualType NewDeduced;
  if (!OldDeduced.isNull()) {
    NewDeduced = getDerived().TransformType(OldDeduced);
    if (NewDeduced.isNull())
      return QualType();
  }

  // Note: unconditionally rebuilt, unlike most Transform*Type methods.
  QualType Result = getDerived().RebuildDeducedTemplateSpecializationType(
      TemplateName, NewDeduced);
  if (Result.isNull())
    return QualType();

  DeducedTemplateSpecializationTypeLoc NewTL =
      TLB.push<DeducedTemplateSpecializationTypeLoc>(Result);
  NewTL.setTemplateNameLoc(TL.getTemplateNameLoc());

  return Result;
}
/// Transform a record (class/struct/union) type by transforming the
/// declaration it refers to.
template<typename Derived>
QualType TreeTransform<Derived>::TransformRecordType(TypeLocBuilder &TLB,
                                                     RecordTypeLoc TL) {
  const RecordType *OldTy = TL.getTypePtr();
  auto *RD = cast_or_null<RecordDecl>(
      getDerived().TransformDecl(TL.getNameLoc(), OldTy->getDecl()));
  if (!RD)
    return QualType();

  QualType NewType = TL.getType();
  bool MustRebuild = getDerived().AlwaysRebuild() || RD != OldTy->getDecl();
  if (MustRebuild) {
    NewType = getDerived().RebuildRecordType(RD);
    if (NewType.isNull())
      return QualType();
  }

  TLB.push<RecordTypeLoc>(NewType).setNameLoc(TL.getNameLoc());
  return NewType;
}
/// Transform an enumeration type by transforming the declaration it refers
/// to.
template<typename Derived>
QualType TreeTransform<Derived>::TransformEnumType(TypeLocBuilder &TLB,
                                                   EnumTypeLoc TL) {
  const EnumType *OldTy = TL.getTypePtr();
  auto *ED = cast_or_null<EnumDecl>(
      getDerived().TransformDecl(TL.getNameLoc(), OldTy->getDecl()));
  if (!ED)
    return QualType();

  QualType NewType = TL.getType();
  bool MustRebuild = getDerived().AlwaysRebuild() || ED != OldTy->getDecl();
  if (MustRebuild) {
    NewType = getDerived().RebuildEnumType(ED);
    if (NewType.isNull())
      return QualType();
  }

  TLB.push<EnumTypeLoc>(NewType).setNameLoc(TL.getNameLoc());
  return NewType;
}
/// Transform an injected-class-name type by transforming the class
/// declaration and forming the corresponding TypeDecl type.
template<typename Derived>
QualType TreeTransform<Derived>::TransformInjectedClassNameType(
                                         TypeLocBuilder &TLB,
                                         InjectedClassNameTypeLoc TL) {
  Decl *D = getDerived().TransformDecl(TL.getNameLoc(),
                                       TL.getTypePtr()->getDecl());
  if (!D) return QualType();

  QualType T = SemaRef.Context.getTypeDeclType(cast<TypeDecl>(D));
  TLB.pushTypeSpec(T).setNameLoc(TL.getNameLoc());
  return T;
}
/// Transform a template type parameter type; the default behavior is the
/// generic type-spec transformation (derived classes typically override).
template<typename Derived>
QualType TreeTransform<Derived>::TransformTemplateTypeParmType(
                                                TypeLocBuilder &TLB,
                                                TemplateTypeParmTypeLoc TL) {
  return TransformTypeSpecType(TLB, TL);
}
/// Transform a type that records a prior template-parameter substitution.
///
/// Re-transforms the replacement type, canonicalizes it, and rebuilds the
/// SubstTemplateTypeParmType so the substitution record is preserved.
template<typename Derived>
QualType TreeTransform<Derived>::TransformSubstTemplateTypeParmType(
                                         TypeLocBuilder &TLB,
                                         SubstTemplateTypeParmTypeLoc TL) {
  const SubstTemplateTypeParmType *T = TL.getTypePtr();

  // Substitute into the replacement type, which itself might involve something
  // that needs to be transformed. This only tends to occur with default
  // template arguments of template template parameters.
  TemporaryBase Rebase(*this, TL.getNameLoc(), DeclarationName());
  QualType Replacement = getDerived().TransformType(T->getReplacementType());
  if (Replacement.isNull())
    return QualType();

  // Always canonicalize the replacement type.
  Replacement = SemaRef.Context.getCanonicalType(Replacement);
  QualType Result
    = SemaRef.Context.getSubstTemplateTypeParmType(T->getReplacedParameter(),
                                                   Replacement);

  // Propagate type-source information.
  SubstTemplateTypeParmTypeLoc NewTL
    = TLB.push<SubstTemplateTypeParmTypeLoc>(Result);
  NewTL.setNameLoc(TL.getNameLoc());
  return Result;
}
/// Transform a substituted template-parameter pack type; the default
/// behavior is the generic type-spec transformation.
template<typename Derived>
QualType TreeTransform<Derived>::TransformSubstTemplateTypeParmPackType(
                                          TypeLocBuilder &TLB,
                                          SubstTemplateTypeParmPackTypeLoc TL) {
  return TransformTypeSpecType(TLB, TL);
}
/// Transform a template specialization type.
///
/// Transforms the template name first, then delegates to the overload that
/// also transforms the template arguments.
template<typename Derived>
QualType TreeTransform<Derived>::TransformTemplateSpecializationType(
                                                        TypeLocBuilder &TLB,
                                           TemplateSpecializationTypeLoc TL) {
  const TemplateSpecializationType *T = TL.getTypePtr();

  // The nested-name-specifier never matters in a TemplateSpecializationType,
  // because we can't have a dependent nested-name-specifier anyway.
  CXXScopeSpec SS;
  TemplateName Template
    = getDerived().TransformTemplateName(SS, T->getTemplateName(),
                                         TL.getTemplateNameLoc());
  if (Template.isNull())
    return QualType();

  return getDerived().TransformTemplateSpecializationType(TLB, TL, Template);
}
/// Transform an _Atomic type by transforming its value type.
template<typename Derived>
QualType TreeTransform<Derived>::TransformAtomicType(TypeLocBuilder &TLB,
                                                     AtomicTypeLoc TL) {
  QualType NewValueType = getDerived().TransformType(TLB, TL.getValueLoc());
  if (NewValueType.isNull())
    return QualType();

  QualType NewType = TL.getType();
  bool MustRebuild = getDerived().AlwaysRebuild() ||
                     NewValueType != TL.getValueLoc().getType();
  if (MustRebuild) {
    NewType = getDerived().RebuildAtomicType(NewValueType, TL.getKWLoc());
    if (NewType.isNull())
      return QualType();
  }

  AtomicTypeLoc NewTL = TLB.push<AtomicTypeLoc>(NewType);
  NewTL.setKWLoc(TL.getKWLoc());
  NewTL.setLParenLoc(TL.getLParenLoc());
  NewTL.setRParenLoc(TL.getRParenLoc());

  return NewType;
}
/// Transform an OpenCL pipe type by transforming its element (value) type.
/// The read/write direction is taken from the original type and preserved.
template <typename Derived>
QualType TreeTransform<Derived>::TransformPipeType(TypeLocBuilder &TLB,
                                                   PipeTypeLoc TL) {
  QualType ValueType = getDerived().TransformType(TLB, TL.getValueLoc());
  if (ValueType.isNull())
    return QualType();

  QualType Result = TL.getType();
  if (getDerived().AlwaysRebuild() || ValueType != TL.getValueLoc().getType()) {
    // At this point Result is still the original type, so it carries the
    // original pipe's access qualifier.
    const PipeType *PT = Result->getAs<PipeType>();
    bool isReadPipe = PT->isReadOnly();
    Result = getDerived().RebuildPipeType(ValueType, TL.getKWLoc(), isReadPipe);
    if (Result.isNull())
      return QualType();
  }

  PipeTypeLoc NewTL = TLB.push<PipeTypeLoc>(Result);
  NewTL.setKWLoc(TL.getKWLoc());

  return Result;
}
/// Simple iterator that traverses the template arguments in a
/// container that provides a \c getArgLoc() member function.
///
/// This iterator is intended to be used with the iterator form of
/// \c TreeTransform<Derived>::TransformTemplateArguments().
template<typename ArgLocContainer>
class TemplateArgumentLocContainerIterator {
  ArgLocContainer *Container; // Not owned; null only for a singular iterator.
  unsigned Index;             // Current position within *Container.

public:
  typedef TemplateArgumentLoc value_type;
  typedef TemplateArgumentLoc reference;
  typedef int difference_type;
  typedef std::input_iterator_tag iterator_category;

  /// Proxy returned by operator->(), needed because getArgLoc() returns the
  /// argument by value.
  class pointer {
    TemplateArgumentLoc Arg;

  public:
    explicit pointer(TemplateArgumentLoc Arg) : Arg(Arg) { }

    const TemplateArgumentLoc *operator->() const {
      return &Arg;
    }
  };

  /// Construct a singular iterator.
  ///
  /// Fix: previously the members were left uninitialized, so comparing two
  /// default-constructed iterators with operator== read indeterminate values
  /// (undefined behavior). Zero-initialize them so such comparisons are
  /// well-defined.
  TemplateArgumentLocContainerIterator() : Container(nullptr), Index(0) {}

  TemplateArgumentLocContainerIterator(ArgLocContainer &Container,
                                       unsigned Index)
    : Container(&Container), Index(Index) { }

  TemplateArgumentLocContainerIterator &operator++() {
    ++Index;
    return *this;
  }

  TemplateArgumentLocContainerIterator operator++(int) {
    TemplateArgumentLocContainerIterator Old(*this);
    ++(*this);
    return Old;
  }

  TemplateArgumentLoc operator*() const {
    return Container->getArgLoc(Index);
  }

  pointer operator->() const {
    return pointer(Container->getArgLoc(Index));
  }

  friend bool operator==(const TemplateArgumentLocContainerIterator &X,
                         const TemplateArgumentLocContainerIterator &Y) {
    return X.Container == Y.Container && X.Index == Y.Index;
  }

  friend bool operator!=(const TemplateArgumentLocContainerIterator &X,
                         const TemplateArgumentLocContainerIterator &Y) {
    return !(X == Y);
  }
};
/// Transform a template specialization type, given an already-transformed
/// template name, by transforming its template arguments and rebuilding
/// the specialization.
template <typename Derived>
QualType TreeTransform<Derived>::TransformTemplateSpecializationType(
                                                        TypeLocBuilder &TLB,
                                           TemplateSpecializationTypeLoc TL,
                                                      TemplateName Template) {
  TemplateArgumentListInfo NewTemplateArgs;
  NewTemplateArgs.setLAngleLoc(TL.getLAngleLoc());
  NewTemplateArgs.setRAngleLoc(TL.getRAngleLoc());
  typedef TemplateArgumentLocContainerIterator<TemplateSpecializationTypeLoc>
    ArgIterator;
  if (getDerived().TransformTemplateArguments(ArgIterator(TL, 0),
                                              ArgIterator(TL, TL.getNumArgs()),
                                              NewTemplateArgs))
    return QualType();

  // FIXME: maybe don't rebuild if all the template arguments are the same.

  QualType Result =
    getDerived().RebuildTemplateSpecializationType(Template,
                                                   TL.getTemplateNameLoc(),
                                                   NewTemplateArgs);

  if (!Result.isNull()) {
    // Specializations of template template parameters are represented as
    // TemplateSpecializationTypes, and substitution of type alias templates
    // within a dependent context can transform them into
    // DependentTemplateSpecializationTypes.
    if (isa<DependentTemplateSpecializationType>(Result)) {
      // No qualifier source info is available here, so the elaborated
      // keyword and qualifier locations are left empty.
      DependentTemplateSpecializationTypeLoc NewTL
        = TLB.push<DependentTemplateSpecializationTypeLoc>(Result);
      NewTL.setElaboratedKeywordLoc(SourceLocation());
      NewTL.setQualifierLoc(NestedNameSpecifierLoc());
      NewTL.setTemplateKeywordLoc(TL.getTemplateKeywordLoc());
      NewTL.setTemplateNameLoc(TL.getTemplateNameLoc());
      NewTL.setLAngleLoc(TL.getLAngleLoc());
      NewTL.setRAngleLoc(TL.getRAngleLoc());
      for (unsigned i = 0, e = NewTemplateArgs.size(); i != e; ++i)
        NewTL.setArgLocInfo(i, NewTemplateArgs[i].getLocInfo());
      return Result;
    }

    TemplateSpecializationTypeLoc NewTL
      = TLB.push<TemplateSpecializationTypeLoc>(Result);
    NewTL.setTemplateKeywordLoc(TL.getTemplateKeywordLoc());
    NewTL.setTemplateNameLoc(TL.getTemplateNameLoc());
    NewTL.setLAngleLoc(TL.getLAngleLoc());
    NewTL.setRAngleLoc(TL.getRAngleLoc());
    for (unsigned i = 0, e = NewTemplateArgs.size(); i != e; ++i)
      NewTL.setArgLocInfo(i, NewTemplateArgs[i].getLocInfo());
  }

  return Result;
}
/// Transform a dependent template specialization type, given an
/// already-transformed template name and the scope specifier that
/// qualified it.
template <typename Derived>
QualType TreeTransform<Derived>::TransformDependentTemplateSpecializationType(
                                                        TypeLocBuilder &TLB,
                                  DependentTemplateSpecializationTypeLoc TL,
                                                      TemplateName Template,
                                                            CXXScopeSpec &SS) {
  TemplateArgumentListInfo NewTemplateArgs;
  NewTemplateArgs.setLAngleLoc(TL.getLAngleLoc());
  NewTemplateArgs.setRAngleLoc(TL.getRAngleLoc());
  typedef TemplateArgumentLocContainerIterator<
            DependentTemplateSpecializationTypeLoc> ArgIterator;
  if (getDerived().TransformTemplateArguments(ArgIterator(TL, 0),
                                              ArgIterator(TL, TL.getNumArgs()),
                                              NewTemplateArgs))
    return QualType();

  // FIXME: maybe don't rebuild if all the template arguments are the same.

  if (DependentTemplateName *DTN = Template.getAsDependentTemplateName()) {
    // The template name is still dependent: build a dependent
    // specialization type directly from the dependent name's pieces.
    QualType Result
      = getSema().Context.getDependentTemplateSpecializationType(
                                                TL.getTypePtr()->getKeyword(),
                                                         DTN->getQualifier(),
                                                        DTN->getIdentifier(),
                                                             NewTemplateArgs);

    // The qualifier location comes from the transformed scope specifier,
    // not from the original type location.
    DependentTemplateSpecializationTypeLoc NewTL
      = TLB.push<DependentTemplateSpecializationTypeLoc>(Result);
    NewTL.setElaboratedKeywordLoc(TL.getElaboratedKeywordLoc());
    NewTL.setQualifierLoc(SS.getWithLocInContext(SemaRef.Context));
    NewTL.setTemplateKeywordLoc(TL.getTemplateKeywordLoc());
    NewTL.setTemplateNameLoc(TL.getTemplateNameLoc());
    NewTL.setLAngleLoc(TL.getLAngleLoc());
    NewTL.setRAngleLoc(TL.getRAngleLoc());
    for (unsigned i = 0, e = NewTemplateArgs.size(); i != e; ++i)
      NewTL.setArgLocInfo(i, NewTemplateArgs[i].getLocInfo());
    return Result;
  }

  // The template name resolved to something non-dependent; rebuild a
  // (non-dependent) template specialization from it.
  QualType Result
    = getDerived().RebuildTemplateSpecializationType(Template,
                                                     TL.getTemplateNameLoc(),
                                                     NewTemplateArgs);

  if (!Result.isNull()) {
    /// FIXME: Wrap this in an elaborated-type-specifier?
    TemplateSpecializationTypeLoc NewTL
      = TLB.push<TemplateSpecializationTypeLoc>(Result);
    NewTL.setTemplateKeywordLoc(TL.getTemplateKeywordLoc());
    NewTL.setTemplateNameLoc(TL.getTemplateNameLoc());
    NewTL.setLAngleLoc(TL.getLAngleLoc());
    NewTL.setRAngleLoc(TL.getRAngleLoc());
    for (unsigned i = 0, e = NewTemplateArgs.size(); i != e; ++i)
      NewTL.setArgLocInfo(i, NewTemplateArgs[i].getLocInfo());
  }
  return Result;
}
/// Transform an elaborated type (e.g. 'struct S', 'typename T::type') by
/// transforming its optional nested-name-specifier and the named type it
/// elaborates.
template<typename Derived>
QualType
TreeTransform<Derived>::TransformElaboratedType(TypeLocBuilder &TLB,
                                                ElaboratedTypeLoc TL) {
  const ElaboratedType *T = TL.getTypePtr();

  NestedNameSpecifierLoc QualifierLoc;
  // NOTE: the qualifier in an ElaboratedType is optional.
  if (TL.getQualifierLoc()) {
    QualifierLoc
      = getDerived().TransformNestedNameSpecifierLoc(TL.getQualifierLoc());
    if (!QualifierLoc)
      return QualType();
  }

  QualType NamedT = getDerived().TransformType(TLB, TL.getNamedTypeLoc());
  if (NamedT.isNull())
    return QualType();

  // C++0x [dcl.type.elab]p2:
  //   If the identifier resolves to a typedef-name or the simple-template-id
  //   resolves to an alias template specialization, the
  //   elaborated-type-specifier is ill-formed.
  if (T->getKeyword() != ETK_None && T->getKeyword() != ETK_Typename) {
    if (const TemplateSpecializationType *TST =
          NamedT->getAs<TemplateSpecializationType>()) {
      TemplateName Template = TST->getTemplateName();
      if (TypeAliasTemplateDecl *TAT = dyn_cast_or_null<TypeAliasTemplateDecl>(
              Template.getAsTemplateDecl())) {
        // Diagnose a tag keyword ('struct', 'class', ...) used with an
        // alias template specialization, and point at the alias.
        SemaRef.Diag(TL.getNamedTypeLoc().getBeginLoc(),
                     diag::err_tag_reference_non_tag)
            << TAT << Sema::NTK_TypeAliasTemplate
            << ElaboratedType::getTagTypeKindForKeyword(T->getKeyword());
        SemaRef.Diag(TAT->getLocation(), diag::note_declared_at);
      }
    }
  }

  QualType Result = TL.getType();
  if (getDerived().AlwaysRebuild() ||
      QualifierLoc != TL.getQualifierLoc() ||
      NamedT != T->getNamedType()) {
    Result = getDerived().RebuildElaboratedType(TL.getElaboratedKeywordLoc(),
                                                T->getKeyword(),
                                                QualifierLoc, NamedT);
    if (Result.isNull())
      return QualType();
  }

  ElaboratedTypeLoc NewTL = TLB.push<ElaboratedTypeLoc>(Result);
  NewTL.setElaboratedKeywordLoc(TL.getElaboratedKeywordLoc());
  NewTL.setQualifierLoc(QualifierLoc);
  return Result;
}
/// Transform an attributed type by transforming its modified type (and,
/// when rebuilding, its equivalent type), re-checking nullability
/// attributes against the new modified type.
template<typename Derived>
QualType TreeTransform<Derived>::TransformAttributedType(
                                                TypeLocBuilder &TLB,
                                                AttributedTypeLoc TL) {
  const AttributedType *oldType = TL.getTypePtr();
  QualType modifiedType = getDerived().TransformType(TLB, TL.getModifiedLoc());
  if (modifiedType.isNull())
    return QualType();

  // oldAttr can be null if we started with a QualType rather than a TypeLoc.
  const Attr *oldAttr = TL.getAttr();
  const Attr *newAttr = oldAttr ? getDerived().TransformAttr(oldAttr) : nullptr;
  if (oldAttr && !newAttr)
    return QualType();

  QualType result = TL.getType();

  // FIXME: dependent operand expressions?
  if (getDerived().AlwaysRebuild() ||
      modifiedType != oldType->getModifiedType()) {
    // TODO: this is really lame; we should really be rebuilding the
    // equivalent type from first principles.
    QualType equivalentType
      = getDerived().TransformType(oldType->getEquivalentType());
    if (equivalentType.isNull())
      return QualType();

    // Check whether we can add nullability; it is only represented as
    // type sugar, and therefore cannot be diagnosed in any other way.
    if (auto nullability = oldType->getImmediateNullability()) {
      if (!modifiedType->canHaveNullability()) {
        SemaRef.Diag(TL.getAttr()->getLocation(),
                     diag::err_nullability_nonpointer)
            << DiagNullabilityKind(*nullability, false) << modifiedType;
        return QualType();
      }
    }

    result = SemaRef.Context.getAttributedType(TL.getAttrKind(),
                                               modifiedType,
                                               equivalentType);
  }

  AttributedTypeLoc newTL = TLB.push<AttributedTypeLoc>(result);
  newTL.setAttr(newAttr);
  return result;
}
/// Transform a parenthesized type by transforming the type inside the
/// parentheses and rebuilding the wrapper only if required.
template<typename Derived>
QualType
TreeTransform<Derived>::TransformParenType(TypeLocBuilder &TLB,
                                           ParenTypeLoc TL) {
  QualType NewInner = getDerived().TransformType(TLB, TL.getInnerLoc());
  if (NewInner.isNull())
    return QualType();

  QualType Result = TL.getType();
  if (NewInner != TL.getInnerLoc().getType() || getDerived().AlwaysRebuild()) {
    Result = getDerived().RebuildParenType(NewInner);
    if (Result.isNull())
      return QualType();
  }

  ParenTypeLoc NewTL = TLB.push<ParenTypeLoc>(Result);
  NewTL.setLParenLoc(TL.getLParenLoc());
  NewTL.setRParenLoc(TL.getRParenLoc());
  return Result;
}
/// Transform a macro-qualified type by transforming the underlying type;
/// the macro identifier and expansion location are preserved.
template <typename Derived>
QualType
TreeTransform<Derived>::TransformMacroQualifiedType(TypeLocBuilder &TLB,
                                                    MacroQualifiedTypeLoc TL) {
  QualType NewInner = getDerived().TransformType(TLB, TL.getInnerLoc());
  if (NewInner.isNull())
    return QualType();

  const bool InnerChanged = NewInner != TL.getInnerLoc().getType();
  QualType Result = TL.getType();
  if (InnerChanged || getDerived().AlwaysRebuild()) {
    Result = getDerived().RebuildMacroQualifiedType(NewInner,
                                                    TL.getMacroIdentifier());
    if (Result.isNull())
      return QualType();
  }

  MacroQualifiedTypeLoc NewTL = TLB.push<MacroQualifiedTypeLoc>(Result);
  NewTL.setExpansionLoc(TL.getExpansionLoc());
  return Result;
}
/// Transform a dependent name type. Delegates to the general overload;
/// this entry point is never in a deduced template-specialization-type
/// context.
template<typename Derived>
QualType TreeTransform<Derived>::TransformDependentNameType(
    TypeLocBuilder &TLB, DependentNameTypeLoc TL) {
  return TransformDependentNameType(TLB, TL, /*DeducedTSTContext=*/false);
}
/// Transform a dependent name type (e.g. 'typename T::type') by
/// transforming its nested-name-specifier and rebuilding the type, which
/// may resolve to an elaborated type if the name is no longer dependent.
template<typename Derived>
QualType TreeTransform<Derived>::TransformDependentNameType(
    TypeLocBuilder &TLB, DependentNameTypeLoc TL, bool DeducedTSTContext) {
  const DependentNameType *T = TL.getTypePtr();

  NestedNameSpecifierLoc QualifierLoc
    = getDerived().TransformNestedNameSpecifierLoc(TL.getQualifierLoc());
  if (!QualifierLoc)
    return QualType();

  QualType Result
    = getDerived().RebuildDependentNameType(T->getKeyword(),
                                            TL.getElaboratedKeywordLoc(),
                                            QualifierLoc,
                                            T->getIdentifier(),
                                            TL.getNameLoc(),
                                            DeducedTSTContext);
  if (Result.isNull())
    return QualType();

  if (const ElaboratedType* ElabT = Result->getAs<ElaboratedType>()) {
    // The name resolved: record source info for the named type and wrap it
    // in the elaborated type's location info.
    QualType NamedT = ElabT->getNamedType();
    TLB.pushTypeSpec(NamedT).setNameLoc(TL.getNameLoc());

    ElaboratedTypeLoc NewTL = TLB.push<ElaboratedTypeLoc>(Result);
    NewTL.setElaboratedKeywordLoc(TL.getElaboratedKeywordLoc());
    NewTL.setQualifierLoc(QualifierLoc);
  } else {
    // Still dependent: preserve the dependent name type's location info.
    DependentNameTypeLoc NewTL = TLB.push<DependentNameTypeLoc>(Result);
    NewTL.setElaboratedKeywordLoc(TL.getElaboratedKeywordLoc());
    NewTL.setQualifierLoc(QualifierLoc);
    NewTL.setNameLoc(TL.getNameLoc());
  }
  return Result;
}
/// Transform a dependent template specialization type by first
/// transforming its (optional) nested-name-specifier, then delegating to
/// the overload that takes the transformed qualifier.
template<typename Derived>
QualType TreeTransform<Derived>::
TransformDependentTemplateSpecializationType(TypeLocBuilder &TLB,
                                     DependentTemplateSpecializationTypeLoc TL) {
  NestedNameSpecifierLoc QualifierLoc;
  if (NestedNameSpecifierLoc OldQualifierLoc = TL.getQualifierLoc()) {
    QualifierLoc =
        getDerived().TransformNestedNameSpecifierLoc(OldQualifierLoc);
    if (!QualifierLoc)
      return QualType();
  }

  return getDerived().TransformDependentTemplateSpecializationType(
      TLB, TL, QualifierLoc);
}
/// Transform a dependent template specialization type given its
/// already-transformed qualifier. The rebuilt type may be an elaborated
/// type, still-dependent specialization, or plain specialization, and the
/// source-location info is pushed accordingly.
template<typename Derived>
QualType TreeTransform<Derived>::
TransformDependentTemplateSpecializationType(TypeLocBuilder &TLB,
                                     DependentTemplateSpecializationTypeLoc TL,
                                           NestedNameSpecifierLoc QualifierLoc) {
  const DependentTemplateSpecializationType *T = TL.getTypePtr();

  TemplateArgumentListInfo NewTemplateArgs;
  NewTemplateArgs.setLAngleLoc(TL.getLAngleLoc());
  NewTemplateArgs.setRAngleLoc(TL.getRAngleLoc());

  typedef TemplateArgumentLocContainerIterator<
                            DependentTemplateSpecializationTypeLoc> ArgIterator;
  if (getDerived().TransformTemplateArguments(ArgIterator(TL, 0),
                                              ArgIterator(TL, TL.getNumArgs()),
                                              NewTemplateArgs))
    return QualType();

  QualType Result = getDerived().RebuildDependentTemplateSpecializationType(
      T->getKeyword(), QualifierLoc, TL.getTemplateKeywordLoc(),
      T->getIdentifier(), TL.getTemplateNameLoc(), NewTemplateArgs,
      /*AllowInjectedClassName*/ false);
  if (Result.isNull())
    return QualType();

  if (const ElaboratedType *ElabT = dyn_cast<ElaboratedType>(Result)) {
    QualType NamedT = ElabT->getNamedType();

    // Copy information relevant to the template specialization.
    TemplateSpecializationTypeLoc NamedTL
      = TLB.push<TemplateSpecializationTypeLoc>(NamedT);
    NamedTL.setTemplateKeywordLoc(TL.getTemplateKeywordLoc());
    NamedTL.setTemplateNameLoc(TL.getTemplateNameLoc());
    NamedTL.setLAngleLoc(TL.getLAngleLoc());
    NamedTL.setRAngleLoc(TL.getRAngleLoc());
    for (unsigned I = 0, E = NewTemplateArgs.size(); I != E; ++I)
      NamedTL.setArgLocInfo(I, NewTemplateArgs[I].getLocInfo());

    // Copy information relevant to the elaborated type.
    ElaboratedTypeLoc NewTL = TLB.push<ElaboratedTypeLoc>(Result);
    NewTL.setElaboratedKeywordLoc(TL.getElaboratedKeywordLoc());
    NewTL.setQualifierLoc(QualifierLoc);
  } else if (isa<DependentTemplateSpecializationType>(Result)) {
    // Still dependent: keep the dependent specialization's location info.
    DependentTemplateSpecializationTypeLoc SpecTL
      = TLB.push<DependentTemplateSpecializationTypeLoc>(Result);
    SpecTL.setElaboratedKeywordLoc(TL.getElaboratedKeywordLoc());
    SpecTL.setQualifierLoc(QualifierLoc);
    SpecTL.setTemplateKeywordLoc(TL.getTemplateKeywordLoc());
    SpecTL.setTemplateNameLoc(TL.getTemplateNameLoc());
    SpecTL.setLAngleLoc(TL.getLAngleLoc());
    SpecTL.setRAngleLoc(TL.getRAngleLoc());
    for (unsigned I = 0, E = NewTemplateArgs.size(); I != E; ++I)
      SpecTL.setArgLocInfo(I, NewTemplateArgs[I].getLocInfo());
  } else {
    // Resolved to a plain template specialization.
    TemplateSpecializationTypeLoc SpecTL
      = TLB.push<TemplateSpecializationTypeLoc>(Result);
    SpecTL.setTemplateKeywordLoc(TL.getTemplateKeywordLoc());
    SpecTL.setTemplateNameLoc(TL.getTemplateNameLoc());
    SpecTL.setLAngleLoc(TL.getLAngleLoc());
    SpecTL.setRAngleLoc(TL.getRAngleLoc());
    for (unsigned I = 0, E = NewTemplateArgs.size(); I != E; ++I)
      SpecTL.setArgLocInfo(I, NewTemplateArgs[I].getLocInfo());
  }
  return Result;
}
/// Transform a pack expansion type (T...) by transforming its pattern and
/// rebuilding the expansion only when needed.
template<typename Derived>
QualType TreeTransform<Derived>::TransformPackExpansionType(TypeLocBuilder &TLB,
                                                      PackExpansionTypeLoc TL) {
  TypeLoc OldPatternLoc = TL.getPatternLoc();
  QualType NewPattern = getDerived().TransformType(TLB, OldPatternLoc);
  if (NewPattern.isNull())
    return QualType();

  QualType Result = TL.getType();
  if (NewPattern != OldPatternLoc.getType() || getDerived().AlwaysRebuild()) {
    // Preserve the original expansion count, if it was known.
    Result = getDerived().RebuildPackExpansionType(
        NewPattern, OldPatternLoc.getSourceRange(), TL.getEllipsisLoc(),
        TL.getTypePtr()->getNumExpansions());
    if (Result.isNull())
      return QualType();
  }

  PackExpansionTypeLoc NewTL = TLB.push<PackExpansionTypeLoc>(Result);
  NewTL.setEllipsisLoc(TL.getEllipsisLoc());
  return Result;
}
/// Transform an Objective-C interface type. There is nothing to do:
/// the type is copied through along with its source info.
template<typename Derived>
QualType
TreeTransform<Derived>::TransformObjCInterfaceType(TypeLocBuilder &TLB,
                                                   ObjCInterfaceTypeLoc TL) {
  // ObjCInterfaceType is never dependent.
  TLB.pushFullCopy(TL);
  return TL.getType();
}
/// Transform an Objective-C type parameter type by transforming the
/// underlying type parameter declaration and rebuilding with the original
/// protocol qualifiers.
template<typename Derived>
QualType
TreeTransform<Derived>::TransformObjCTypeParamType(TypeLocBuilder &TLB,
                                                   ObjCTypeParamTypeLoc TL) {
  const ObjCTypeParamType *T = TL.getTypePtr();
  ObjCTypeParamDecl *OTP = cast_or_null<ObjCTypeParamDecl>(
      getDerived().TransformDecl(T->getDecl()->getLocation(), T->getDecl()));
  if (!OTP)
    return QualType();

  QualType Result = TL.getType();
  if (getDerived().AlwaysRebuild() ||
      OTP != T->getDecl()) {
    // The protocol list is carried over unchanged from the original type.
    Result = getDerived().RebuildObjCTypeParamType(OTP,
                 TL.getProtocolLAngleLoc(),
                 llvm::makeArrayRef(TL.getTypePtr()->qual_begin(),
                                    TL.getNumProtocols()),
                 TL.getProtocolLocs(),
                 TL.getProtocolRAngleLoc());
    if (Result.isNull())
      return QualType();
  }

  ObjCTypeParamTypeLoc NewTL = TLB.push<ObjCTypeParamTypeLoc>(Result);
  if (TL.getNumProtocols()) {
    NewTL.setProtocolLAngleLoc(TL.getProtocolLAngleLoc());
    for (unsigned i = 0, n = TL.getNumProtocols(); i != n; ++i)
      NewTL.setProtocolLoc(i, TL.getProtocolLoc(i));
    NewTL.setProtocolRAngleLoc(TL.getProtocolRAngleLoc());
  }
  return Result;
}
/// Transform an Objective-C object type by transforming its base type and
/// each of its type arguments, instantiating any pack expansions found
/// among the type arguments. Protocol qualifiers are carried over
/// unchanged.
template<typename Derived>
QualType
TreeTransform<Derived>::TransformObjCObjectType(TypeLocBuilder &TLB,
                                                ObjCObjectTypeLoc TL) {
  // Transform base type.
  QualType BaseType = getDerived().TransformType(TLB, TL.getBaseLoc());
  if (BaseType.isNull())
    return QualType();

  bool AnyChanged = BaseType != TL.getBaseLoc().getType();

  // Transform type arguments.
  SmallVector<TypeSourceInfo *, 4> NewTypeArgInfos;
  for (unsigned i = 0, n = TL.getNumTypeArgs(); i != n; ++i) {
    TypeSourceInfo *TypeArgInfo = TL.getTypeArgTInfo(i);
    TypeLoc TypeArgLoc = TypeArgInfo->getTypeLoc();
    QualType TypeArg = TypeArgInfo->getType();
    if (auto PackExpansionLoc = TypeArgLoc.getAs<PackExpansionTypeLoc>()) {
      AnyChanged = true;

      // We have a pack expansion. Instantiate it.
      const auto *PackExpansion = PackExpansionLoc.getType()
                                    ->castAs<PackExpansionType>();
      SmallVector<UnexpandedParameterPack, 2> Unexpanded;
      SemaRef.collectUnexpandedParameterPacks(PackExpansion->getPattern(),
                                              Unexpanded);
      assert(!Unexpanded.empty() && "Pack expansion without parameter packs?");

      // Determine whether the set of unexpanded parameter packs can
      // and should be expanded.
      TypeLoc PatternLoc = PackExpansionLoc.getPatternLoc();
      bool Expand = false;
      bool RetainExpansion = false;
      Optional<unsigned> NumExpansions = PackExpansion->getNumExpansions();
      if (getDerived().TryExpandParameterPacks(
            PackExpansionLoc.getEllipsisLoc(), PatternLoc.getSourceRange(),
            Unexpanded, Expand, RetainExpansion, NumExpansions))
        return QualType();

      if (!Expand) {
        // We can't expand this pack expansion into separate arguments yet;
        // just substitute into the pattern and create a new pack expansion
        // type.
        Sema::ArgumentPackSubstitutionIndexRAII SubstIndex(getSema(), -1);

        TypeLocBuilder TypeArgBuilder;
        TypeArgBuilder.reserve(PatternLoc.getFullDataSize());
        QualType NewPatternType = getDerived().TransformType(TypeArgBuilder,
                                                             PatternLoc);
        if (NewPatternType.isNull())
          return QualType();

        QualType NewExpansionType = SemaRef.Context.getPackExpansionType(
                                      NewPatternType, NumExpansions);
        auto NewExpansionLoc = TLB.push<PackExpansionTypeLoc>(NewExpansionType);
        NewExpansionLoc.setEllipsisLoc(PackExpansionLoc.getEllipsisLoc());
        NewTypeArgInfos.push_back(
          TypeArgBuilder.getTypeSourceInfo(SemaRef.Context, NewExpansionType));
        continue;
      }

      // Substitute into the pack expansion pattern for each slice of the
      // pack.
      for (unsigned ArgIdx = 0; ArgIdx != *NumExpansions; ++ArgIdx) {
        Sema::ArgumentPackSubstitutionIndexRAII SubstIndex(getSema(), ArgIdx);

        TypeLocBuilder TypeArgBuilder;
        TypeArgBuilder.reserve(PatternLoc.getFullDataSize());

        QualType NewTypeArg = getDerived().TransformType(TypeArgBuilder,
                                                         PatternLoc);
        if (NewTypeArg.isNull())
          return QualType();

        NewTypeArgInfos.push_back(
          TypeArgBuilder.getTypeSourceInfo(SemaRef.Context, NewTypeArg));
      }

      continue;
    }

    // Ordinary (non-pack-expansion) type argument: transform it into a
    // fresh TypeSourceInfo built in its own TypeLocBuilder.
    TypeLocBuilder TypeArgBuilder;
    TypeArgBuilder.reserve(TypeArgLoc.getFullDataSize());
    QualType NewTypeArg = getDerived().TransformType(TypeArgBuilder, TypeArgLoc);
    if (NewTypeArg.isNull())
      return QualType();

    // If nothing changed, just keep the old TypeSourceInfo.
    if (NewTypeArg == TypeArg) {
      NewTypeArgInfos.push_back(TypeArgInfo);
      continue;
    }

    NewTypeArgInfos.push_back(
      TypeArgBuilder.getTypeSourceInfo(SemaRef.Context, NewTypeArg));
    AnyChanged = true;
  }

  QualType Result = TL.getType();
  if (getDerived().AlwaysRebuild() || AnyChanged) {
    // Rebuild the type.
    Result = getDerived().RebuildObjCObjectType(
        BaseType, TL.getBeginLoc(), TL.getTypeArgsLAngleLoc(), NewTypeArgInfos,
        TL.getTypeArgsRAngleLoc(), TL.getProtocolLAngleLoc(),
        llvm::makeArrayRef(TL.getTypePtr()->qual_begin(), TL.getNumProtocols()),
        TL.getProtocolLocs(), TL.getProtocolRAngleLoc());

    if (Result.isNull())
      return QualType();
  }

  // Record source locations for the (possibly rebuilt) type.
  ObjCObjectTypeLoc NewT = TLB.push<ObjCObjectTypeLoc>(Result);
  NewT.setHasBaseTypeAsWritten(true);
  NewT.setTypeArgsLAngleLoc(TL.getTypeArgsLAngleLoc());
  for (unsigned i = 0, n = TL.getNumTypeArgs(); i != n; ++i)
    NewT.setTypeArgTInfo(i, NewTypeArgInfos[i]);
  NewT.setTypeArgsRAngleLoc(TL.getTypeArgsRAngleLoc());
  NewT.setProtocolLAngleLoc(TL.getProtocolLAngleLoc());
  for (unsigned i = 0, n = TL.getNumProtocols(); i != n; ++i)
    NewT.setProtocolLoc(i, TL.getProtocolLoc(i));
  NewT.setProtocolRAngleLoc(TL.getProtocolRAngleLoc());
  return Result;
}
/// Transform an Objective-C object pointer type by transforming its
/// pointee and rebuilding the pointer only when needed.
template<typename Derived>
QualType
TreeTransform<Derived>::TransformObjCObjectPointerType(TypeLocBuilder &TLB,
                                               ObjCObjectPointerTypeLoc TL) {
  QualType NewPointee = getDerived().TransformType(TLB, TL.getPointeeLoc());
  if (NewPointee.isNull())
    return QualType();

  QualType Result = TL.getType();
  if (NewPointee != TL.getPointeeLoc().getType() ||
      getDerived().AlwaysRebuild()) {
    Result = getDerived().RebuildObjCObjectPointerType(NewPointee,
                                                       TL.getStarLoc());
    if (Result.isNull())
      return QualType();
  }

  ObjCObjectPointerTypeLoc NewTL = TLB.push<ObjCObjectPointerTypeLoc>(Result);
  NewTL.setStarLoc(TL.getStarLoc());
  return Result;
}
//===----------------------------------------------------------------------===//
// Statement transformation
//===----------------------------------------------------------------------===//
/// Transform a null statement (';'). It contains nothing to transform, so
/// the original statement is returned unchanged.
template<typename Derived>
StmtResult
TreeTransform<Derived>::TransformNullStmt(NullStmt *S) {
  return S;
}
/// Transform a compound statement. Delegates to the general overload;
/// this entry point is for compound statements that are not the body of a
/// GNU statement expression.
template<typename Derived>
StmtResult
TreeTransform<Derived>::TransformCompoundStmt(CompoundStmt *S) {
  return getDerived().TransformCompoundStmt(S, /*IsStmtExpr=*/false);
}
/// Transform a compound statement by transforming each substatement,
/// collecting all failures before giving up so that later substatements
/// are still checked.
template<typename Derived>
StmtResult
TreeTransform<Derived>::TransformCompoundStmt(CompoundStmt *S,
                                              bool IsStmtExpr) {
  Sema::CompoundScopeRAII CompoundScope(getSema());

  bool SubStmtInvalid = false;
  bool SubStmtChanged = false;
  SmallVector<Stmt*, 8> Statements;
  for (auto *B : S->body()) {
    // In a statement expression, only the last statement produces the
    // expression's value; all others are discarded-value statements.
    StmtResult Result = getDerived().TransformStmt(
        B,
        IsStmtExpr && B == S->body_back() ? SDK_StmtExprResult : SDK_Discarded);

    if (Result.isInvalid()) {
      // Immediately fail if this was a DeclStmt, since it's very
      // likely that this will cause problems for future statements.
      if (isa<DeclStmt>(B))
        return StmtError();

      // Otherwise, just keep processing substatements and fail later.
      SubStmtInvalid = true;
      continue;
    }

    SubStmtChanged = SubStmtChanged || Result.get() != B;
    Statements.push_back(Result.getAs<Stmt>());
  }

  if (SubStmtInvalid)
    return StmtError();

  if (!getDerived().AlwaysRebuild() &&
      !SubStmtChanged)
    return S;

  return getDerived().RebuildCompoundStmt(S->getLBracLoc(),
                                          Statements,
                                          S->getRBracLoc(),
                                          IsStmtExpr);
}
/// Transform a case statement by transforming its case values in a
/// constant-evaluated context and then its substatement. Case statements
/// are always rebuilt so they attach to the transformed switch.
template<typename Derived>
StmtResult
TreeTransform<Derived>::TransformCaseStmt(CaseStmt *S) {
  ExprResult LHS, RHS;
  {
    // Case values are constant expressions and must not trigger runtime
    // side effects.
    EnterExpressionEvaluationContext Unevaluated(
        SemaRef, Sema::ExpressionEvaluationContext::ConstantEvaluated);

    // Transform the left-hand case value.
    LHS = getDerived().TransformExpr(S->getLHS());
    LHS = SemaRef.ActOnCaseExpr(S->getCaseLoc(), LHS);
    if (LHS.isInvalid())
      return StmtError();

    // Transform the right-hand case value (for the GNU case-range extension).
    RHS = getDerived().TransformExpr(S->getRHS());
    RHS = SemaRef.ActOnCaseExpr(S->getCaseLoc(), RHS);
    if (RHS.isInvalid())
      return StmtError();
  }

  // Build the case statement.
  // Case statements are always rebuilt so that they will attached to their
  // transformed switch statement.
  StmtResult Case = getDerived().RebuildCaseStmt(S->getCaseLoc(),
                                                 LHS.get(),
                                                 S->getEllipsisLoc(),
                                                 RHS.get(),
                                                 S->getColonLoc());
  if (Case.isInvalid())
    return StmtError();

  // Transform the statement following the case
  StmtResult SubStmt =
      getDerived().TransformStmt(S->getSubStmt());
  if (SubStmt.isInvalid())
    return StmtError();

  // Attach the body to the case statement
  return getDerived().RebuildCaseStmtBody(Case.get(), SubStmt.get());
}
/// Transform a 'default:' label by transforming its substatement.
/// Default statements are always rebuilt so they attach to the
/// transformed switch statement.
template <typename Derived>
StmtResult TreeTransform<Derived>::TransformDefaultStmt(DefaultStmt *S) {
  // Transform the statement that follows the 'default:' label.
  StmtResult NewSub = getDerived().TransformStmt(S->getSubStmt());
  if (NewSub.isInvalid())
    return StmtError();

  return getDerived().RebuildDefaultStmt(S->getDefaultLoc(), S->getColonLoc(),
                                         NewSub.get());
}
/// Transform a labeled statement by transforming its substatement and the
/// label declaration itself.
template<typename Derived>
StmtResult
TreeTransform<Derived>::TransformLabelStmt(LabelStmt *S, StmtDiscardKind SDK) {
  StmtResult SubStmt = getDerived().TransformStmt(S->getSubStmt(), SDK);
  if (SubStmt.isInvalid())
    return StmtError();

  Decl *LD = getDerived().TransformDecl(S->getDecl()->getLocation(),
                                        S->getDecl());
  if (!LD)
    return StmtError();

  // If we're transforming "in-place" (we're not creating new local
  // declarations), assume we're replacing the old label statement
  // and clear out the reference to it.
  if (LD == S->getDecl())
    S->getDecl()->setStmt(nullptr);

  // FIXME: Pass the real colon location in.
  return getDerived().RebuildLabelStmt(S->getIdentLoc(),
                                       cast<LabelDecl>(LD), SourceLocation(),
                                       SubStmt.get());
}
/// Transform an attribute.
///
/// Only attributes with a pragma spelling get a dedicated
/// TransformXXXAttr hook (generated from AttrList.inc); all other
/// attributes are returned unchanged.
template <typename Derived>
const Attr *TreeTransform<Derived>::TransformAttr(const Attr *R) {
  if (!R)
    return R;

  switch (R->getKind()) {
    // Transform attributes with a pragma spelling by calling TransformXXXAttr.
#define ATTR(X)
#define PRAGMA_SPELLING_ATTR(X)                                                \
  case attr::X:                                                                \
    return getDerived().Transform##X##Attr(cast<X##Attr>(R));
#include "clang/Basic/AttrList.inc"
  default:
    return R;
  }
}
/// Transform an attributed statement by transforming each attribute and
/// the underlying substatement, rebuilding only when something changed.
template <typename Derived>
StmtResult
TreeTransform<Derived>::TransformAttributedStmt(AttributedStmt *S,
                                                StmtDiscardKind SDK) {
  bool AttrsChanged = false;
  SmallVector<const Attr *, 1> Attrs;

  // Visit attributes and keep track if any are transformed.
  for (const auto *I : S->getAttrs()) {
    const Attr *R = getDerived().TransformAttr(I);
    AttrsChanged |= (I != R);
    Attrs.push_back(R);
  }

  StmtResult SubStmt = getDerived().TransformStmt(S->getSubStmt(), SDK);
  if (SubStmt.isInvalid())
    return StmtError();

  if (SubStmt.get() == S->getSubStmt() && !AttrsChanged)
    return S;

  return getDerived().RebuildAttributedStmt(S->getAttrLoc(), Attrs,
                                            SubStmt.get());
}
/// Transform an if statement. For 'if constexpr', only the selected arm is
/// transformed; the unselected 'then' arm is replaced by a null statement
/// and the unselected 'else' arm is dropped.
template<typename Derived>
StmtResult
TreeTransform<Derived>::TransformIfStmt(IfStmt *S) {
  // Transform the initialization statement
  StmtResult Init = getDerived().TransformStmt(S->getInit());
  if (Init.isInvalid())
    return StmtError();

  // Transform the condition
  Sema::ConditionResult Cond = getDerived().TransformCondition(
      S->getIfLoc(), S->getConditionVariable(), S->getCond(),
      S->isConstexpr() ? Sema::ConditionKind::ConstexprIf
                       : Sema::ConditionKind::Boolean);
  if (Cond.isInvalid())
    return StmtError();

  // If this is a constexpr if, determine which arm we should instantiate.
  llvm::Optional<bool> ConstexprConditionValue;
  if (S->isConstexpr())
    ConstexprConditionValue = Cond.getKnownValue();

  // Transform the "then" branch.
  StmtResult Then;
  if (!ConstexprConditionValue || *ConstexprConditionValue) {
    Then = getDerived().TransformStmt(S->getThen());
    if (Then.isInvalid())
      return StmtError();
  } else {
    // Discarded branch of 'if constexpr': substitute a null statement.
    Then = new (getSema().Context) NullStmt(S->getThen()->getBeginLoc());
  }

  // Transform the "else" branch.
  StmtResult Else;
  if (!ConstexprConditionValue || !*ConstexprConditionValue) {
    Else = getDerived().TransformStmt(S->getElse());
    if (Else.isInvalid())
      return StmtError();
  }

  if (!getDerived().AlwaysRebuild() &&
      Init.get() == S->getInit() &&
      Cond.get() == std::make_pair(S->getConditionVariable(), S->getCond()) &&
      Then.get() == S->getThen() &&
      Else.get() == S->getElse())
    return S;

  return getDerived().RebuildIfStmt(S->getIfLoc(), S->isConstexpr(), Cond,
                                    Init.get(), Then.get(), S->getElseLoc(),
                                    Else.get());
}
/// Transform a switch statement. The switch is started before its body is
/// transformed so that nested case statements can attach to it, then
/// completed with the transformed body.
template<typename Derived>
StmtResult
TreeTransform<Derived>::TransformSwitchStmt(SwitchStmt *S) {
  // Transform the initialization statement
  StmtResult Init = getDerived().TransformStmt(S->getInit());
  if (Init.isInvalid())
    return StmtError();

  // Transform the condition.
  Sema::ConditionResult Cond = getDerived().TransformCondition(
      S->getSwitchLoc(), S->getConditionVariable(), S->getCond(),
      Sema::ConditionKind::Switch);
  if (Cond.isInvalid())
    return StmtError();

  // Rebuild the switch statement.
  StmtResult Switch
    = getDerived().RebuildSwitchStmtStart(S->getSwitchLoc(), Init.get(), Cond);
  if (Switch.isInvalid())
    return StmtError();

  // Transform the body of the switch statement.
  StmtResult Body = getDerived().TransformStmt(S->getBody());
  if (Body.isInvalid())
    return StmtError();

  // Complete the switch statement.
  return getDerived().RebuildSwitchStmtBody(S->getSwitchLoc(), Switch.get(),
                                            Body.get());
}
/// Transform a while statement by transforming its condition (and
/// condition variable, if any) and its body.
template<typename Derived>
StmtResult
TreeTransform<Derived>::TransformWhileStmt(WhileStmt *S) {
  // Transform the condition
  Sema::ConditionResult Cond = getDerived().TransformCondition(
      S->getWhileLoc(), S->getConditionVariable(), S->getCond(),
      Sema::ConditionKind::Boolean);
  if (Cond.isInvalid())
    return StmtError();

  // Transform the body
  StmtResult Body = getDerived().TransformStmt(S->getBody());
  if (Body.isInvalid())
    return StmtError();

  // Nothing changed: reuse the original statement. (Previously this was
  // 'return Owned(S);' — 'Owned' is not in scope here, and every sibling
  // Transform*Stmt returns the statement directly via StmtResult's
  // implicit conversion.)
  if (!getDerived().AlwaysRebuild() &&
      Cond.get() == std::make_pair(S->getConditionVariable(), S->getCond()) &&
      Body.get() == S->getBody())
    return S;

  return getDerived().RebuildWhileStmt(S->getWhileLoc(), Cond, Body.get());
}
/// Transform a do-while statement by transforming its body and then its
/// condition expression.
template<typename Derived>
StmtResult
TreeTransform<Derived>::TransformDoStmt(DoStmt *S) {
  // Transform the body
  StmtResult Body = getDerived().TransformStmt(S->getBody());
  if (Body.isInvalid())
    return StmtError();

  // Transform the condition
  ExprResult Cond = getDerived().TransformExpr(S->getCond());
  if (Cond.isInvalid())
    return StmtError();

  if (!getDerived().AlwaysRebuild() &&
      Cond.get() == S->getCond() &&
      Body.get() == S->getBody())
    return S;

  return getDerived().RebuildDoStmt(S->getDoLoc(), Body.get(), S->getWhileLoc(),
                                    /*FIXME:*/S->getWhileLoc(), Cond.get(),
                                    S->getRParenLoc());
}
/// Transform a for statement by transforming its init, condition,
/// increment, and body, with extra bookkeeping for OpenMP loop regions.
template<typename Derived>
StmtResult
TreeTransform<Derived>::TransformForStmt(ForStmt *S) {
  if (getSema().getLangOpts().OpenMP)
    getSema().startOpenMPLoop();

  // Transform the initialization statement
  StmtResult Init = getDerived().TransformStmt(S->getInit());
  if (Init.isInvalid())
    return StmtError();

  // In OpenMP loop region loop control variable must be captured and be
  // private. Perform analysis of first part (if any).
  if (getSema().getLangOpts().OpenMP && Init.isUsable())
    getSema().ActOnOpenMPLoopInitialization(S->getForLoc(), Init.get());

  // Transform the condition
  Sema::ConditionResult Cond = getDerived().TransformCondition(
      S->getForLoc(), S->getConditionVariable(), S->getCond(),
      Sema::ConditionKind::Boolean);
  if (Cond.isInvalid())
    return StmtError();

  // Transform the increment
  ExprResult Inc = getDerived().TransformExpr(S->getInc());
  if (Inc.isInvalid())
    return StmtError();

  // The increment is a discarded-value full expression.
  Sema::FullExprArg FullInc(getSema().MakeFullDiscardedValueExpr(Inc.get()));
  if (S->getInc() && !FullInc.get())
    return StmtError();

  // Transform the body
  StmtResult Body = getDerived().TransformStmt(S->getBody());
  if (Body.isInvalid())
    return StmtError();

  if (!getDerived().AlwaysRebuild() &&
      Init.get() == S->getInit() &&
      Cond.get() == std::make_pair(S->getConditionVariable(), S->getCond()) &&
      Inc.get() == S->getInc() &&
      Body.get() == S->getBody())
    return S;

  return getDerived().RebuildForStmt(S->getForLoc(), S->getLParenLoc(),
                                     Init.get(), Cond, FullInc,
                                     S->getRParenLoc(), Body.get());
}
/// Transform a goto statement. Gotos must always be rebuilt so they bind
/// to the (possibly new) label declaration.
template<typename Derived>
StmtResult
TreeTransform<Derived>::TransformGotoStmt(GotoStmt *S) {
  Decl *TransformedLabel =
      getDerived().TransformDecl(S->getLabel()->getLocation(), S->getLabel());
  if (!TransformedLabel)
    return StmtError();

  return getDerived().RebuildGotoStmt(S->getGotoLoc(), S->getLabelLoc(),
                                      cast<LabelDecl>(TransformedLabel));
}
/// Transform a GNU indirect goto ('goto *expr') by transforming its
/// target expression.
template<typename Derived>
StmtResult
TreeTransform<Derived>::TransformIndirectGotoStmt(IndirectGotoStmt *S) {
  ExprResult NewTarget = getDerived().TransformExpr(S->getTarget());
  if (NewTarget.isInvalid())
    return StmtError();
  NewTarget = SemaRef.MaybeCreateExprWithCleanups(NewTarget.get());

  // Only rebuild when forced to or when the target actually changed.
  if (getDerived().AlwaysRebuild() || NewTarget.get() != S->getTarget())
    return getDerived().RebuildIndirectGotoStmt(S->getGotoLoc(),
                                                S->getStarLoc(),
                                                NewTarget.get());
  return S;
}
/// Transform a continue statement. It contains nothing to transform, so
/// the original statement is returned unchanged.
template<typename Derived>
StmtResult
TreeTransform<Derived>::TransformContinueStmt(ContinueStmt *S) {
  return S;
}
template<typename Derived>
StmtResult
TreeTransform<Derived>::TransformBreakStmt(BreakStmt *S) {
  // A 'break' statement has no sub-expressions or declarations to
  // transform, so it is always retained as-is.
  return S;
}
template<typename Derived>
StmtResult
TreeTransform<Derived>::TransformReturnStmt(ReturnStmt *S) {
  // The returned value is transformed as an initializer, since it
  // initializes the function's return object.
  ExprResult RetValue =
      getDerived().TransformInitializer(S->getRetValue(),
                                        /*NotCopyInit*/false);
  if (RetValue.isInvalid())
    return StmtError();

  // FIXME: We always rebuild the return statement because there is no way
  // to tell whether the return type of the function has changed.
  return getDerived().RebuildReturnStmt(S->getReturnLoc(), RetValue.get());
}
template<typename Derived>
StmtResult
TreeTransform<Derived>::TransformDeclStmt(DeclStmt *S) {
  // Transform each declaration in the statement, noting whether any of
  // them produced a different declaration.
  bool AnyChanged = false;
  SmallVector<Decl *, 4> NewDecls;
  for (auto *OldDecl : S->decls()) {
    Decl *NewDecl =
        getDerived().TransformDefinition(OldDecl->getLocation(), OldDecl);
    if (!NewDecl)
      return StmtError();
    AnyChanged |= NewDecl != OldDecl;
    NewDecls.push_back(NewDecl);
  }

  // Retain the original statement when nothing was rebuilt.
  if (!getDerived().AlwaysRebuild() && !AnyChanged)
    return S;

  return getDerived().RebuildDeclStmt(NewDecls, S->getBeginLoc(),
                                      S->getEndLoc());
}
template<typename Derived>
StmtResult
TreeTransform<Derived>::TransformGCCAsmStmt(GCCAsmStmt *S) {
  // Transforms a GCC-style inline-asm statement. Only the output, input,
  // and label operand expressions are transformed; constraint literals,
  // clobbers, and the asm string itself are carried over unchanged.
  // Names/Constraints/Exprs are built as parallel arrays: outputs first,
  // then inputs, then labels — RebuildGCCAsmStmt relies on this ordering.
  SmallVector<Expr*, 8> Constraints;
  SmallVector<Expr*, 8> Exprs;
  SmallVector<IdentifierInfo *, 4> Names;

  ExprResult AsmString;
  SmallVector<Expr*, 8> Clobbers;

  bool ExprsChanged = false;

  // Go through the outputs.
  for (unsigned I = 0, E = S->getNumOutputs(); I != E; ++I) {
    Names.push_back(S->getOutputIdentifier(I));

    // No need to transform the constraint literal.
    Constraints.push_back(S->getOutputConstraintLiteral(I));

    // Transform the output expr.
    Expr *OutputExpr = S->getOutputExpr(I);
    ExprResult Result = getDerived().TransformExpr(OutputExpr);
    if (Result.isInvalid())
      return StmtError();

    ExprsChanged |= Result.get() != OutputExpr;

    Exprs.push_back(Result.get());
  }

  // Go through the inputs.
  for (unsigned I = 0, E = S->getNumInputs(); I != E; ++I) {
    Names.push_back(S->getInputIdentifier(I));

    // No need to transform the constraint literal.
    Constraints.push_back(S->getInputConstraintLiteral(I));

    // Transform the input expr.
    Expr *InputExpr = S->getInputExpr(I);
    ExprResult Result = getDerived().TransformExpr(InputExpr);
    if (Result.isInvalid())
      return StmtError();

    ExprsChanged |= Result.get() != InputExpr;

    Exprs.push_back(Result.get());
  }

  // Go through the Labels.
  for (unsigned I = 0, E = S->getNumLabels(); I != E; ++I) {
    Names.push_back(S->getLabelIdentifier(I));

    ExprResult Result = getDerived().TransformExpr(S->getLabelExpr(I));
    if (Result.isInvalid())
      return StmtError();
    ExprsChanged |= Result.get() != S->getLabelExpr(I);
    Exprs.push_back(Result.get());
  }

  // Retain the original statement when no operand expression changed.
  if (!getDerived().AlwaysRebuild() && !ExprsChanged)
    return S;

  // Go through the clobbers.
  for (unsigned I = 0, E = S->getNumClobbers(); I != E; ++I)
    Clobbers.push_back(S->getClobberStringLiteral(I));

  // No need to transform the asm string literal.
  AsmString = S->getAsmString();
  return getDerived().RebuildGCCAsmStmt(S->getAsmLoc(), S->isSimple(),
                                        S->isVolatile(), S->getNumOutputs(),
                                        S->getNumInputs(), Names.data(),
                                        Constraints, Exprs, AsmString.get(),
                                        Clobbers, S->getNumLabels(),
                                        S->getRParenLoc());
}
template<typename Derived>
StmtResult
TreeTransform<Derived>::TransformMSAsmStmt(MSAsmStmt *S) {
  // MS-style asm statements keep their token stream verbatim; only the
  // captured expressions need to be transformed.
  ArrayRef<Token> AsmToks =
    llvm::makeArrayRef(S->getAsmToks(), S->getNumAsmToks());

  bool HadError = false, HadChange = false;

  ArrayRef<Expr*> SrcExprs = S->getAllExprs();
  SmallVector<Expr*, 8> TransformedExprs;
  TransformedExprs.reserve(SrcExprs.size());
  for (unsigned i = 0, e = SrcExprs.size(); i != e; ++i) {
    ExprResult Result = getDerived().TransformExpr(SrcExprs[i]);
    if (!Result.isUsable()) {
      HadError = true;
    } else {
      HadChange |= (Result.get() != SrcExprs[i]);
      TransformedExprs.push_back(Result.get());
    }
  }

  if (HadError) return StmtError();
  if (!HadChange && !getDerived().AlwaysRebuild())
    // Retain the original statement (plain `return S;` for consistency with
    // the other Transform*Stmt methods; the legacy Owned() wrapper was an
    // identity function).
    return S;

  return getDerived().RebuildMSAsmStmt(S->getAsmLoc(), S->getLBraceLoc(),
                                       AsmToks, S->getAsmString(),
                                       S->getNumOutputs(), S->getNumInputs(),
                                       S->getAllConstraints(), S->getClobbers(),
                                       TransformedExprs, S->getEndLoc());
}
// C++ Coroutines TS

template<typename Derived>
StmtResult
TreeTransform<Derived>::TransformCoroutineBodyStmt(CoroutineBodyStmt *S) {
  // Transforms a coroutine body: rebuilds the promise object in the current
  // FunctionScopeInfo, then transforms the implicit coroutine statements
  // (initial/final suspend, handlers, allocation calls) in a specific order.
  // NOTE(review): the ordering of Sema calls here is load-bearing — e.g. the
  // promise must be registered before any sub-statement is transformed.
  auto *ScopeInfo = SemaRef.getCurFunction();
  auto *FD = cast<FunctionDecl>(SemaRef.CurContext);
  assert(FD && ScopeInfo && !ScopeInfo->CoroutinePromise &&
         ScopeInfo->NeedsCoroutineSuspends &&
         ScopeInfo->CoroutineSuspends.first == nullptr &&
         ScopeInfo->CoroutineSuspends.second == nullptr &&
         "expected clean scope info");

  // Set that we have (possibly-invalid) suspend points before we do anything
  // that may fail.
  ScopeInfo->setNeedsCoroutineSuspends(false);

  // The new CoroutinePromise object needs to be built and put into the current
  // FunctionScopeInfo before any transformations or rebuilding occurs.
  if (!SemaRef.buildCoroutineParameterMoves(FD->getLocation()))
    return StmtError();
  auto *Promise = SemaRef.buildCoroutinePromise(FD->getLocation());
  if (!Promise)
    return StmtError();
  getDerived().transformedLocalDecl(S->getPromiseDecl(), {Promise});
  ScopeInfo->CoroutinePromise = Promise;

  // Transform the implicit coroutine statements we built during the initial
  // parse.
  StmtResult InitSuspend = getDerived().TransformStmt(S->getInitSuspendStmt());
  if (InitSuspend.isInvalid())
    return StmtError();
  StmtResult FinalSuspend =
      getDerived().TransformStmt(S->getFinalSuspendStmt());
  if (FinalSuspend.isInvalid())
    return StmtError();
  ScopeInfo->setCoroutineSuspends(InitSuspend.get(), FinalSuspend.get());
  assert(isa<Expr>(InitSuspend.get()) && isa<Expr>(FinalSuspend.get()));

  StmtResult BodyRes = getDerived().TransformStmt(S->getBody());
  if (BodyRes.isInvalid())
    return StmtError();

  CoroutineStmtBuilder Builder(SemaRef, *FD, *ScopeInfo, BodyRes.get());
  if (Builder.isInvalid())
    return StmtError();

  Expr *ReturnObject = S->getReturnValueInit();
  assert(ReturnObject && "the return object is expected to be valid");
  ExprResult Res = getDerived().TransformInitializer(ReturnObject,
                                                     /*NoCopyInit*/ false);
  if (Res.isInvalid())
    return StmtError();
  Builder.ReturnValue = Res.get();

  if (S->hasDependentPromiseType()) {
    // PR41909: We may find a generic coroutine lambda definition within a
    // template function that is being instantiated. In this case, the lambda
    // will have a dependent promise type, until it is used in an expression
    // that creates an instantiation with a non-dependent promise type. We
    // should not assert or build coroutine dependent statements for such a
    // generic lambda.
    auto *MD = dyn_cast_or_null<CXXMethodDecl>(FD);
    if (!MD || !MD->getParent()->isGenericLambda()) {
      assert(!Promise->getType()->isDependentType() &&
             "the promise type must no longer be dependent");
      assert(!S->getFallthroughHandler() && !S->getExceptionHandler() &&
             !S->getReturnStmtOnAllocFailure() && !S->getDeallocate() &&
             "these nodes should not have been built yet");
      if (!Builder.buildDependentStatements())
        return StmtError();
    }
  } else {
    // The promise type was already non-dependent: transform the handler and
    // allocation statements that were built during the initial parse.
    if (auto *OnFallthrough = S->getFallthroughHandler()) {
      StmtResult Res = getDerived().TransformStmt(OnFallthrough);
      if (Res.isInvalid())
        return StmtError();
      Builder.OnFallthrough = Res.get();
    }

    if (auto *OnException = S->getExceptionHandler()) {
      StmtResult Res = getDerived().TransformStmt(OnException);
      if (Res.isInvalid())
        return StmtError();
      Builder.OnException = Res.get();
    }

    if (auto *OnAllocFailure = S->getReturnStmtOnAllocFailure()) {
      StmtResult Res = getDerived().TransformStmt(OnAllocFailure);
      if (Res.isInvalid())
        return StmtError();
      Builder.ReturnStmtOnAllocFailure = Res.get();
    }

    // Transform any additional statements we may have already built
    assert(S->getAllocate() && S->getDeallocate() &&
           "allocation and deallocation calls must already be built");
    ExprResult AllocRes = getDerived().TransformExpr(S->getAllocate());
    if (AllocRes.isInvalid())
      return StmtError();
    Builder.Allocate = AllocRes.get();

    ExprResult DeallocRes = getDerived().TransformExpr(S->getDeallocate());
    if (DeallocRes.isInvalid())
      return StmtError();
    Builder.Deallocate = DeallocRes.get();

    assert(S->getResultDecl() && "ResultDecl must already be built");
    StmtResult ResultDecl = getDerived().TransformStmt(S->getResultDecl());
    if (ResultDecl.isInvalid())
      return StmtError();
    Builder.ResultDecl = ResultDecl.get();

    if (auto *ReturnStmt = S->getReturnStmt()) {
      StmtResult Res = getDerived().TransformStmt(ReturnStmt);
      if (Res.isInvalid())
        return StmtError();
      Builder.ReturnStmt = Res.get();
    }
  }

  return getDerived().RebuildCoroutineBodyStmt(Builder);
}
template<typename Derived>
StmtResult
TreeTransform<Derived>::TransformCoreturnStmt(CoreturnStmt *S) {
  // The operand is transformed as an initializer since it may initialize
  // the promise's return value.
  ExprResult Operand = getDerived().TransformInitializer(S->getOperand(),
                                                         /*NotCopyInit*/false);
  if (Operand.isInvalid())
    return StmtError();

  // Always rebuild; we don't know if this needs to be injected into a new
  // context or if the promise type has changed.
  return getDerived().RebuildCoreturnStmt(S->getKeywordLoc(), Operand.get(),
                                          S->isImplicit());
}
template<typename Derived>
ExprResult
TreeTransform<Derived>::TransformCoawaitExpr(CoawaitExpr *E) {
  // Transform the awaited operand as an initializer.
  ExprResult Operand = getDerived().TransformInitializer(E->getOperand(),
                                                         /*NotCopyInit*/false);
  if (Operand.isInvalid())
    return ExprError();

  // Always rebuild; we don't know if this needs to be injected into a new
  // context or if the promise type has changed.
  return getDerived().RebuildCoawaitExpr(E->getKeywordLoc(), Operand.get(),
                                         E->isImplicit());
}
template <typename Derived>
ExprResult
TreeTransform<Derived>::TransformDependentCoawaitExpr(DependentCoawaitExpr *E) {
  // Transform both the awaited operand and the unresolved lookup for the
  // operator co_await overload set.
  ExprResult Operand = getDerived().TransformInitializer(E->getOperand(),
                                                         /*NotCopyInit*/ false);
  if (Operand.isInvalid())
    return ExprError();

  ExprResult Lookup = getDerived().TransformUnresolvedLookupExpr(
      E->getOperatorCoawaitLookup());
  if (Lookup.isInvalid())
    return ExprError();

  // Always rebuild; we don't know if this needs to be injected into a new
  // context or if the promise type has changed.
  return getDerived().RebuildDependentCoawaitExpr(
      E->getKeywordLoc(), Operand.get(),
      cast<UnresolvedLookupExpr>(Lookup.get()));
}
template<typename Derived>
ExprResult
TreeTransform<Derived>::TransformCoyieldExpr(CoyieldExpr *E) {
  // Transform the yielded operand as an initializer.
  ExprResult Operand = getDerived().TransformInitializer(E->getOperand(),
                                                         /*NotCopyInit*/false);
  if (Operand.isInvalid())
    return ExprError();

  // Always rebuild; we don't know if this needs to be injected into a new
  // context or if the promise type has changed.
  return getDerived().RebuildCoyieldExpr(E->getKeywordLoc(), Operand.get());
}
// Objective-C Statements.

template<typename Derived>
StmtResult
TreeTransform<Derived>::TransformObjCAtTryStmt(ObjCAtTryStmt *S) {
  // Transform the body of the @try.
  StmtResult NewTryBody = getDerived().TransformStmt(S->getTryBody());
  if (NewTryBody.isInvalid())
    return StmtError();

  // Transform each @catch clause, noting whether any clause was rebuilt.
  bool CatchChanged = false;
  SmallVector<Stmt*, 8> NewCatchStmts;
  for (unsigned I = 0, N = S->getNumCatchStmts(); I != N; ++I) {
    StmtResult NewCatch = getDerived().TransformStmt(S->getCatchStmt(I));
    if (NewCatch.isInvalid())
      return StmtError();
    CatchChanged |= NewCatch.get() != S->getCatchStmt(I);
    NewCatchStmts.push_back(NewCatch.get());
  }

  // Transform the @finally statement (if present).
  StmtResult NewFinally;
  if (S->getFinallyStmt()) {
    NewFinally = getDerived().TransformStmt(S->getFinallyStmt());
    if (NewFinally.isInvalid())
      return StmtError();
  }

  // Retain the original statement when nothing was rebuilt.
  if (!getDerived().AlwaysRebuild() && NewTryBody.get() == S->getTryBody() &&
      !CatchChanged && NewFinally.get() == S->getFinallyStmt())
    return S;

  // Build a new statement.
  return getDerived().RebuildObjCAtTryStmt(S->getAtTryLoc(), NewTryBody.get(),
                                           NewCatchStmts, NewFinally.get());
}
template<typename Derived>
StmtResult
TreeTransform<Derived>::TransformObjCAtCatchStmt(ObjCAtCatchStmt *S) {
  // Rebuild the @catch parameter declaration, if one is present.
  VarDecl *CatchVar = nullptr;
  if (VarDecl *OldVar = S->getCatchParamDecl()) {
    TypeSourceInfo *TInfo = nullptr;
    if (OldVar->getTypeSourceInfo()) {
      TInfo = getDerived().TransformType(OldVar->getTypeSourceInfo());
      if (!TInfo)
        return StmtError();
    }

    // Prefer the transformed type-source information; fall back to
    // transforming the bare type when none was recorded.
    QualType CatchType;
    if (TInfo)
      CatchType = TInfo->getType();
    else {
      CatchType = getDerived().TransformType(OldVar->getType());
      if (CatchType.isNull())
        return StmtError();
    }

    CatchVar = getDerived().RebuildObjCExceptionDecl(OldVar, TInfo, CatchType);
    if (!CatchVar)
      return StmtError();
  }

  // Transform the body of the @catch.
  StmtResult Body = getDerived().TransformStmt(S->getCatchBody());
  if (Body.isInvalid())
    return StmtError();

  // @catch statements are always rebuilt.
  return getDerived().RebuildObjCAtCatchStmt(S->getAtCatchLoc(),
                                             S->getRParenLoc(), CatchVar,
                                             Body.get());
}
template<typename Derived>
StmtResult
TreeTransform<Derived>::TransformObjCAtFinallyStmt(ObjCAtFinallyStmt *S) {
  // Transform the @finally body.
  StmtResult NewBody = getDerived().TransformStmt(S->getFinallyBody());
  if (NewBody.isInvalid())
    return StmtError();

  // Retain the original statement when the body is unchanged.
  if (!getDerived().AlwaysRebuild() && NewBody.get() == S->getFinallyBody())
    return S;

  return getDerived().RebuildObjCAtFinallyStmt(S->getAtFinallyLoc(),
                                               NewBody.get());
}
template<typename Derived>
StmtResult
TreeTransform<Derived>::TransformObjCAtThrowStmt(ObjCAtThrowStmt *S) {
  // Transform the thrown expression, if any ('@throw;' rethrows and has
  // no operand).
  ExprResult ThrowExpr;
  if (S->getThrowExpr()) {
    ThrowExpr = getDerived().TransformExpr(S->getThrowExpr());
    if (ThrowExpr.isInvalid())
      return StmtError();
  }

  // Retain the original statement when the operand is unchanged.
  if (!getDerived().AlwaysRebuild() && ThrowExpr.get() == S->getThrowExpr())
    return S;

  return getDerived().RebuildObjCAtThrowStmt(S->getThrowLoc(),
                                             ThrowExpr.get());
}
template<typename Derived>
StmtResult
TreeTransform<Derived>::TransformObjCAtSynchronizedStmt(
                                              ObjCAtSynchronizedStmt *S) {
  // Transform the expression we are synchronizing on, then re-check it as
  // a synchronized operand.
  ExprResult LockObj = getDerived().TransformExpr(S->getSynchExpr());
  if (LockObj.isInvalid())
    return StmtError();
  LockObj =
    getDerived().RebuildObjCAtSynchronizedOperand(S->getAtSynchronizedLoc(),
                                                  LockObj.get());
  if (LockObj.isInvalid())
    return StmtError();

  // Transform the body.
  StmtResult NewBody = getDerived().TransformStmt(S->getSynchBody());
  if (NewBody.isInvalid())
    return StmtError();

  // Retain the original statement when nothing was rebuilt.
  if (!getDerived().AlwaysRebuild() && LockObj.get() == S->getSynchExpr() &&
      NewBody.get() == S->getSynchBody())
    return S;

  return getDerived().RebuildObjCAtSynchronizedStmt(S->getAtSynchronizedLoc(),
                                                    LockObj.get(),
                                                    NewBody.get());
}
template<typename Derived>
StmtResult
TreeTransform<Derived>::TransformObjCAutoreleasePoolStmt(
                                              ObjCAutoreleasePoolStmt *S) {
  // Transform the statement enclosed by the @autoreleasepool.
  StmtResult SubStmt = getDerived().TransformStmt(S->getSubStmt());
  if (SubStmt.isInvalid())
    return StmtError();

  // Retain the original statement when the sub-statement is unchanged.
  if (!getDerived().AlwaysRebuild() && SubStmt.get() == S->getSubStmt())
    return S;

  return getDerived().RebuildObjCAutoreleasePoolStmt(S->getAtLoc(),
                                                     SubStmt.get());
}
template<typename Derived>
StmtResult
TreeTransform<Derived>::TransformObjCForCollectionStmt(
                                                  ObjCForCollectionStmt *S) {
  // Transform the element statement; its value is consumed by the loop, so
  // it must not be treated as a discarded-value statement.
  StmtResult NewElement =
      getDerived().TransformStmt(S->getElement(), SDK_NotDiscarded);
  if (NewElement.isInvalid())
    return StmtError();

  // Transform the collection expression being iterated.
  ExprResult NewCollection = getDerived().TransformExpr(S->getCollection());
  if (NewCollection.isInvalid())
    return StmtError();

  // Transform the loop body.
  StmtResult NewBody = getDerived().TransformStmt(S->getBody());
  if (NewBody.isInvalid())
    return StmtError();

  // Retain the original statement when nothing was rebuilt.
  if (!getDerived().AlwaysRebuild() && NewElement.get() == S->getElement() &&
      NewCollection.get() == S->getCollection() &&
      NewBody.get() == S->getBody())
    return S;

  return getDerived().RebuildObjCForCollectionStmt(S->getForLoc(),
                                                   NewElement.get(),
                                                   NewCollection.get(),
                                                   S->getRParenLoc(),
                                                   NewBody.get());
}
template <typename Derived>
StmtResult TreeTransform<Derived>::TransformCXXCatchStmt(CXXCatchStmt *S) {
  // Rebuild the exception declaration, if the handler names one
  // (i.e. this is not a 'catch (...)').
  VarDecl *CatchVar = nullptr;
  if (VarDecl *OldExceptionDecl = S->getExceptionDecl()) {
    TypeSourceInfo *TInfo =
        getDerived().TransformType(OldExceptionDecl->getTypeSourceInfo());
    if (!TInfo)
      return StmtError();
    CatchVar = getDerived().RebuildExceptionDecl(
        OldExceptionDecl, TInfo, OldExceptionDecl->getInnerLocStart(),
        OldExceptionDecl->getLocation(), OldExceptionDecl->getIdentifier());
    if (!CatchVar || CatchVar->isInvalidDecl())
      return StmtError();
  }

  // Transform the handler body.
  StmtResult NewHandler = getDerived().TransformStmt(S->getHandlerBlock());
  if (NewHandler.isInvalid())
    return StmtError();

  // Retain the original statement when no declaration was rebuilt and the
  // handler body is unchanged.
  if (!getDerived().AlwaysRebuild() && !CatchVar &&
      NewHandler.get() == S->getHandlerBlock())
    return S;

  return getDerived().RebuildCXXCatchStmt(S->getCatchLoc(), CatchVar,
                                          NewHandler.get());
}
template <typename Derived>
StmtResult TreeTransform<Derived>::TransformCXXTryStmt(CXXTryStmt *S) {
  // Transform the try block itself.
  StmtResult NewTryBlock =
      getDerived().TransformCompoundStmt(S->getTryBlock());
  if (NewTryBlock.isInvalid())
    return StmtError();

  // Transform each handler, noting whether any of them was rebuilt.
  bool AnyHandlerChanged = false;
  SmallVector<Stmt *, 8> NewHandlers;
  for (unsigned I = 0, N = S->getNumHandlers(); I != N; ++I) {
    StmtResult NewHandler =
        getDerived().TransformCXXCatchStmt(S->getHandler(I));
    if (NewHandler.isInvalid())
      return StmtError();
    AnyHandlerChanged |= NewHandler.get() != S->getHandler(I);
    NewHandlers.push_back(NewHandler.getAs<Stmt>());
  }

  // Retain the original statement when nothing was rebuilt.
  if (!getDerived().AlwaysRebuild() &&
      NewTryBlock.get() == S->getTryBlock() && !AnyHandlerChanged)
    return S;

  return getDerived().RebuildCXXTryStmt(S->getTryLoc(), NewTryBlock.get(),
                                        NewHandlers);
}
template<typename Derived>
StmtResult
TreeTransform<Derived>::TransformCXXForRangeStmt(CXXForRangeStmt *S) {
  // Transforms a range-based for statement. The implicit range/begin/end
  // statements and the condition/increment expressions are transformed
  // individually, then the statement is rebuilt if anything changed.
  // The rebuild may happen at one of two sites (see below) because the body
  // must be transformed after the loop-variable scope is established.
  StmtResult Init =
      S->getInit() ? getDerived().TransformStmt(S->getInit()) : StmtResult();
  if (Init.isInvalid())
    return StmtError();

  StmtResult Range = getDerived().TransformStmt(S->getRangeStmt());
  if (Range.isInvalid())
    return StmtError();

  StmtResult Begin = getDerived().TransformStmt(S->getBeginStmt());
  if (Begin.isInvalid())
    return StmtError();
  StmtResult End = getDerived().TransformStmt(S->getEndStmt());
  if (End.isInvalid())
    return StmtError();

  // Re-check the condition as a boolean and wrap any cleanups, mirroring
  // what the initial parse did at the colon location.
  ExprResult Cond = getDerived().TransformExpr(S->getCond());
  if (Cond.isInvalid())
    return StmtError();
  if (Cond.get())
    Cond = SemaRef.CheckBooleanCondition(S->getColonLoc(), Cond.get());
  if (Cond.isInvalid())
    return StmtError();
  if (Cond.get())
    Cond = SemaRef.MaybeCreateExprWithCleanups(Cond.get());

  ExprResult Inc = getDerived().TransformExpr(S->getInc());
  if (Inc.isInvalid())
    return StmtError();
  if (Inc.get())
    Inc = SemaRef.MaybeCreateExprWithCleanups(Inc.get());

  StmtResult LoopVar = getDerived().TransformStmt(S->getLoopVarStmt());
  if (LoopVar.isInvalid())
    return StmtError();

  // First rebuild site: any of the loop's header statements changed.
  StmtResult NewStmt = S;
  if (getDerived().AlwaysRebuild() ||
      Init.get() != S->getInit() ||
      Range.get() != S->getRangeStmt() ||
      Begin.get() != S->getBeginStmt() ||
      End.get() != S->getEndStmt() ||
      Cond.get() != S->getCond() ||
      Inc.get() != S->getInc() ||
      LoopVar.get() != S->getLoopVarStmt()) {
    NewStmt = getDerived().RebuildCXXForRangeStmt(S->getForLoc(),
                                                  S->getCoawaitLoc(), Init.get(),
                                                  S->getColonLoc(), Range.get(),
                                                  Begin.get(), End.get(),
                                                  Cond.get(),
                                                  Inc.get(), LoopVar.get(),
                                                  S->getRParenLoc());
    if (NewStmt.isInvalid())
      return StmtError();
  }

  StmtResult Body = getDerived().TransformStmt(S->getBody());
  if (Body.isInvalid())
    return StmtError();

  // Body has changed but we didn't rebuild the for-range statement. Rebuild
  // it now so we have a new statement to attach the body to.
  if (Body.get() != S->getBody() && NewStmt.get() == S) {
    NewStmt = getDerived().RebuildCXXForRangeStmt(S->getForLoc(),
                                                  S->getCoawaitLoc(), Init.get(),
                                                  S->getColonLoc(), Range.get(),
                                                  Begin.get(), End.get(),
                                                  Cond.get(),
                                                  Inc.get(), LoopVar.get(),
                                                  S->getRParenLoc());
    if (NewStmt.isInvalid())
      return StmtError();
  }

  // Nothing changed at all: retain the original statement.
  if (NewStmt.get() == S)
    return S;

  return FinishCXXForRangeStmt(NewStmt.get(), Body.get());
}
template<typename Derived>
StmtResult
TreeTransform<Derived>::TransformMSDependentExistsStmt(
                                                    MSDependentExistsStmt *S) {
  // Transforms a Microsoft __if_exists/__if_not_exists statement. After
  // transforming the qualifier and name, the name is re-checked: if it can
  // now be resolved, the statement collapses to either its sub-statement or
  // a null statement; if it is still dependent, the statement is rebuilt.
  // Transform the nested-name-specifier, if any.
  NestedNameSpecifierLoc QualifierLoc;
  if (S->getQualifierLoc()) {
    QualifierLoc
      = getDerived().TransformNestedNameSpecifierLoc(S->getQualifierLoc());
    if (!QualifierLoc)
      return StmtError();
  }

  // Transform the declaration name.
  DeclarationNameInfo NameInfo = S->getNameInfo();
  if (NameInfo.getName()) {
    NameInfo = getDerived().TransformDeclarationNameInfo(NameInfo);
    if (!NameInfo.getName())
      return StmtError();
  }

  // Check whether anything changed.
  if (!getDerived().AlwaysRebuild() &&
      QualifierLoc == S->getQualifierLoc() &&
      NameInfo.getName() == S->getNameInfo().getName())
    return S;

  // Determine whether this name exists, if we can.
  CXXScopeSpec SS;
  SS.Adopt(QualifierLoc);
  bool Dependent = false;
  switch (getSema().CheckMicrosoftIfExistsSymbol(/*S=*/nullptr, SS, NameInfo)) {
  case Sema::IER_Exists:
    if (S->isIfExists())
      break;

    // __if_not_exists on a name that exists: drop the sub-statement.
    return new (getSema().Context) NullStmt(S->getKeywordLoc());

  case Sema::IER_DoesNotExist:
    if (S->isIfNotExists())
      break;

    // __if_exists on a name that does not exist: drop the sub-statement.
    return new (getSema().Context) NullStmt(S->getKeywordLoc());

  case Sema::IER_Dependent:
    Dependent = true;
    break;

  case Sema::IER_Error:
    return StmtError();
  }

  // We need to continue with the instantiation, so do so now.
  StmtResult SubStmt = getDerived().TransformCompoundStmt(S->getSubStmt());
  if (SubStmt.isInvalid())
    return StmtError();

  // If we have resolved the name, just transform to the substatement.
  if (!Dependent)
    return SubStmt;

  // The name is still dependent, so build a dependent expression again.
  return getDerived().RebuildMSDependentExistsStmt(S->getKeywordLoc(),
                                                   S->isIfExists(),
                                                   QualifierLoc,
                                                   NameInfo,
                                                   SubStmt.get());
}
template<typename Derived>
ExprResult
TreeTransform<Derived>::TransformMSPropertyRefExpr(MSPropertyRefExpr *E) {
  // Transform the nested-name-specifier, if any.
  NestedNameSpecifierLoc QualifierLoc;
  if (E->getQualifierLoc()) {
    QualifierLoc
      = getDerived().TransformNestedNameSpecifierLoc(E->getQualifierLoc());
    if (!QualifierLoc)
      return ExprError();
  }

  // Transform the referenced __declspec(property) declaration.
  MSPropertyDecl *Property = cast_or_null<MSPropertyDecl>(
      getDerived().TransformDecl(E->getMemberLoc(), E->getPropertyDecl()));
  if (!Property)
    return ExprError();

  // Transform the base object expression.
  ExprResult BaseRes = getDerived().TransformExpr(E->getBaseExpr());
  if (BaseRes.isInvalid())
    return ExprError();

  // Property references are always rebuilt directly as AST nodes.
  return new (SemaRef.getASTContext())
      MSPropertyRefExpr(BaseRes.get(), Property, E->isArrow(),
                        SemaRef.getASTContext().PseudoObjectTy, VK_LValue,
                        QualifierLoc, E->getMemberLoc());
}
template <typename Derived>
ExprResult TreeTransform<Derived>::TransformMSPropertySubscriptExpr(
    MSPropertySubscriptExpr *E) {
  // Transform the base property reference and the subscript index.
  ExprResult NewBase = getDerived().TransformExpr(E->getBase());
  if (NewBase.isInvalid())
    return ExprError();
  ExprResult NewIdx = getDerived().TransformExpr(E->getIdx());
  if (NewIdx.isInvalid())
    return ExprError();

  // Retain the original expression when nothing was rebuilt.
  if (!getDerived().AlwaysRebuild() && NewBase.get() == E->getBase() &&
      NewIdx.get() == E->getIdx())
    return E;

  return getDerived().RebuildArraySubscriptExpr(
      NewBase.get(), SourceLocation(), NewIdx.get(), E->getRBracketLoc());
}
template <typename Derived>
StmtResult TreeTransform<Derived>::TransformSEHTryStmt(SEHTryStmt *S) {
  // Transform the __try block and its single __except/__finally handler.
  StmtResult NewTryBlock =
      getDerived().TransformCompoundStmt(S->getTryBlock());
  if (NewTryBlock.isInvalid())
    return StmtError();
  StmtResult NewHandler = getDerived().TransformSEHHandler(S->getHandler());
  if (NewHandler.isInvalid())
    return StmtError();

  // Retain the original statement when nothing was rebuilt.
  if (!getDerived().AlwaysRebuild() &&
      NewTryBlock.get() == S->getTryBlock() &&
      NewHandler.get() == S->getHandler())
    return S;

  return getDerived().RebuildSEHTryStmt(S->getIsCXXTry(), S->getTryLoc(),
                                        NewTryBlock.get(), NewHandler.get());
}
template <typename Derived>
StmtResult TreeTransform<Derived>::TransformSEHFinallyStmt(SEHFinallyStmt *S) {
  // Transform the __finally block; the statement is always rebuilt.
  StmtResult NewBlock = getDerived().TransformCompoundStmt(S->getBlock());
  if (NewBlock.isInvalid())
    return StmtError();

  return getDerived().RebuildSEHFinallyStmt(S->getFinallyLoc(),
                                            NewBlock.get());
}
template <typename Derived>
StmtResult TreeTransform<Derived>::TransformSEHExceptStmt(SEHExceptStmt *S) {
  // Transform the __except filter expression and the handler block; the
  // statement is always rebuilt.
  ExprResult NewFilter = getDerived().TransformExpr(S->getFilterExpr());
  if (NewFilter.isInvalid())
    return StmtError();
  StmtResult NewBlock = getDerived().TransformCompoundStmt(S->getBlock());
  if (NewBlock.isInvalid())
    return StmtError();

  return getDerived().RebuildSEHExceptStmt(S->getExceptLoc(), NewFilter.get(),
                                           NewBlock.get());
}
template <typename Derived>
StmtResult TreeTransform<Derived>::TransformSEHHandler(Stmt *Handler) {
  // A SEH handler is either a __finally or a __except block; dispatch to
  // the matching transform.
  if (auto *Finally = dyn_cast<SEHFinallyStmt>(Handler))
    return getDerived().TransformSEHFinallyStmt(Finally);
  return getDerived().TransformSEHExceptStmt(cast<SEHExceptStmt>(Handler));
}
template<typename Derived>
StmtResult
TreeTransform<Derived>::TransformSEHLeaveStmt(SEHLeaveStmt *S) {
  // A '__leave' statement has no sub-expressions or declarations to
  // transform, so it is always retained as-is.
  return S;
}
//===----------------------------------------------------------------------===//
// OpenMP directive transformation
//===----------------------------------------------------------------------===//
template <typename Derived>
StmtResult TreeTransform<Derived>::TransformOMPExecutableDirective(
    OMPExecutableDirective *D) {
  // Common transformation path for all OpenMP executable directives: the
  // clauses are transformed first (each bracketed by Start/EndOpenMPClause),
  // then the associated statement inside an OpenMP region, and finally the
  // directive is rebuilt. The per-directive Transform* wrappers call this
  // from inside a DSA block they open themselves.
  // Transform the clauses
  llvm::SmallVector<OMPClause *, 16> TClauses;
  ArrayRef<OMPClause *> Clauses = D->clauses();
  TClauses.reserve(Clauses.size());
  for (ArrayRef<OMPClause *>::iterator I = Clauses.begin(), E = Clauses.end();
       I != E; ++I) {
    if (*I) {
      getDerived().getSema().StartOpenMPClause((*I)->getClauseKind());
      OMPClause *Clause = getDerived().TransformOMPClause(*I);
      getDerived().getSema().EndOpenMPClause();
      // A null Clause here means the clause failed to transform; it is
      // dropped, and the size mismatch check below turns that into an error.
      if (Clause)
        TClauses.push_back(Clause);
    } else {
      TClauses.push_back(nullptr);
    }
  }
  StmtResult AssociatedStmt;
  if (D->hasAssociatedStmt() && D->getAssociatedStmt()) {
    getDerived().getSema().ActOnOpenMPRegionStart(D->getDirectiveKind(),
                                                  /*CurScope=*/nullptr);
    StmtResult Body;
    {
      Sema::CompoundScopeRAII CompoundScope(getSema());
      Stmt *CS = D->getInnermostCapturedStmt()->getCapturedStmt();
      Body = getDerived().TransformStmt(CS);
    }
    AssociatedStmt =
        getDerived().getSema().ActOnOpenMPRegionEnd(Body, TClauses);
    if (AssociatedStmt.isInvalid()) {
      return StmtError();
    }
  }
  // A dropped clause (transform failure above) makes the directive invalid.
  if (TClauses.size() != Clauses.size()) {
    return StmtError();
  }
  // Transform directive name for 'omp critical' directive.
  DeclarationNameInfo DirName;
  if (D->getDirectiveKind() == OMPD_critical) {
    DirName = cast<OMPCriticalDirective>(D)->getDirectiveName();
    DirName = getDerived().TransformDeclarationNameInfo(DirName);
  }
  OpenMPDirectiveKind CancelRegion = OMPD_unknown;
  if (D->getDirectiveKind() == OMPD_cancellation_point) {
    CancelRegion = cast<OMPCancellationPointDirective>(D)->getCancelRegion();
  } else if (D->getDirectiveKind() == OMPD_cancel) {
    CancelRegion = cast<OMPCancelDirective>(D)->getCancelRegion();
  }

  return getDerived().RebuildOMPExecutableDirective(
      D->getDirectiveKind(), DirName, CancelRegion, TClauses,
      AssociatedStmt.get(), D->getBeginLoc(), D->getEndLoc());
}
template <typename Derived>
StmtResult
TreeTransform<Derived>::TransformOMPParallelDirective(OMPParallelDirective *D) {
  // Bracket the common directive transformation in a data-sharing
  // attributes block for 'parallel'.
  DeclarationNameInfo DirName;
  getDerived().getSema().StartOpenMPDSABlock(OMPD_parallel, DirName, nullptr,
                                             D->getBeginLoc());
  StmtResult Result = getDerived().TransformOMPExecutableDirective(D);
  getDerived().getSema().EndOpenMPDSABlock(Result.get());
  return Result;
}
template <typename Derived>
StmtResult
TreeTransform<Derived>::TransformOMPSimdDirective(OMPSimdDirective *D) {
  // Bracket the common directive transformation in a data-sharing
  // attributes block for 'simd'.
  DeclarationNameInfo DirName;
  getDerived().getSema().StartOpenMPDSABlock(OMPD_simd, DirName, nullptr,
                                             D->getBeginLoc());
  StmtResult Result = getDerived().TransformOMPExecutableDirective(D);
  getDerived().getSema().EndOpenMPDSABlock(Result.get());
  return Result;
}
template <typename Derived>
StmtResult
TreeTransform<Derived>::TransformOMPForDirective(OMPForDirective *D) {
  // Bracket the common directive transformation in a data-sharing
  // attributes block for 'for'.
  DeclarationNameInfo DirName;
  getDerived().getSema().StartOpenMPDSABlock(OMPD_for, DirName, nullptr,
                                             D->getBeginLoc());
  StmtResult Result = getDerived().TransformOMPExecutableDirective(D);
  getDerived().getSema().EndOpenMPDSABlock(Result.get());
  return Result;
}
template <typename Derived>
StmtResult
TreeTransform<Derived>::TransformOMPForSimdDirective(OMPForSimdDirective *D) {
  // Bracket the common directive transformation in a data-sharing
  // attributes block for 'for simd'.
  DeclarationNameInfo DirName;
  getDerived().getSema().StartOpenMPDSABlock(OMPD_for_simd, DirName, nullptr,
                                             D->getBeginLoc());
  StmtResult Result = getDerived().TransformOMPExecutableDirective(D);
  getDerived().getSema().EndOpenMPDSABlock(Result.get());
  return Result;
}
template <typename Derived>
StmtResult
TreeTransform<Derived>::TransformOMPSectionsDirective(OMPSectionsDirective *D) {
  // Bracket the common directive transformation in a data-sharing
  // attributes block for 'sections'.
  DeclarationNameInfo DirName;
  getDerived().getSema().StartOpenMPDSABlock(OMPD_sections, DirName, nullptr,
                                             D->getBeginLoc());
  StmtResult Result = getDerived().TransformOMPExecutableDirective(D);
  getDerived().getSema().EndOpenMPDSABlock(Result.get());
  return Result;
}
template <typename Derived>
StmtResult
TreeTransform<Derived>::TransformOMPSectionDirective(OMPSectionDirective *D) {
  // Bracket the common directive transformation in a data-sharing
  // attributes block for 'section'.
  DeclarationNameInfo DirName;
  getDerived().getSema().StartOpenMPDSABlock(OMPD_section, DirName, nullptr,
                                             D->getBeginLoc());
  StmtResult Result = getDerived().TransformOMPExecutableDirective(D);
  getDerived().getSema().EndOpenMPDSABlock(Result.get());
  return Result;
}
template <typename Derived>
StmtResult
TreeTransform<Derived>::TransformOMPSingleDirective(OMPSingleDirective *D) {
  // Bracket the common directive transformation in a data-sharing
  // attributes block for 'single'.
  DeclarationNameInfo DirName;
  getDerived().getSema().StartOpenMPDSABlock(OMPD_single, DirName, nullptr,
                                             D->getBeginLoc());
  StmtResult Result = getDerived().TransformOMPExecutableDirective(D);
  getDerived().getSema().EndOpenMPDSABlock(Result.get());
  return Result;
}
template <typename Derived>
StmtResult
TreeTransform<Derived>::TransformOMPMasterDirective(OMPMasterDirective *D) {
  // Bracket the common directive transformation in a data-sharing
  // attributes block for 'master'.
  DeclarationNameInfo DirName;
  getDerived().getSema().StartOpenMPDSABlock(OMPD_master, DirName, nullptr,
                                             D->getBeginLoc());
  StmtResult Result = getDerived().TransformOMPExecutableDirective(D);
  getDerived().getSema().EndOpenMPDSABlock(Result.get());
  return Result;
}
template <typename Derived>
StmtResult
TreeTransform<Derived>::TransformOMPCriticalDirective(OMPCriticalDirective *D) {
  // Bracket the common directive transformation in a data-sharing
  // attributes block for 'critical'; unlike the other directives, the
  // directive's own name is passed into the DSA block.
  getDerived().getSema().StartOpenMPDSABlock(
      OMPD_critical, D->getDirectiveName(), nullptr, D->getBeginLoc());
  StmtResult Result = getDerived().TransformOMPExecutableDirective(D);
  getDerived().getSema().EndOpenMPDSABlock(Result.get());
  return Result;
}
template <typename Derived>
StmtResult TreeTransform<Derived>::TransformOMPParallelForDirective(
    OMPParallelForDirective *D) {
  // Bracket the common directive transformation in a data-sharing
  // attributes block for 'parallel for'.
  DeclarationNameInfo DirName;
  getDerived().getSema().StartOpenMPDSABlock(OMPD_parallel_for, DirName,
                                             nullptr, D->getBeginLoc());
  StmtResult Result = getDerived().TransformOMPExecutableDirective(D);
  getDerived().getSema().EndOpenMPDSABlock(Result.get());
  return Result;
}
template <typename Derived>
StmtResult TreeTransform<Derived>::TransformOMPParallelForSimdDirective(
    OMPParallelForSimdDirective *D) {
  // Bracket the common directive transformation in a data-sharing
  // attributes block for 'parallel for simd'.
  DeclarationNameInfo DirName;
  getDerived().getSema().StartOpenMPDSABlock(OMPD_parallel_for_simd, DirName,
                                             nullptr, D->getBeginLoc());
  StmtResult Result = getDerived().TransformOMPExecutableDirective(D);
  getDerived().getSema().EndOpenMPDSABlock(Result.get());
  return Result;
}
template <typename Derived>
StmtResult TreeTransform<Derived>::TransformOMPParallelSectionsDirective(
    OMPParallelSectionsDirective *D) {
  // Bracket the common directive transformation in a data-sharing
  // attributes block for 'parallel sections'.
  DeclarationNameInfo DirName;
  getDerived().getSema().StartOpenMPDSABlock(OMPD_parallel_sections, DirName,
                                             nullptr, D->getBeginLoc());
  StmtResult Result = getDerived().TransformOMPExecutableDirective(D);
  getDerived().getSema().EndOpenMPDSABlock(Result.get());
  return Result;
}
template <typename Derived>
StmtResult
TreeTransform<Derived>::TransformOMPTaskDirective(OMPTaskDirective *D) {
  // Bracket the common directive transformation in a data-sharing
  // attributes block for 'task'.
  DeclarationNameInfo DirName;
  getDerived().getSema().StartOpenMPDSABlock(OMPD_task, DirName, nullptr,
                                             D->getBeginLoc());
  StmtResult Result = getDerived().TransformOMPExecutableDirective(D);
  getDerived().getSema().EndOpenMPDSABlock(Result.get());
  return Result;
}
template <typename Derived>
StmtResult TreeTransform<Derived>::TransformOMPTaskyieldDirective(
    OMPTaskyieldDirective *D) {
  // Bracket the common directive transformation in a data-sharing
  // attributes block for 'taskyield'.
  DeclarationNameInfo DirName;
  getDerived().getSema().StartOpenMPDSABlock(OMPD_taskyield, DirName, nullptr,
                                             D->getBeginLoc());
  StmtResult Result = getDerived().TransformOMPExecutableDirective(D);
  getDerived().getSema().EndOpenMPDSABlock(Result.get());
  return Result;
}
template <typename Derived>
StmtResult
TreeTransform<Derived>::TransformOMPBarrierDirective(OMPBarrierDirective *D) {
DeclarationNameInfo DirName;
getDerived().getSema().StartOpenMPDSABlock(OMPD_barrier, DirName, nullptr,
D->getBeginLoc());
StmtResult Res = getDerived().TransformOMPExecutableDirective(D);
getDerived().getSema().EndOpenMPDSABlock(Res.get());
return Res;
}
template <typename Derived>
StmtResult
TreeTransform<Derived>::TransformOMPTaskwaitDirective(OMPTaskwaitDirective *D) {
DeclarationNameInfo DirName;
getDerived().getSema().StartOpenMPDSABlock(OMPD_taskwait, DirName, nullptr,
D->getBeginLoc());
StmtResult Res = getDerived().TransformOMPExecutableDirective(D);
getDerived().getSema().EndOpenMPDSABlock(Res.get());
return Res;
}
template <typename Derived>
StmtResult TreeTransform<Derived>::TransformOMPTaskgroupDirective(
OMPTaskgroupDirective *D) {
DeclarationNameInfo DirName;
getDerived().getSema().StartOpenMPDSABlock(OMPD_taskgroup, DirName, nullptr,
D->getBeginLoc());
StmtResult Res = getDerived().TransformOMPExecutableDirective(D);
getDerived().getSema().EndOpenMPDSABlock(Res.get());
return Res;
}
template <typename Derived>
StmtResult
TreeTransform<Derived>::TransformOMPFlushDirective(OMPFlushDirective *D) {
DeclarationNameInfo DirName;
getDerived().getSema().StartOpenMPDSABlock(OMPD_flush, DirName, nullptr,
D->getBeginLoc());
StmtResult Res = getDerived().TransformOMPExecutableDirective(D);
getDerived().getSema().EndOpenMPDSABlock(Res.get());
return Res;
}
template <typename Derived>
StmtResult
TreeTransform<Derived>::TransformOMPOrderedDirective(OMPOrderedDirective *D) {
DeclarationNameInfo DirName;
getDerived().getSema().StartOpenMPDSABlock(OMPD_ordered, DirName, nullptr,
D->getBeginLoc());
StmtResult Res = getDerived().TransformOMPExecutableDirective(D);
getDerived().getSema().EndOpenMPDSABlock(Res.get());
return Res;
}
template <typename Derived>
StmtResult
TreeTransform<Derived>::TransformOMPAtomicDirective(OMPAtomicDirective *D) {
DeclarationNameInfo DirName;
getDerived().getSema().StartOpenMPDSABlock(OMPD_atomic, DirName, nullptr,
D->getBeginLoc());
StmtResult Res = getDerived().TransformOMPExecutableDirective(D);
getDerived().getSema().EndOpenMPDSABlock(Res.get());
return Res;
}
template <typename Derived>
StmtResult
TreeTransform<Derived>::TransformOMPTargetDirective(OMPTargetDirective *D) {
DeclarationNameInfo DirName;
getDerived().getSema().StartOpenMPDSABlock(OMPD_target, DirName, nullptr,
D->getBeginLoc());
StmtResult Res = getDerived().TransformOMPExecutableDirective(D);
getDerived().getSema().EndOpenMPDSABlock(Res.get());
return Res;
}
template <typename Derived>
StmtResult TreeTransform<Derived>::TransformOMPTargetDataDirective(
OMPTargetDataDirective *D) {
DeclarationNameInfo DirName;
getDerived().getSema().StartOpenMPDSABlock(OMPD_target_data, DirName, nullptr,
D->getBeginLoc());
StmtResult Res = getDerived().TransformOMPExecutableDirective(D);
getDerived().getSema().EndOpenMPDSABlock(Res.get());
return Res;
}
template <typename Derived>
StmtResult TreeTransform<Derived>::TransformOMPTargetEnterDataDirective(
OMPTargetEnterDataDirective *D) {
DeclarationNameInfo DirName;
getDerived().getSema().StartOpenMPDSABlock(OMPD_target_enter_data, DirName,
nullptr, D->getBeginLoc());
StmtResult Res = getDerived().TransformOMPExecutableDirective(D);
getDerived().getSema().EndOpenMPDSABlock(Res.get());
return Res;
}
template <typename Derived>
StmtResult TreeTransform<Derived>::TransformOMPTargetExitDataDirective(
OMPTargetExitDataDirective *D) {
DeclarationNameInfo DirName;
getDerived().getSema().StartOpenMPDSABlock(OMPD_target_exit_data, DirName,
nullptr, D->getBeginLoc());
StmtResult Res = getDerived().TransformOMPExecutableDirective(D);
getDerived().getSema().EndOpenMPDSABlock(Res.get());
return Res;
}
template <typename Derived>
StmtResult TreeTransform<Derived>::TransformOMPTargetParallelDirective(
OMPTargetParallelDirective *D) {
DeclarationNameInfo DirName;
getDerived().getSema().StartOpenMPDSABlock(OMPD_target_parallel, DirName,
nullptr, D->getBeginLoc());
StmtResult Res = getDerived().TransformOMPExecutableDirective(D);
getDerived().getSema().EndOpenMPDSABlock(Res.get());
return Res;
}
template <typename Derived>
StmtResult TreeTransform<Derived>::TransformOMPTargetParallelForDirective(
OMPTargetParallelForDirective *D) {
DeclarationNameInfo DirName;
getDerived().getSema().StartOpenMPDSABlock(OMPD_target_parallel_for, DirName,
nullptr, D->getBeginLoc());
StmtResult Res = getDerived().TransformOMPExecutableDirective(D);
getDerived().getSema().EndOpenMPDSABlock(Res.get());
return Res;
}
template <typename Derived>
StmtResult TreeTransform<Derived>::TransformOMPTargetUpdateDirective(
OMPTargetUpdateDirective *D) {
DeclarationNameInfo DirName;
getDerived().getSema().StartOpenMPDSABlock(OMPD_target_update, DirName,
nullptr, D->getBeginLoc());
StmtResult Res = getDerived().TransformOMPExecutableDirective(D);
getDerived().getSema().EndOpenMPDSABlock(Res.get());
return Res;
}
template <typename Derived>
StmtResult
TreeTransform<Derived>::TransformOMPTeamsDirective(OMPTeamsDirective *D) {
DeclarationNameInfo DirName;
getDerived().getSema().StartOpenMPDSABlock(OMPD_teams, DirName, nullptr,
D->getBeginLoc());
StmtResult Res = getDerived().TransformOMPExecutableDirective(D);
getDerived().getSema().EndOpenMPDSABlock(Res.get());
return Res;
}
template <typename Derived>
StmtResult TreeTransform<Derived>::TransformOMPCancellationPointDirective(
OMPCancellationPointDirective *D) {
DeclarationNameInfo DirName;
getDerived().getSema().StartOpenMPDSABlock(OMPD_cancellation_point, DirName,
nullptr, D->getBeginLoc());
StmtResult Res = getDerived().TransformOMPExecutableDirective(D);
getDerived().getSema().EndOpenMPDSABlock(Res.get());
return Res;
}
template <typename Derived>
StmtResult
TreeTransform<Derived>::TransformOMPCancelDirective(OMPCancelDirective *D) {
DeclarationNameInfo DirName;
getDerived().getSema().StartOpenMPDSABlock(OMPD_cancel, DirName, nullptr,
D->getBeginLoc());
StmtResult Res = getDerived().TransformOMPExecutableDirective(D);
getDerived().getSema().EndOpenMPDSABlock(Res.get());
return Res;
}
template <typename Derived>
StmtResult
TreeTransform<Derived>::TransformOMPTaskLoopDirective(OMPTaskLoopDirective *D) {
DeclarationNameInfo DirName;
getDerived().getSema().StartOpenMPDSABlock(OMPD_taskloop, DirName, nullptr,
D->getBeginLoc());
StmtResult Res = getDerived().TransformOMPExecutableDirective(D);
getDerived().getSema().EndOpenMPDSABlock(Res.get());
return Res;
}
template <typename Derived>
StmtResult TreeTransform<Derived>::TransformOMPTaskLoopSimdDirective(
OMPTaskLoopSimdDirective *D) {
DeclarationNameInfo DirName;
getDerived().getSema().StartOpenMPDSABlock(OMPD_taskloop_simd, DirName,
nullptr, D->getBeginLoc());
StmtResult Res = getDerived().TransformOMPExecutableDirective(D);
getDerived().getSema().EndOpenMPDSABlock(Res.get());
return Res;
}
template <typename Derived>
StmtResult TreeTransform<Derived>::TransformOMPDistributeDirective(
OMPDistributeDirective *D) {
DeclarationNameInfo DirName;
getDerived().getSema().StartOpenMPDSABlock(OMPD_distribute, DirName, nullptr,
D->getBeginLoc());
StmtResult Res = getDerived().TransformOMPExecutableDirective(D);
getDerived().getSema().EndOpenMPDSABlock(Res.get());
return Res;
}
template <typename Derived>
StmtResult TreeTransform<Derived>::TransformOMPDistributeParallelForDirective(
OMPDistributeParallelForDirective *D) {
DeclarationNameInfo DirName;
getDerived().getSema().StartOpenMPDSABlock(
OMPD_distribute_parallel_for, DirName, nullptr, D->getBeginLoc());
StmtResult Res = getDerived().TransformOMPExecutableDirective(D);
getDerived().getSema().EndOpenMPDSABlock(Res.get());
return Res;
}
template <typename Derived>
StmtResult
TreeTransform<Derived>::TransformOMPDistributeParallelForSimdDirective(
OMPDistributeParallelForSimdDirective *D) {
DeclarationNameInfo DirName;
getDerived().getSema().StartOpenMPDSABlock(
OMPD_distribute_parallel_for_simd, DirName, nullptr, D->getBeginLoc());
StmtResult Res = getDerived().TransformOMPExecutableDirective(D);
getDerived().getSema().EndOpenMPDSABlock(Res.get());
return Res;
}
template <typename Derived>
StmtResult TreeTransform<Derived>::TransformOMPDistributeSimdDirective(
OMPDistributeSimdDirective *D) {
DeclarationNameInfo DirName;
getDerived().getSema().StartOpenMPDSABlock(OMPD_distribute_simd, DirName,
nullptr, D->getBeginLoc());
StmtResult Res = getDerived().TransformOMPExecutableDirective(D);
getDerived().getSema().EndOpenMPDSABlock(Res.get());
return Res;
}
template <typename Derived>
StmtResult TreeTransform<Derived>::TransformOMPTargetParallelForSimdDirective(
OMPTargetParallelForSimdDirective *D) {
DeclarationNameInfo DirName;
getDerived().getSema().StartOpenMPDSABlock(
OMPD_target_parallel_for_simd, DirName, nullptr, D->getBeginLoc());
StmtResult Res = getDerived().TransformOMPExecutableDirective(D);
getDerived().getSema().EndOpenMPDSABlock(Res.get());
return Res;
}
template <typename Derived>
StmtResult TreeTransform<Derived>::TransformOMPTargetSimdDirective(
OMPTargetSimdDirective *D) {
DeclarationNameInfo DirName;
getDerived().getSema().StartOpenMPDSABlock(OMPD_target_simd, DirName, nullptr,
D->getBeginLoc());
StmtResult Res = getDerived().TransformOMPExecutableDirective(D);
getDerived().getSema().EndOpenMPDSABlock(Res.get());
return Res;
}
template <typename Derived>
StmtResult TreeTransform<Derived>::TransformOMPTeamsDistributeDirective(
OMPTeamsDistributeDirective *D) {
DeclarationNameInfo DirName;
getDerived().getSema().StartOpenMPDSABlock(OMPD_teams_distribute, DirName,
nullptr, D->getBeginLoc());
StmtResult Res = getDerived().TransformOMPExecutableDirective(D);
getDerived().getSema().EndOpenMPDSABlock(Res.get());
return Res;
}
template <typename Derived>
StmtResult TreeTransform<Derived>::TransformOMPTeamsDistributeSimdDirective(
OMPTeamsDistributeSimdDirective *D) {
DeclarationNameInfo DirName;
getDerived().getSema().StartOpenMPDSABlock(
OMPD_teams_distribute_simd, DirName, nullptr, D->getBeginLoc());
StmtResult Res = getDerived().TransformOMPExecutableDirective(D);
getDerived().getSema().EndOpenMPDSABlock(Res.get());
return Res;
}
template <typename Derived>
StmtResult TreeTransform<Derived>::TransformOMPTeamsDistributeParallelForSimdDirective(
OMPTeamsDistributeParallelForSimdDirective *D) {
DeclarationNameInfo DirName;
getDerived().getSema().StartOpenMPDSABlock(
OMPD_teams_distribute_parallel_for_simd, DirName, nullptr,
D->getBeginLoc());
StmtResult Res = getDerived().TransformOMPExecutableDirective(D);
getDerived().getSema().EndOpenMPDSABlock(Res.get());
return Res;
}
template <typename Derived>
StmtResult TreeTransform<Derived>::TransformOMPTeamsDistributeParallelForDirective(
OMPTeamsDistributeParallelForDirective *D) {
DeclarationNameInfo DirName;
getDerived().getSema().StartOpenMPDSABlock(
OMPD_teams_distribute_parallel_for, DirName, nullptr, D->getBeginLoc());
StmtResult Res = getDerived().TransformOMPExecutableDirective(D);
getDerived().getSema().EndOpenMPDSABlock(Res.get());
return Res;
}
// OpenMP target/teams combined-directive transforms.  Structurally identical
// to the directive transforms above: open the DSA block, transform via
// TransformOMPExecutableDirective, close the DSA block unconditionally.
//
// Consistency fix: these five previously declared the result as `auto` while
// every sibling transform in this file spells out `StmtResult`; the explicit
// type is used here to match, with no behavioral change.
template <typename Derived>
StmtResult TreeTransform<Derived>::TransformOMPTargetTeamsDirective(
    OMPTargetTeamsDirective *D) {
  DeclarationNameInfo DirName;
  getDerived().getSema().StartOpenMPDSABlock(OMPD_target_teams, DirName,
                                             nullptr, D->getBeginLoc());
  StmtResult Res = getDerived().TransformOMPExecutableDirective(D);
  getDerived().getSema().EndOpenMPDSABlock(Res.get());
  return Res;
}

template <typename Derived>
StmtResult TreeTransform<Derived>::TransformOMPTargetTeamsDistributeDirective(
    OMPTargetTeamsDistributeDirective *D) {
  DeclarationNameInfo DirName;
  getDerived().getSema().StartOpenMPDSABlock(
      OMPD_target_teams_distribute, DirName, nullptr, D->getBeginLoc());
  StmtResult Res = getDerived().TransformOMPExecutableDirective(D);
  getDerived().getSema().EndOpenMPDSABlock(Res.get());
  return Res;
}

template <typename Derived>
StmtResult
TreeTransform<Derived>::TransformOMPTargetTeamsDistributeParallelForDirective(
    OMPTargetTeamsDistributeParallelForDirective *D) {
  DeclarationNameInfo DirName;
  getDerived().getSema().StartOpenMPDSABlock(
      OMPD_target_teams_distribute_parallel_for, DirName, nullptr,
      D->getBeginLoc());
  StmtResult Res = getDerived().TransformOMPExecutableDirective(D);
  getDerived().getSema().EndOpenMPDSABlock(Res.get());
  return Res;
}

template <typename Derived>
StmtResult TreeTransform<Derived>::
    TransformOMPTargetTeamsDistributeParallelForSimdDirective(
        OMPTargetTeamsDistributeParallelForSimdDirective *D) {
  DeclarationNameInfo DirName;
  getDerived().getSema().StartOpenMPDSABlock(
      OMPD_target_teams_distribute_parallel_for_simd, DirName, nullptr,
      D->getBeginLoc());
  StmtResult Res = getDerived().TransformOMPExecutableDirective(D);
  getDerived().getSema().EndOpenMPDSABlock(Res.get());
  return Res;
}

template <typename Derived>
StmtResult
TreeTransform<Derived>::TransformOMPTargetTeamsDistributeSimdDirective(
    OMPTargetTeamsDistributeSimdDirective *D) {
  DeclarationNameInfo DirName;
  getDerived().getSema().StartOpenMPDSABlock(
      OMPD_target_teams_distribute_simd, DirName, nullptr, D->getBeginLoc());
  StmtResult Res = getDerived().TransformOMPExecutableDirective(D);
  getDerived().getSema().EndOpenMPDSABlock(Res.get());
  return Res;
}
//===----------------------------------------------------------------------===//
// OpenMP clause transformation
//===----------------------------------------------------------------------===//
// Clauses that carry a single expression: transform the expression with the
// derived transformer and rebuild the clause with the original source
// locations.  A failed expression transform aborts the clause (nullptr).
template <typename Derived>
OMPClause *TreeTransform<Derived>::TransformOMPIfClause(OMPIfClause *C) {
  ExprResult Cond = getDerived().TransformExpr(C->getCondition());
  if (Cond.isInvalid())
    return nullptr;
  return getDerived().RebuildOMPIfClause(
      C->getNameModifier(), Cond.get(), C->getBeginLoc(), C->getLParenLoc(),
      C->getNameModifierLoc(), C->getColonLoc(), C->getEndLoc());
}

template <typename Derived>
OMPClause *TreeTransform<Derived>::TransformOMPFinalClause(OMPFinalClause *C) {
  ExprResult Cond = getDerived().TransformExpr(C->getCondition());
  if (Cond.isInvalid())
    return nullptr;
  return getDerived().RebuildOMPFinalClause(Cond.get(), C->getBeginLoc(),
                                            C->getLParenLoc(), C->getEndLoc());
}

template <typename Derived>
OMPClause *
TreeTransform<Derived>::TransformOMPNumThreadsClause(OMPNumThreadsClause *C) {
  ExprResult NumThreads = getDerived().TransformExpr(C->getNumThreads());
  if (NumThreads.isInvalid())
    return nullptr;
  return getDerived().RebuildOMPNumThreadsClause(
      NumThreads.get(), C->getBeginLoc(), C->getLParenLoc(), C->getEndLoc());
}

template <typename Derived>
OMPClause *
TreeTransform<Derived>::TransformOMPSafelenClause(OMPSafelenClause *C) {
  ExprResult E = getDerived().TransformExpr(C->getSafelen());
  if (E.isInvalid())
    return nullptr;
  return getDerived().RebuildOMPSafelenClause(
      E.get(), C->getBeginLoc(), C->getLParenLoc(), C->getEndLoc());
}

template <typename Derived>
OMPClause *
TreeTransform<Derived>::TransformOMPAllocatorClause(OMPAllocatorClause *C) {
  ExprResult E = getDerived().TransformExpr(C->getAllocator());
  if (E.isInvalid())
    return nullptr;
  return getDerived().RebuildOMPAllocatorClause(
      E.get(), C->getBeginLoc(), C->getLParenLoc(), C->getEndLoc());
}

template <typename Derived>
OMPClause *
TreeTransform<Derived>::TransformOMPSimdlenClause(OMPSimdlenClause *C) {
  ExprResult E = getDerived().TransformExpr(C->getSimdlen());
  if (E.isInvalid())
    return nullptr;
  return getDerived().RebuildOMPSimdlenClause(
      E.get(), C->getBeginLoc(), C->getLParenLoc(), C->getEndLoc());
}

template <typename Derived>
OMPClause *
TreeTransform<Derived>::TransformOMPCollapseClause(OMPCollapseClause *C) {
  ExprResult E = getDerived().TransformExpr(C->getNumForLoops());
  if (E.isInvalid())
    return nullptr;
  return getDerived().RebuildOMPCollapseClause(
      E.get(), C->getBeginLoc(), C->getLParenLoc(), C->getEndLoc());
}

// 'default' and 'proc_bind' carry only enumerator kinds and locations, so
// they are rebuilt without transforming any sub-expressions.
template <typename Derived>
OMPClause *
TreeTransform<Derived>::TransformOMPDefaultClause(OMPDefaultClause *C) {
  return getDerived().RebuildOMPDefaultClause(
      C->getDefaultKind(), C->getDefaultKindKwLoc(), C->getBeginLoc(),
      C->getLParenLoc(), C->getEndLoc());
}

template <typename Derived>
OMPClause *
TreeTransform<Derived>::TransformOMPProcBindClause(OMPProcBindClause *C) {
  return getDerived().RebuildOMPProcBindClause(
      C->getProcBindKind(), C->getProcBindKindKwLoc(), C->getBeginLoc(),
      C->getLParenLoc(), C->getEndLoc());
}

template <typename Derived>
OMPClause *
TreeTransform<Derived>::TransformOMPScheduleClause(OMPScheduleClause *C) {
  // The chunk-size expression may be null; TransformExpr passes null through.
  ExprResult E = getDerived().TransformExpr(C->getChunkSize());
  if (E.isInvalid())
    return nullptr;
  return getDerived().RebuildOMPScheduleClause(
      C->getFirstScheduleModifier(), C->getSecondScheduleModifier(),
      C->getScheduleKind(), E.get(), C->getBeginLoc(), C->getLParenLoc(),
      C->getFirstScheduleModifierLoc(), C->getSecondScheduleModifierLoc(),
      C->getScheduleKindLoc(), C->getCommaLoc(), C->getEndLoc());
}

template <typename Derived>
OMPClause *
TreeTransform<Derived>::TransformOMPOrderedClause(OMPOrderedClause *C) {
  // The loop-count argument is optional; E stays null when absent and
  // E.get() then passes nullptr to the rebuild.
  ExprResult E;
  if (auto *Num = C->getNumForLoops()) {
    E = getDerived().TransformExpr(Num);
    if (E.isInvalid())
      return nullptr;
  }
  return getDerived().RebuildOMPOrderedClause(C->getBeginLoc(), C->getEndLoc(),
                                              C->getLParenLoc(), E.get());
}
// Argument-less clauses: nothing in them can be template-dependent, so the
// original clause object is returned as-is instead of being rebuilt.
template <typename Derived>
OMPClause *
TreeTransform<Derived>::TransformOMPNowaitClause(OMPNowaitClause *C) {
  // No need to rebuild this clause, no template-dependent parameters.
  return C;
}

template <typename Derived>
OMPClause *
TreeTransform<Derived>::TransformOMPUntiedClause(OMPUntiedClause *C) {
  // No need to rebuild this clause, no template-dependent parameters.
  return C;
}

template <typename Derived>
OMPClause *
TreeTransform<Derived>::TransformOMPMergeableClause(OMPMergeableClause *C) {
  // No need to rebuild this clause, no template-dependent parameters.
  return C;
}

template <typename Derived>
OMPClause *TreeTransform<Derived>::TransformOMPReadClause(OMPReadClause *C) {
  // No need to rebuild this clause, no template-dependent parameters.
  return C;
}

template <typename Derived>
OMPClause *TreeTransform<Derived>::TransformOMPWriteClause(OMPWriteClause *C) {
  // No need to rebuild this clause, no template-dependent parameters.
  return C;
}

template <typename Derived>
OMPClause *
TreeTransform<Derived>::TransformOMPUpdateClause(OMPUpdateClause *C) {
  // No need to rebuild this clause, no template-dependent parameters.
  return C;
}

template <typename Derived>
OMPClause *
TreeTransform<Derived>::TransformOMPCaptureClause(OMPCaptureClause *C) {
  // No need to rebuild this clause, no template-dependent parameters.
  return C;
}

template <typename Derived>
OMPClause *
TreeTransform<Derived>::TransformOMPSeqCstClause(OMPSeqCstClause *C) {
  // No need to rebuild this clause, no template-dependent parameters.
  return C;
}

template <typename Derived>
OMPClause *
TreeTransform<Derived>::TransformOMPThreadsClause(OMPThreadsClause *C) {
  // No need to rebuild this clause, no template-dependent parameters.
  return C;
}

template <typename Derived>
OMPClause *TreeTransform<Derived>::TransformOMPSIMDClause(OMPSIMDClause *C) {
  // No need to rebuild this clause, no template-dependent parameters.
  return C;
}

template <typename Derived>
OMPClause *
TreeTransform<Derived>::TransformOMPNogroupClause(OMPNogroupClause *C) {
  // No need to rebuild this clause, no template-dependent parameters.
  return C;
}
// Clauses of the 'requires' family never occur in a dependent context (per
// the unreachable messages below), so reaching any of these transforms is a
// program invariant violation.
template <typename Derived>
OMPClause *TreeTransform<Derived>::TransformOMPUnifiedAddressClause(
    OMPUnifiedAddressClause *C) {
  llvm_unreachable("unified_address clause cannot appear in dependent context");
}

template <typename Derived>
OMPClause *TreeTransform<Derived>::TransformOMPUnifiedSharedMemoryClause(
    OMPUnifiedSharedMemoryClause *C) {
  llvm_unreachable(
      "unified_shared_memory clause cannot appear in dependent context");
}

template <typename Derived>
OMPClause *TreeTransform<Derived>::TransformOMPReverseOffloadClause(
    OMPReverseOffloadClause *C) {
  llvm_unreachable("reverse_offload clause cannot appear in dependent context");
}

template <typename Derived>
OMPClause *TreeTransform<Derived>::TransformOMPDynamicAllocatorsClause(
    OMPDynamicAllocatorsClause *C) {
  llvm_unreachable(
      "dynamic_allocators clause cannot appear in dependent context");
}

template <typename Derived>
OMPClause *TreeTransform<Derived>::TransformOMPAtomicDefaultMemOrderClause(
    OMPAtomicDefaultMemOrderClause *C) {
  llvm_unreachable(
      "atomic_default_mem_order clause cannot appear in dependent context");
}
// Simple variable-list clauses: transform each expression in the list and
// rebuild the clause with the collected results and the original locations.
// Any failed element transform aborts the whole clause (nullptr).
template <typename Derived>
OMPClause *
TreeTransform<Derived>::TransformOMPPrivateClause(OMPPrivateClause *C) {
  llvm::SmallVector<Expr *, 16> Vars;
  Vars.reserve(C->varlist_size());
  for (auto *VE : C->varlists()) {
    ExprResult EVar = getDerived().TransformExpr(cast<Expr>(VE));
    if (EVar.isInvalid())
      return nullptr;
    Vars.push_back(EVar.get());
  }
  return getDerived().RebuildOMPPrivateClause(
      Vars, C->getBeginLoc(), C->getLParenLoc(), C->getEndLoc());
}

template <typename Derived>
OMPClause *TreeTransform<Derived>::TransformOMPFirstprivateClause(
    OMPFirstprivateClause *C) {
  llvm::SmallVector<Expr *, 16> Vars;
  Vars.reserve(C->varlist_size());
  for (auto *VE : C->varlists()) {
    ExprResult EVar = getDerived().TransformExpr(cast<Expr>(VE));
    if (EVar.isInvalid())
      return nullptr;
    Vars.push_back(EVar.get());
  }
  return getDerived().RebuildOMPFirstprivateClause(
      Vars, C->getBeginLoc(), C->getLParenLoc(), C->getEndLoc());
}

template <typename Derived>
OMPClause *
TreeTransform<Derived>::TransformOMPLastprivateClause(OMPLastprivateClause *C) {
  llvm::SmallVector<Expr *, 16> Vars;
  Vars.reserve(C->varlist_size());
  for (auto *VE : C->varlists()) {
    ExprResult EVar = getDerived().TransformExpr(cast<Expr>(VE));
    if (EVar.isInvalid())
      return nullptr;
    Vars.push_back(EVar.get());
  }
  return getDerived().RebuildOMPLastprivateClause(
      Vars, C->getBeginLoc(), C->getLParenLoc(), C->getEndLoc());
}

template <typename Derived>
OMPClause *
TreeTransform<Derived>::TransformOMPSharedClause(OMPSharedClause *C) {
  llvm::SmallVector<Expr *, 16> Vars;
  Vars.reserve(C->varlist_size());
  for (auto *VE : C->varlists()) {
    ExprResult EVar = getDerived().TransformExpr(cast<Expr>(VE));
    if (EVar.isInvalid())
      return nullptr;
    Vars.push_back(EVar.get());
  }
  return getDerived().RebuildOMPSharedClause(Vars, C->getBeginLoc(),
                                             C->getLParenLoc(), C->getEndLoc());
}
// Reduction-family clauses (reduction / task_reduction / in_reduction).
// Besides the variable list, these carry a reduction identifier (possibly
// qualified) and, per item, an optional UnresolvedLookupExpr holding the
// candidate user-defined-reduction (UDR) declarations collected during the
// dependent parse.  Each candidate decl is transformed individually and a
// fresh UnresolvedLookupExpr is built for the rebuilt clause; a null entry
// in reduction_ops() is propagated as a null entry.
template <typename Derived>
OMPClause *
TreeTransform<Derived>::TransformOMPReductionClause(OMPReductionClause *C) {
  llvm::SmallVector<Expr *, 16> Vars;
  Vars.reserve(C->varlist_size());
  for (auto *VE : C->varlists()) {
    ExprResult EVar = getDerived().TransformExpr(cast<Expr>(VE));
    if (EVar.isInvalid())
      return nullptr;
    Vars.push_back(EVar.get());
  }
  CXXScopeSpec ReductionIdScopeSpec;
  ReductionIdScopeSpec.Adopt(C->getQualifierLoc());
  DeclarationNameInfo NameInfo = C->getNameInfo();
  if (NameInfo.getName()) {
    NameInfo = getDerived().TransformDeclarationNameInfo(NameInfo);
    if (!NameInfo.getName())
      return nullptr;
  }
  // Build a list of all UDR decls with the same names ranged by the Scopes.
  // The Scope boundary is a duplication of the previous decl.
  llvm::SmallVector<Expr *, 16> UnresolvedReductions;
  for (auto *E : C->reduction_ops()) {
    // Transform all the decls.
    if (E) {
      auto *ULE = cast<UnresolvedLookupExpr>(E);
      UnresolvedSet<8> Decls;
      for (auto *D : ULE->decls()) {
        NamedDecl *InstD =
            cast<NamedDecl>(getDerived().TransformDecl(E->getExprLoc(), D));
        Decls.addDecl(InstD, InstD->getAccess());
      }
      UnresolvedReductions.push_back(
          UnresolvedLookupExpr::Create(
              SemaRef.Context, /*NamingClass=*/nullptr,
              ReductionIdScopeSpec.getWithLocInContext(SemaRef.Context),
              NameInfo, /*ADL=*/true, ULE->isOverloaded(),
              Decls.begin(), Decls.end()));
    } else
      UnresolvedReductions.push_back(nullptr);
  }
  return getDerived().RebuildOMPReductionClause(
      Vars, C->getBeginLoc(), C->getLParenLoc(), C->getColonLoc(),
      C->getEndLoc(), ReductionIdScopeSpec, NameInfo, UnresolvedReductions);
}

template <typename Derived>
OMPClause *TreeTransform<Derived>::TransformOMPTaskReductionClause(
    OMPTaskReductionClause *C) {
  llvm::SmallVector<Expr *, 16> Vars;
  Vars.reserve(C->varlist_size());
  for (auto *VE : C->varlists()) {
    ExprResult EVar = getDerived().TransformExpr(cast<Expr>(VE));
    if (EVar.isInvalid())
      return nullptr;
    Vars.push_back(EVar.get());
  }
  CXXScopeSpec ReductionIdScopeSpec;
  ReductionIdScopeSpec.Adopt(C->getQualifierLoc());
  DeclarationNameInfo NameInfo = C->getNameInfo();
  if (NameInfo.getName()) {
    NameInfo = getDerived().TransformDeclarationNameInfo(NameInfo);
    if (!NameInfo.getName())
      return nullptr;
  }
  // Build a list of all UDR decls with the same names ranged by the Scopes.
  // The Scope boundary is a duplication of the previous decl.
  llvm::SmallVector<Expr *, 16> UnresolvedReductions;
  for (auto *E : C->reduction_ops()) {
    // Transform all the decls.
    if (E) {
      auto *ULE = cast<UnresolvedLookupExpr>(E);
      UnresolvedSet<8> Decls;
      for (auto *D : ULE->decls()) {
        NamedDecl *InstD =
            cast<NamedDecl>(getDerived().TransformDecl(E->getExprLoc(), D));
        Decls.addDecl(InstD, InstD->getAccess());
      }
      UnresolvedReductions.push_back(UnresolvedLookupExpr::Create(
          SemaRef.Context, /*NamingClass=*/nullptr,
          ReductionIdScopeSpec.getWithLocInContext(SemaRef.Context), NameInfo,
          /*ADL=*/true, ULE->isOverloaded(), Decls.begin(), Decls.end()));
    } else
      UnresolvedReductions.push_back(nullptr);
  }
  return getDerived().RebuildOMPTaskReductionClause(
      Vars, C->getBeginLoc(), C->getLParenLoc(), C->getColonLoc(),
      C->getEndLoc(), ReductionIdScopeSpec, NameInfo, UnresolvedReductions);
}

template <typename Derived>
OMPClause *
TreeTransform<Derived>::TransformOMPInReductionClause(OMPInReductionClause *C) {
  llvm::SmallVector<Expr *, 16> Vars;
  Vars.reserve(C->varlist_size());
  for (auto *VE : C->varlists()) {
    ExprResult EVar = getDerived().TransformExpr(cast<Expr>(VE));
    if (EVar.isInvalid())
      return nullptr;
    Vars.push_back(EVar.get());
  }
  CXXScopeSpec ReductionIdScopeSpec;
  ReductionIdScopeSpec.Adopt(C->getQualifierLoc());
  DeclarationNameInfo NameInfo = C->getNameInfo();
  if (NameInfo.getName()) {
    NameInfo = getDerived().TransformDeclarationNameInfo(NameInfo);
    if (!NameInfo.getName())
      return nullptr;
  }
  // Build a list of all UDR decls with the same names ranged by the Scopes.
  // The Scope boundary is a duplication of the previous decl.
  llvm::SmallVector<Expr *, 16> UnresolvedReductions;
  for (auto *E : C->reduction_ops()) {
    // Transform all the decls.
    if (E) {
      auto *ULE = cast<UnresolvedLookupExpr>(E);
      UnresolvedSet<8> Decls;
      for (auto *D : ULE->decls()) {
        NamedDecl *InstD =
            cast<NamedDecl>(getDerived().TransformDecl(E->getExprLoc(), D));
        Decls.addDecl(InstD, InstD->getAccess());
      }
      UnresolvedReductions.push_back(UnresolvedLookupExpr::Create(
          SemaRef.Context, /*NamingClass=*/nullptr,
          ReductionIdScopeSpec.getWithLocInContext(SemaRef.Context), NameInfo,
          /*ADL=*/true, ULE->isOverloaded(), Decls.begin(), Decls.end()));
    } else
      UnresolvedReductions.push_back(nullptr);
  }
  return getDerived().RebuildOMPInReductionClause(
      Vars, C->getBeginLoc(), C->getLParenLoc(), C->getColonLoc(),
      C->getEndLoc(), ReductionIdScopeSpec, NameInfo, UnresolvedReductions);
}
// Variable-list clauses that additionally carry extra payload (a step, an
// alignment, a dependency kind, a device expression, ...).  The list is
// transformed element-by-element first; any extra expression is transformed
// afterwards, and any failure aborts the clause (nullptr).
template <typename Derived>
OMPClause *
TreeTransform<Derived>::TransformOMPLinearClause(OMPLinearClause *C) {
  llvm::SmallVector<Expr *, 16> Vars;
  Vars.reserve(C->varlist_size());
  for (auto *VE : C->varlists()) {
    ExprResult EVar = getDerived().TransformExpr(cast<Expr>(VE));
    if (EVar.isInvalid())
      return nullptr;
    Vars.push_back(EVar.get());
  }
  // The linear step may be null; TransformExpr passes null through.
  ExprResult Step = getDerived().TransformExpr(C->getStep());
  if (Step.isInvalid())
    return nullptr;
  return getDerived().RebuildOMPLinearClause(
      Vars, Step.get(), C->getBeginLoc(), C->getLParenLoc(), C->getModifier(),
      C->getModifierLoc(), C->getColonLoc(), C->getEndLoc());
}

template <typename Derived>
OMPClause *
TreeTransform<Derived>::TransformOMPAlignedClause(OMPAlignedClause *C) {
  llvm::SmallVector<Expr *, 16> Vars;
  Vars.reserve(C->varlist_size());
  for (auto *VE : C->varlists()) {
    ExprResult EVar = getDerived().TransformExpr(cast<Expr>(VE));
    if (EVar.isInvalid())
      return nullptr;
    Vars.push_back(EVar.get());
  }
  ExprResult Alignment = getDerived().TransformExpr(C->getAlignment());
  if (Alignment.isInvalid())
    return nullptr;
  return getDerived().RebuildOMPAlignedClause(
      Vars, Alignment.get(), C->getBeginLoc(), C->getLParenLoc(),
      C->getColonLoc(), C->getEndLoc());
}

template <typename Derived>
OMPClause *
TreeTransform<Derived>::TransformOMPCopyinClause(OMPCopyinClause *C) {
  llvm::SmallVector<Expr *, 16> Vars;
  Vars.reserve(C->varlist_size());
  for (auto *VE : C->varlists()) {
    ExprResult EVar = getDerived().TransformExpr(cast<Expr>(VE));
    if (EVar.isInvalid())
      return nullptr;
    Vars.push_back(EVar.get());
  }
  return getDerived().RebuildOMPCopyinClause(Vars, C->getBeginLoc(),
                                             C->getLParenLoc(), C->getEndLoc());
}

template <typename Derived>
OMPClause *
TreeTransform<Derived>::TransformOMPCopyprivateClause(OMPCopyprivateClause *C) {
  llvm::SmallVector<Expr *, 16> Vars;
  Vars.reserve(C->varlist_size());
  for (auto *VE : C->varlists()) {
    ExprResult EVar = getDerived().TransformExpr(cast<Expr>(VE));
    if (EVar.isInvalid())
      return nullptr;
    Vars.push_back(EVar.get());
  }
  return getDerived().RebuildOMPCopyprivateClause(
      Vars, C->getBeginLoc(), C->getLParenLoc(), C->getEndLoc());
}

template <typename Derived>
OMPClause *TreeTransform<Derived>::TransformOMPFlushClause(OMPFlushClause *C) {
  llvm::SmallVector<Expr *, 16> Vars;
  Vars.reserve(C->varlist_size());
  for (auto *VE : C->varlists()) {
    ExprResult EVar = getDerived().TransformExpr(cast<Expr>(VE));
    if (EVar.isInvalid())
      return nullptr;
    Vars.push_back(EVar.get());
  }
  return getDerived().RebuildOMPFlushClause(Vars, C->getBeginLoc(),
                                            C->getLParenLoc(), C->getEndLoc());
}

template <typename Derived>
OMPClause *
TreeTransform<Derived>::TransformOMPDependClause(OMPDependClause *C) {
  llvm::SmallVector<Expr *, 16> Vars;
  Vars.reserve(C->varlist_size());
  for (auto *VE : C->varlists()) {
    ExprResult EVar = getDerived().TransformExpr(cast<Expr>(VE));
    if (EVar.isInvalid())
      return nullptr;
    Vars.push_back(EVar.get());
  }
  return getDerived().RebuildOMPDependClause(
      C->getDependencyKind(), C->getDependencyLoc(), C->getColonLoc(), Vars,
      C->getBeginLoc(), C->getLParenLoc(), C->getEndLoc());
}

template <typename Derived>
OMPClause *
TreeTransform<Derived>::TransformOMPDeviceClause(OMPDeviceClause *C) {
  ExprResult E = getDerived().TransformExpr(C->getDevice());
  if (E.isInvalid())
    return nullptr;
  return getDerived().RebuildOMPDeviceClause(E.get(), C->getBeginLoc(),
                                             C->getLParenLoc(), C->getEndLoc());
}
// Shared helper for mappable-expression-list clauses (e.g. 'map').
//
// Transforms, in order:
//   1. every expression in the clause's variable list (into \p Vars),
//   2. the user-defined mapper's nested-name qualifier and identifier
//      (into \p MapperIdScopeSpec / \p MapperIdInfo),
//   3. per list item, the optional UnresolvedLookupExpr holding candidate
//      OMPDeclareMapperDecls from the dependent parse (into
//      \p UnresolvedMappers; null entries are propagated as null).
//
// Returns true on any transformation failure (error convention), false on
// success.  The output containers are filled in-place for the caller to
// pass to the corresponding Rebuild method.
template <typename Derived, class T>
bool transformOMPMappableExprListClause(
    TreeTransform<Derived> &TT, OMPMappableExprListClause<T> *C,
    llvm::SmallVectorImpl<Expr *> &Vars, CXXScopeSpec &MapperIdScopeSpec,
    DeclarationNameInfo &MapperIdInfo,
    llvm::SmallVectorImpl<Expr *> &UnresolvedMappers) {
  // Transform expressions in the list.
  Vars.reserve(C->varlist_size());
  for (auto *VE : C->varlists()) {
    ExprResult EVar = TT.getDerived().TransformExpr(cast<Expr>(VE));
    if (EVar.isInvalid())
      return true;
    Vars.push_back(EVar.get());
  }
  // Transform mapper scope specifier and identifier.
  NestedNameSpecifierLoc QualifierLoc;
  if (C->getMapperQualifierLoc()) {
    QualifierLoc = TT.getDerived().TransformNestedNameSpecifierLoc(
        C->getMapperQualifierLoc());
    if (!QualifierLoc)
      return true;
  }
  MapperIdScopeSpec.Adopt(QualifierLoc);
  MapperIdInfo = C->getMapperIdInfo();
  if (MapperIdInfo.getName()) {
    MapperIdInfo = TT.getDerived().TransformDeclarationNameInfo(MapperIdInfo);
    if (!MapperIdInfo.getName())
      return true;
  }
  // Build a list of all candidate OMPDeclareMapperDecls, which is provided by
  // the previous user-defined mapper lookup in dependent environment.
  for (auto *E : C->mapperlists()) {
    // Transform all the decls.
    if (E) {
      auto *ULE = cast<UnresolvedLookupExpr>(E);
      UnresolvedSet<8> Decls;
      for (auto *D : ULE->decls()) {
        NamedDecl *InstD =
            cast<NamedDecl>(TT.getDerived().TransformDecl(E->getExprLoc(), D));
        Decls.addDecl(InstD, InstD->getAccess());
      }
      UnresolvedMappers.push_back(UnresolvedLookupExpr::Create(
          TT.getSema().Context, /*NamingClass=*/nullptr,
          MapperIdScopeSpec.getWithLocInContext(TT.getSema().Context),
          MapperIdInfo, /*ADL=*/true, ULE->isOverloaded(), Decls.begin(),
          Decls.end()));
    } else {
      UnresolvedMappers.push_back(nullptr);
    }
  }
  return false;
}
// Transform a 'map' clause: rewrite its variable list and user-defined
// mapper information, then rebuild the clause with the original locations
// and map-type modifiers.
template <typename Derived>
OMPClause *TreeTransform<Derived>::TransformOMPMapClause(OMPMapClause *C) {
  llvm::SmallVector<Expr *, 16> TransformedVars;
  llvm::SmallVector<Expr *, 16> Mappers;
  CXXScopeSpec MapperScope;
  DeclarationNameInfo MapperName;
  if (transformOMPMappableExprListClause<Derived, OMPMapClause>(
          *this, C, TransformedVars, MapperScope, MapperName, Mappers))
    return nullptr;
  OMPVarListLocTy Locs(C->getBeginLoc(), C->getLParenLoc(), C->getEndLoc());
  return getDerived().RebuildOMPMapClause(
      C->getMapTypeModifiers(), C->getMapTypeModifiersLoc(), MapperScope,
      MapperName, C->getMapType(), C->isImplicitMapType(), C->getMapLoc(),
      C->getColonLoc(), TransformedVars, Locs, Mappers);
}
// Transform an 'allocate' clause: rewrite the optional allocator expression
// and every variable in the clause's list, then rebuild the clause.
template <typename Derived>
OMPClause *
TreeTransform<Derived>::TransformOMPAllocateClause(OMPAllocateClause *C) {
  Expr *NewAllocator = nullptr;
  if (Expr *OldAllocator = C->getAllocator()) {
    ExprResult Res = getDerived().TransformExpr(OldAllocator);
    if (Res.isInvalid())
      return nullptr;
    NewAllocator = Res.get();
  }
  llvm::SmallVector<Expr *, 16> VarList;
  VarList.reserve(C->varlist_size());
  for (auto *Var : C->varlists()) {
    ExprResult NewVar = getDerived().TransformExpr(cast<Expr>(Var));
    if (NewVar.isInvalid())
      return nullptr;
    VarList.push_back(NewVar.get());
  }
  return getDerived().RebuildOMPAllocateClause(
      NewAllocator, VarList, C->getBeginLoc(), C->getLParenLoc(),
      C->getColonLoc(), C->getEndLoc());
}
// Transform the num_teams expression and rebuild the clause around it.
template <typename Derived>
OMPClause *
TreeTransform<Derived>::TransformOMPNumTeamsClause(OMPNumTeamsClause *C) {
  ExprResult NumTeams = getDerived().TransformExpr(C->getNumTeams());
  if (NumTeams.isInvalid())
    return nullptr;
  return getDerived().RebuildOMPNumTeamsClause(
      NumTeams.get(), C->getBeginLoc(), C->getLParenLoc(), C->getEndLoc());
}
// Transform the thread_limit expression and rebuild the clause around it.
template <typename Derived>
OMPClause *
TreeTransform<Derived>::TransformOMPThreadLimitClause(OMPThreadLimitClause *C) {
  ExprResult Limit = getDerived().TransformExpr(C->getThreadLimit());
  if (Limit.isInvalid())
    return nullptr;
  return getDerived().RebuildOMPThreadLimitClause(
      Limit.get(), C->getBeginLoc(), C->getLParenLoc(), C->getEndLoc());
}
// Transform the priority expression and rebuild the clause around it.
template <typename Derived>
OMPClause *
TreeTransform<Derived>::TransformOMPPriorityClause(OMPPriorityClause *C) {
  ExprResult Priority = getDerived().TransformExpr(C->getPriority());
  if (Priority.isInvalid())
    return nullptr;
  return getDerived().RebuildOMPPriorityClause(
      Priority.get(), C->getBeginLoc(), C->getLParenLoc(), C->getEndLoc());
}
// Transform the grainsize expression and rebuild the clause around it.
template <typename Derived>
OMPClause *
TreeTransform<Derived>::TransformOMPGrainsizeClause(OMPGrainsizeClause *C) {
  ExprResult Grainsize = getDerived().TransformExpr(C->getGrainsize());
  if (Grainsize.isInvalid())
    return nullptr;
  return getDerived().RebuildOMPGrainsizeClause(
      Grainsize.get(), C->getBeginLoc(), C->getLParenLoc(), C->getEndLoc());
}
// Transform the num_tasks expression and rebuild the clause around it.
template <typename Derived>
OMPClause *
TreeTransform<Derived>::TransformOMPNumTasksClause(OMPNumTasksClause *C) {
  ExprResult NumTasks = getDerived().TransformExpr(C->getNumTasks());
  if (NumTasks.isInvalid())
    return nullptr;
  return getDerived().RebuildOMPNumTasksClause(
      NumTasks.get(), C->getBeginLoc(), C->getLParenLoc(), C->getEndLoc());
}
// Transform the hint expression and rebuild the clause around it.
template <typename Derived>
OMPClause *TreeTransform<Derived>::TransformOMPHintClause(OMPHintClause *C) {
  ExprResult Hint = getDerived().TransformExpr(C->getHint());
  if (Hint.isInvalid())
    return nullptr;
  return getDerived().RebuildOMPHintClause(Hint.get(), C->getBeginLoc(),
                                           C->getLParenLoc(), C->getEndLoc());
}
// Transform the dist_schedule chunk-size expression and rebuild the clause
// with the original schedule kind and locations.
template <typename Derived>
OMPClause *TreeTransform<Derived>::TransformOMPDistScheduleClause(
    OMPDistScheduleClause *C) {
  ExprResult ChunkSize = getDerived().TransformExpr(C->getChunkSize());
  if (ChunkSize.isInvalid())
    return nullptr;
  return getDerived().RebuildOMPDistScheduleClause(
      C->getDistScheduleKind(), ChunkSize.get(), C->getBeginLoc(),
      C->getLParenLoc(), C->getDistScheduleKindLoc(), C->getCommaLoc(),
      C->getEndLoc());
}
// A 'defaultmap' clause carries no expressions or types to transform, so
// the original clause is reused unchanged.
template <typename Derived>
OMPClause *
TreeTransform<Derived>::TransformOMPDefaultmapClause(OMPDefaultmapClause *C) {
  return C;
}
// Transform a 'to' clause: rewrite its variable list and user-defined mapper
// information, then rebuild the clause.
template <typename Derived>
OMPClause *TreeTransform<Derived>::TransformOMPToClause(OMPToClause *C) {
  llvm::SmallVector<Expr *, 16> TransformedVars;
  llvm::SmallVector<Expr *, 16> Mappers;
  CXXScopeSpec MapperScope;
  DeclarationNameInfo MapperName;
  if (transformOMPMappableExprListClause<Derived, OMPToClause>(
          *this, C, TransformedVars, MapperScope, MapperName, Mappers))
    return nullptr;
  OMPVarListLocTy Locs(C->getBeginLoc(), C->getLParenLoc(), C->getEndLoc());
  return getDerived().RebuildOMPToClause(TransformedVars, MapperScope,
                                         MapperName, Locs, Mappers);
}
// Transform a 'from' clause: rewrite its variable list and user-defined
// mapper information, then rebuild the clause.
template <typename Derived>
OMPClause *TreeTransform<Derived>::TransformOMPFromClause(OMPFromClause *C) {
  llvm::SmallVector<Expr *, 16> TransformedVars;
  llvm::SmallVector<Expr *, 16> Mappers;
  CXXScopeSpec MapperScope;
  DeclarationNameInfo MapperName;
  if (transformOMPMappableExprListClause<Derived, OMPFromClause>(
          *this, C, TransformedVars, MapperScope, MapperName, Mappers))
    return nullptr;
  OMPVarListLocTy Locs(C->getBeginLoc(), C->getLParenLoc(), C->getEndLoc());
  return getDerived().RebuildOMPFromClause(TransformedVars, MapperScope,
                                           MapperName, Locs, Mappers);
}
// Transform a 'use_device_ptr' clause by rewriting each listed variable.
template <typename Derived>
OMPClause *TreeTransform<Derived>::TransformOMPUseDevicePtrClause(
    OMPUseDevicePtrClause *C) {
  OMPVarListLocTy Locs(C->getBeginLoc(), C->getLParenLoc(), C->getEndLoc());
  llvm::SmallVector<Expr *, 16> VarList;
  VarList.reserve(C->varlist_size());
  for (auto *Var : C->varlists()) {
    ExprResult NewVar = getDerived().TransformExpr(cast<Expr>(Var));
    if (NewVar.isInvalid())
      return nullptr;
    VarList.push_back(NewVar.get());
  }
  return getDerived().RebuildOMPUseDevicePtrClause(VarList, Locs);
}
// Transform an 'is_device_ptr' clause by rewriting each listed variable.
template <typename Derived>
OMPClause *
TreeTransform<Derived>::TransformOMPIsDevicePtrClause(OMPIsDevicePtrClause *C) {
  OMPVarListLocTy Locs(C->getBeginLoc(), C->getLParenLoc(), C->getEndLoc());
  llvm::SmallVector<Expr *, 16> VarList;
  VarList.reserve(C->varlist_size());
  for (auto *Var : C->varlists()) {
    ExprResult NewVar = getDerived().TransformExpr(cast<Expr>(Var));
    if (NewVar.isInvalid())
      return nullptr;
    VarList.push_back(NewVar.get());
  }
  return getDerived().RebuildOMPIsDevicePtrClause(VarList, Locs);
}
//===----------------------------------------------------------------------===//
// Expression transformation
//===----------------------------------------------------------------------===//
// A ConstantExpr wrapper is dropped during transformation; only its
// subexpression is transformed (the wrapper is re-derived later if needed).
template<typename Derived>
ExprResult
TreeTransform<Derived>::TransformConstantExpr(ConstantExpr *E) {
  return TransformExpr(E->getSubExpr());
}
// A predefined expression (__func__ and friends) only needs rebuilding when
// its type is dependent; otherwise the original node is reused directly.
template<typename Derived>
ExprResult
TreeTransform<Derived>::TransformPredefinedExpr(PredefinedExpr *E) {
  if (E->isTypeDependent())
    return getDerived().RebuildPredefinedExpr(E->getLocation(),
                                              E->getIdentKind());
  return E;
}
/// Transform a declaration reference: rewrite the qualifier, the referenced
/// declaration, the name, and any explicit template arguments. If nothing
/// changed (and rebuilding isn't forced) the original node is reused after
/// being marked referenced in the new context.
template<typename Derived>
ExprResult
TreeTransform<Derived>::TransformDeclRefExpr(DeclRefExpr *E) {
  NestedNameSpecifierLoc QualifierLoc;
  if (E->getQualifierLoc()) {
    QualifierLoc
      = getDerived().TransformNestedNameSpecifierLoc(E->getQualifierLoc());
    if (!QualifierLoc)
      return ExprError();
  }
  ValueDecl *ND
    = cast_or_null<ValueDecl>(getDerived().TransformDecl(E->getLocation(),
                                                         E->getDecl()));
  if (!ND)
    return ExprError();
  DeclarationNameInfo NameInfo = E->getNameInfo();
  if (NameInfo.getName()) {
    NameInfo = getDerived().TransformDeclarationNameInfo(NameInfo);
    if (!NameInfo.getName())
      return ExprError();
  }
  if (!getDerived().AlwaysRebuild() &&
      QualifierLoc == E->getQualifierLoc() &&
      ND == E->getDecl() &&
      NameInfo.getName() == E->getDecl()->getDeclName() &&
      !E->hasExplicitTemplateArgs()) {
    // Mark it referenced in the new context regardless.
    // FIXME: this is a bit instantiation-specific.
    SemaRef.MarkDeclRefReferenced(E);
    return E;
  }
  // TemplateArgs stays null unless the reference spelled explicit arguments.
  TemplateArgumentListInfo TransArgs, *TemplateArgs = nullptr;
  if (E->hasExplicitTemplateArgs()) {
    TemplateArgs = &TransArgs;
    TransArgs.setLAngleLoc(E->getLAngleLoc());
    TransArgs.setRAngleLoc(E->getRAngleLoc());
    if (getDerived().TransformTemplateArguments(E->getTemplateArgs(),
                                                E->getNumTemplateArgs(),
                                                TransArgs))
      return ExprError();
  }
  return getDerived().RebuildDeclRefExpr(QualifierLoc, ND, NameInfo,
                                         TemplateArgs);
}
// Integer literals need no transformation; the original node is reused.
template<typename Derived>
ExprResult
TreeTransform<Derived>::TransformIntegerLiteral(IntegerLiteral *E) {
  return E;
}
// Fixed-point literals need no transformation; the original node is reused.
template <typename Derived>
ExprResult TreeTransform<Derived>::TransformFixedPointLiteral(
    FixedPointLiteral *E) {
  return E;
}
// Floating-point literals need no transformation; the original node is reused.
template<typename Derived>
ExprResult
TreeTransform<Derived>::TransformFloatingLiteral(FloatingLiteral *E) {
  return E;
}
// Imaginary literals need no transformation; the original node is reused.
template<typename Derived>
ExprResult
TreeTransform<Derived>::TransformImaginaryLiteral(ImaginaryLiteral *E) {
  return E;
}
// String literals need no transformation; the original node is reused.
template<typename Derived>
ExprResult
TreeTransform<Derived>::TransformStringLiteral(StringLiteral *E) {
  return E;
}
// Character literals need no transformation; the original node is reused.
template<typename Derived>
ExprResult
TreeTransform<Derived>::TransformCharacterLiteral(CharacterLiteral *E) {
  return E;
}
// A user-defined literal is kept as-is, but its literal operator is marked
// referenced in the new context and the node may be bound to a temporary.
template<typename Derived>
ExprResult
TreeTransform<Derived>::TransformUserDefinedLiteral(UserDefinedLiteral *E) {
  if (FunctionDecl *FD = E->getDirectCallee())
    SemaRef.MarkFunctionReferenced(E->getBeginLoc(), FD);
  return SemaRef.MaybeBindToTemporary(E);
}
/// Transform a C11 _Generic expression: rewrite the controlling expression
/// and each association's type (if any) and expression, then rebuild.
template<typename Derived>
ExprResult
TreeTransform<Derived>::TransformGenericSelectionExpr(GenericSelectionExpr *E) {
  ExprResult ControllingExpr =
    getDerived().TransformExpr(E->getControllingExpr());
  if (ControllingExpr.isInvalid())
    return ExprError();
  SmallVector<Expr *, 4> AssocExprs;
  SmallVector<TypeSourceInfo *, 4> AssocTypes;
  for (const GenericSelectionExpr::Association &Assoc : E->associations()) {
    TypeSourceInfo *TSI = Assoc.getTypeSourceInfo();
    if (TSI) {
      TypeSourceInfo *AssocType = getDerived().TransformType(TSI);
      if (!AssocType)
        return ExprError();
      AssocTypes.push_back(AssocType);
    } else {
      // The 'default' association has no type; keep the slot aligned with
      // its expression via a null entry.
      AssocTypes.push_back(nullptr);
    }
    ExprResult AssocExpr =
        getDerived().TransformExpr(Assoc.getAssociationExpr());
    if (AssocExpr.isInvalid())
      return ExprError();
    AssocExprs.push_back(AssocExpr.get());
  }
  return getDerived().RebuildGenericSelectionExpr(E->getGenericLoc(),
                                                  E->getDefaultLoc(),
                                                  E->getRParenLoc(),
                                                  ControllingExpr.get(),
                                                  AssocTypes,
                                                  AssocExprs);
}
// Transform the parenthesized subexpression; reuse the original node when
// nothing changed and rebuilding is not forced.
template<typename Derived>
ExprResult
TreeTransform<Derived>::TransformParenExpr(ParenExpr *E) {
  ExprResult Inner = getDerived().TransformExpr(E->getSubExpr());
  if (Inner.isInvalid())
    return ExprError();
  if (getDerived().AlwaysRebuild() || Inner.get() != E->getSubExpr())
    return getDerived().RebuildParenExpr(Inner.get(), E->getLParen(),
                                         E->getRParen());
  return E;
}
/// The operand of a unary address-of operator has special rules: it's
/// allowed to refer to a non-static member of a class even if there's no 'this'
/// object available.
template<typename Derived>
ExprResult
TreeTransform<Derived>::TransformAddressOfOperand(Expr *E) {
  auto *DRE = dyn_cast<DependentScopeDeclRefExpr>(E);
  if (!DRE)
    return getDerived().TransformExpr(E);
  return getDerived().TransformDependentScopeDeclRefExpr(DRE, true, nullptr);
}
// Transform a unary operator. The operand of '&' is routed through
// TransformAddressOfOperand (it may legally name a non-static member with no
// object); all other operands are transformed normally.
template<typename Derived>
ExprResult
TreeTransform<Derived>::TransformUnaryOperator(UnaryOperator *E) {
  ExprResult Operand = E->getOpcode() == UO_AddrOf
                           ? TransformAddressOfOperand(E->getSubExpr())
                           : TransformExpr(E->getSubExpr());
  if (Operand.isInvalid())
    return ExprError();
  if (Operand.get() == E->getSubExpr() && !getDerived().AlwaysRebuild())
    return E;
  return getDerived().RebuildUnaryOperator(E->getOperatorLoc(),
                                           E->getOpcode(),
                                           Operand.get());
}
/// Transform __builtin_offsetof: rewrite the base type and each array-index
/// component, then rebuild from parser-style components.
template<typename Derived>
ExprResult
TreeTransform<Derived>::TransformOffsetOfExpr(OffsetOfExpr *E) {
  // Transform the type.
  TypeSourceInfo *Type = getDerived().TransformType(E->getTypeSourceInfo());
  if (!Type)
    return ExprError();
  // Transform all of the components into components similar to what the
  // parser uses.
  // FIXME: It would be slightly more efficient in the non-dependent case to
  // just map FieldDecls, rather than requiring the rebuilder to look for
  // the fields again. However, __builtin_offsetof is rare enough in
  // template code that we don't care.
  bool ExprChanged = false;
  typedef Sema::OffsetOfComponent Component;
  SmallVector<Component, 4> Components;
  for (unsigned I = 0, N = E->getNumComponents(); I != N; ++I) {
    const OffsetOfNode &ON = E->getComponent(I);
    Component Comp;
    Comp.isBrackets = true;
    Comp.LocStart = ON.getSourceRange().getBegin();
    Comp.LocEnd = ON.getSourceRange().getEnd();
    switch (ON.getKind()) {
    case OffsetOfNode::Array: {
      // Array components carry an index expression that must be transformed.
      Expr *FromIndex = E->getIndexExpr(ON.getArrayExprIndex());
      ExprResult Index = getDerived().TransformExpr(FromIndex);
      if (Index.isInvalid())
        return ExprError();
      ExprChanged = ExprChanged || Index.get() != FromIndex;
      Comp.isBrackets = true;
      Comp.U.E = Index.get();
      break;
    }
    case OffsetOfNode::Field:
    case OffsetOfNode::Identifier:
      Comp.isBrackets = false;
      Comp.U.IdentInfo = ON.getFieldName();
      if (!Comp.U.IdentInfo)
        continue;
      break;
    case OffsetOfNode::Base:
      // Will be recomputed during the rebuild.
      continue;
    }
    Components.push_back(Comp);
  }
  // If nothing changed, retain the existing expression.
  if (!getDerived().AlwaysRebuild() &&
      Type == E->getTypeSourceInfo() &&
      !ExprChanged)
    return E;
  // Build a new offsetof expression.
  return getDerived().RebuildOffsetOfExpr(E->getOperatorLoc(), Type,
                                          Components, E->getRParenLoc());
}
// Opaque values are reused as-is; the assert checks that any wrapped source
// expression's type was already transformed elsewhere.
template<typename Derived>
ExprResult
TreeTransform<Derived>::TransformOpaqueValueExpr(OpaqueValueExpr *E) {
  assert((!E->getSourceExpr() || getDerived().AlreadyTransformed(E->getType())) &&
         "opaque value expression requires transformation");
  return E;
}
// Typo-correction placeholders pass through transformation untouched.
template<typename Derived>
ExprResult
TreeTransform<Derived>::TransformTypoExpr(TypoExpr *E) {
  return E;
}
/// Transform a pseudo-object expression by recreating and transforming its
/// syntactic form, then reapplying the rvalue check if needed.
template<typename Derived>
ExprResult
TreeTransform<Derived>::TransformPseudoObjectExpr(PseudoObjectExpr *E) {
  // Rebuild the syntactic form.  The original syntactic form has
  // opaque-value expressions in it, so strip those away and rebuild
  // the result.  This is a really awful way of doing this, but the
  // better solution (rebuilding the semantic expressions and
  // rebinding OVEs as necessary) doesn't work; we'd need
  // TreeTransform to not strip away implicit conversions.
  Expr *newSyntacticForm = SemaRef.recreateSyntacticForm(E);
  ExprResult result = getDerived().TransformExpr(newSyntacticForm);
  if (result.isInvalid()) return ExprError();
  // If that gives us a pseudo-object result back, the pseudo-object
  // expression must have been an lvalue-to-rvalue conversion which we
  // should reapply.
  if (result.get()->hasPlaceholderType(BuiltinType::PseudoObject))
    result = SemaRef.checkPseudoObjectRValue(result.get());
  return result;
}
/// Transform sizeof/alignof and related traits, handling both the
/// type-argument form and the (unevaluated) expression-argument form, with
/// recovery for sizeof(T::X) where X turns out to name a type.
template<typename Derived>
ExprResult
TreeTransform<Derived>::TransformUnaryExprOrTypeTraitExpr(
                                           UnaryExprOrTypeTraitExpr *E) {
  if (E->isArgumentType()) {
    TypeSourceInfo *OldT = E->getArgumentTypeInfo();
    TypeSourceInfo *NewT = getDerived().TransformType(OldT);
    if (!NewT)
      return ExprError();
    if (!getDerived().AlwaysRebuild() && OldT == NewT)
      return E;
    return getDerived().RebuildUnaryExprOrTypeTrait(NewT, E->getOperatorLoc(),
                                                    E->getKind(),
                                                    E->getSourceRange());
  }
  // C++0x [expr.sizeof]p1:
  //   The operand is either an expression, which is an unevaluated operand
  //   [...]
  EnterExpressionEvaluationContext Unevaluated(
      SemaRef, Sema::ExpressionEvaluationContext::Unevaluated,
      Sema::ReuseLambdaContextDecl);
  // Try to recover if we have something like sizeof(T::X) where X is a type.
  // Notably, there must be *exactly* one set of parens if X is a type.
  TypeSourceInfo *RecoveryTSI = nullptr;
  ExprResult SubExpr;
  auto *PE = dyn_cast<ParenExpr>(E->getArgumentExpr());
  if (auto *DRE =
          PE ? dyn_cast<DependentScopeDeclRefExpr>(PE->getSubExpr()) : nullptr)
    SubExpr = getDerived().TransformParenDependentScopeDeclRefExpr(
        PE, DRE, false, &RecoveryTSI);
  else
    SubExpr = getDerived().TransformExpr(E->getArgumentExpr());
  if (RecoveryTSI) {
    // The operand resolved to a type after all; rebuild in type form.
    return getDerived().RebuildUnaryExprOrTypeTrait(
        RecoveryTSI, E->getOperatorLoc(), E->getKind(), E->getSourceRange());
  } else if (SubExpr.isInvalid())
    return ExprError();
  if (!getDerived().AlwaysRebuild() && SubExpr.get() == E->getArgumentExpr())
    return E;
  return getDerived().RebuildUnaryExprOrTypeTrait(SubExpr.get(),
                                                  E->getOperatorLoc(),
                                                  E->getKind(),
                                                  E->getSourceRange());
}
// Transform both operands of an array subscript and rebuild unless nothing
// changed (and rebuilding is not forced).
template<typename Derived>
ExprResult
TreeTransform<Derived>::TransformArraySubscriptExpr(ArraySubscriptExpr *E) {
  ExprResult NewLHS = getDerived().TransformExpr(E->getLHS());
  if (NewLHS.isInvalid())
    return ExprError();
  ExprResult NewRHS = getDerived().TransformExpr(E->getRHS());
  if (NewRHS.isInvalid())
    return ExprError();
  if (NewLHS.get() == E->getLHS() && NewRHS.get() == E->getRHS() &&
      !getDerived().AlwaysRebuild())
    return E;
  return getDerived().RebuildArraySubscriptExpr(
      NewLHS.get(),
      /*FIXME:*/ E->getLHS()->getBeginLoc(), NewRHS.get(),
      E->getRBracketLoc());
}
/// Transform an OpenMP array section (base[lower:length]); the lower bound
/// and length are each optional.
template <typename Derived>
ExprResult
TreeTransform<Derived>::TransformOMPArraySectionExpr(OMPArraySectionExpr *E) {
  ExprResult Base = getDerived().TransformExpr(E->getBase());
  if (Base.isInvalid())
    return ExprError();
  ExprResult LowerBound;
  if (E->getLowerBound()) {
    LowerBound = getDerived().TransformExpr(E->getLowerBound());
    if (LowerBound.isInvalid())
      return ExprError();
  }
  ExprResult Length;
  if (E->getLength()) {
    Length = getDerived().TransformExpr(E->getLength());
    if (Length.isInvalid())
      return ExprError();
  }
  // Note: when a bound was absent both sides of the comparison are null,
  // so the reuse check still holds.
  if (!getDerived().AlwaysRebuild() && Base.get() == E->getBase() &&
      LowerBound.get() == E->getLowerBound() && Length.get() == E->getLength())
    return E;
  return getDerived().RebuildOMPArraySectionExpr(
      Base.get(), E->getBase()->getEndLoc(), LowerBound.get(), E->getColonLoc(),
      Length.get(), E->getRBracketLoc());
}
/// Transform a call expression: rewrite the callee and arguments, reusing the
/// original node (rebound to a temporary) when nothing changed.
template<typename Derived>
ExprResult
TreeTransform<Derived>::TransformCallExpr(CallExpr *E) {
  // Transform the callee.
  ExprResult Callee = getDerived().TransformExpr(E->getCallee());
  if (Callee.isInvalid())
    return ExprError();
  // Transform arguments.
  bool ArgChanged = false;
  SmallVector<Expr*, 8> Args;
  if (getDerived().TransformExprs(E->getArgs(), E->getNumArgs(), true, Args,
                                  &ArgChanged))
    return ExprError();
  if (!getDerived().AlwaysRebuild() &&
      Callee.get() == E->getCallee() &&
      !ArgChanged)
    return SemaRef.MaybeBindToTemporary(E);
  // FIXME: Wrong source location information for the '('.
  SourceLocation FakeLParenLoc
    = ((Expr *)Callee.get())->getSourceRange().getBegin();
  return getDerived().RebuildCallExpr(Callee.get(), FakeLParenLoc,
                                      Args,
                                      E->getRParenLoc());
}
/// Transform a member access: rewrite the base expression, qualifier, member
/// declaration, found-declaration, member name, and any explicit template
/// arguments. If nothing changed, reuse the node after marking the member
/// referenced in the new context.
template<typename Derived>
ExprResult
TreeTransform<Derived>::TransformMemberExpr(MemberExpr *E) {
  ExprResult Base = getDerived().TransformExpr(E->getBase());
  if (Base.isInvalid())
    return ExprError();
  NestedNameSpecifierLoc QualifierLoc;
  if (E->hasQualifier()) {
    QualifierLoc
      = getDerived().TransformNestedNameSpecifierLoc(E->getQualifierLoc());
    if (!QualifierLoc)
      return ExprError();
  }
  SourceLocation TemplateKWLoc = E->getTemplateKeywordLoc();
  ValueDecl *Member
    = cast_or_null<ValueDecl>(getDerived().TransformDecl(E->getMemberLoc(),
                                                         E->getMemberDecl()));
  if (!Member)
    return ExprError();
  NamedDecl *FoundDecl = E->getFoundDecl();
  if (FoundDecl == E->getMemberDecl()) {
    // The found decl is the member itself; keep them in sync.
    FoundDecl = Member;
  } else {
    FoundDecl = cast_or_null<NamedDecl>(
                   getDerived().TransformDecl(E->getMemberLoc(), FoundDecl));
    if (!FoundDecl)
      return ExprError();
  }
  if (!getDerived().AlwaysRebuild() &&
      Base.get() == E->getBase() &&
      QualifierLoc == E->getQualifierLoc() &&
      Member == E->getMemberDecl() &&
      FoundDecl == E->getFoundDecl() &&
      !E->hasExplicitTemplateArgs()) {
    // Mark it referenced in the new context regardless.
    // FIXME: this is a bit instantiation-specific.
    SemaRef.MarkMemberReferenced(E);
    return E;
  }
  TemplateArgumentListInfo TransArgs;
  if (E->hasExplicitTemplateArgs()) {
    TransArgs.setLAngleLoc(E->getLAngleLoc());
    TransArgs.setRAngleLoc(E->getRAngleLoc());
    if (getDerived().TransformTemplateArguments(E->getTemplateArgs(),
                                                E->getNumTemplateArgs(),
                                                TransArgs))
      return ExprError();
  }
  // FIXME: Bogus source location for the operator
  SourceLocation FakeOperatorLoc =
      SemaRef.getLocForEndOfToken(E->getBase()->getSourceRange().getEnd());
  // FIXME: to do this check properly, we will need to preserve the
  // first-qualifier-in-scope here, just in case we had a dependent
  // base (and therefore couldn't do the check) and a
  // nested-name-qualifier (and therefore could do the lookup).
  NamedDecl *FirstQualifierInScope = nullptr;
  DeclarationNameInfo MemberNameInfo = E->getMemberNameInfo();
  if (MemberNameInfo.getName()) {
    MemberNameInfo = getDerived().TransformDeclarationNameInfo(MemberNameInfo);
    if (!MemberNameInfo.getName())
      return ExprError();
  }
  return getDerived().RebuildMemberExpr(Base.get(), FakeOperatorLoc,
                                        E->isArrow(),
                                        QualifierLoc,
                                        TemplateKWLoc,
                                        MemberNameInfo,
                                        Member,
                                        FoundDecl,
                                        (E->hasExplicitTemplateArgs()
                                           ? &TransArgs : nullptr),
                                        FirstQualifierInScope);
}
/// Transform a binary operator, restoring the expression's floating-point
/// feature state around the rebuild.
template<typename Derived>
ExprResult
TreeTransform<Derived>::TransformBinaryOperator(BinaryOperator *E) {
  ExprResult LHS = getDerived().TransformExpr(E->getLHS());
  if (LHS.isInvalid())
    return ExprError();
  ExprResult RHS = getDerived().TransformExpr(E->getRHS());
  if (RHS.isInvalid())
    return ExprError();
  if (!getDerived().AlwaysRebuild() &&
      LHS.get() == E->getLHS() &&
      RHS.get() == E->getRHS())
    return E;
  // Temporarily install the expression's FP options so the rebuild sees the
  // same FP state as the original; the RAII object restores the old state.
  Sema::FPContractStateRAII FPContractState(getSema());
  getSema().FPFeatures = E->getFPFeatures();
  return getDerived().RebuildBinaryOperator(E->getOperatorLoc(), E->getOpcode(),
                                            LHS.get(), RHS.get());
}
// Compound assignments (+=, -=, ...) are handled exactly like plain binary
// operators; delegate to that transform.
template<typename Derived>
ExprResult
TreeTransform<Derived>::TransformCompoundAssignOperator(
                                                      CompoundAssignOperator *E) {
  return getDerived().TransformBinaryOperator(E);
}
/// Transform a GNU binary conditional (x ?: y). Only the common expression
/// and the false arm exist syntactically; the rebuild receives a null LHS.
template<typename Derived>
ExprResult TreeTransform<Derived>::
TransformBinaryConditionalOperator(BinaryConditionalOperator *e) {
  // Just rebuild the common and RHS expressions and see whether we
  // get any changes.
  ExprResult commonExpr = getDerived().TransformExpr(e->getCommon());
  if (commonExpr.isInvalid())
    return ExprError();
  ExprResult rhs = getDerived().TransformExpr(e->getFalseExpr());
  if (rhs.isInvalid())
    return ExprError();
  if (!getDerived().AlwaysRebuild() &&
      commonExpr.get() == e->getCommon() &&
      rhs.get() == e->getFalseExpr())
    return e;
  return getDerived().RebuildConditionalOperator(commonExpr.get(),
                                                 e->getQuestionLoc(),
                                                 nullptr,
                                                 e->getColonLoc(),
                                                 rhs.get());
}
// Transform the three operands of the conditional operator and rebuild when
// any of them changed (or when rebuilding is forced).
template<typename Derived>
ExprResult
TreeTransform<Derived>::TransformConditionalOperator(ConditionalOperator *E) {
  ExprResult NewCond = getDerived().TransformExpr(E->getCond());
  if (NewCond.isInvalid())
    return ExprError();
  ExprResult NewTrue = getDerived().TransformExpr(E->getLHS());
  if (NewTrue.isInvalid())
    return ExprError();
  ExprResult NewFalse = getDerived().TransformExpr(E->getRHS());
  if (NewFalse.isInvalid())
    return ExprError();
  bool Unchanged = NewCond.get() == E->getCond() &&
                   NewTrue.get() == E->getLHS() &&
                   NewFalse.get() == E->getRHS();
  if (Unchanged && !getDerived().AlwaysRebuild())
    return E;
  return getDerived().RebuildConditionalOperator(NewCond.get(),
                                                 E->getQuestionLoc(),
                                                 NewTrue.get(),
                                                 E->getColonLoc(),
                                                 NewFalse.get());
}
// Implicit casts are dropped rather than transformed: the transform recurses
// into the as-written subexpression, and semantic analysis recomputes the
// conversions afterwards.
template<typename Derived>
ExprResult
TreeTransform<Derived>::TransformImplicitCastExpr(ImplicitCastExpr *E) {
  // Implicit casts are eliminated during transformation, since they
  // will be recomputed by semantic analysis after transformation.
  return getDerived().TransformExpr(E->getSubExprAsWritten());
}
// Transform the written type and operand of a C-style cast, rebuilding only
// when one of them changed.
template<typename Derived>
ExprResult
TreeTransform<Derived>::TransformCStyleCastExpr(CStyleCastExpr *E) {
  TypeSourceInfo *NewTInfo =
      getDerived().TransformType(E->getTypeInfoAsWritten());
  if (!NewTInfo)
    return ExprError();
  ExprResult Operand = getDerived().TransformExpr(E->getSubExprAsWritten());
  if (Operand.isInvalid())
    return ExprError();
  if (!getDerived().AlwaysRebuild() && NewTInfo == E->getTypeInfoAsWritten() &&
      Operand.get() == E->getSubExpr())
    return E;
  return getDerived().RebuildCStyleCastExpr(E->getLParenLoc(), NewTInfo,
                                            E->getRParenLoc(), Operand.get());
}
/// Transform a compound literal: rewrite its written type and initializer.
template<typename Derived>
ExprResult
TreeTransform<Derived>::TransformCompoundLiteralExpr(CompoundLiteralExpr *E) {
  TypeSourceInfo *OldT = E->getTypeSourceInfo();
  TypeSourceInfo *NewT = getDerived().TransformType(OldT);
  if (!NewT)
    return ExprError();
  ExprResult Init = getDerived().TransformExpr(E->getInitializer());
  if (Init.isInvalid())
    return ExprError();
  if (!getDerived().AlwaysRebuild() &&
      OldT == NewT &&
      Init.get() == E->getInitializer())
    return SemaRef.MaybeBindToTemporary(E);
  // Note: the expression type doesn't necessarily match the
  // type-as-written, but that's okay, because it should always be
  // derivable from the initializer.
  return getDerived().RebuildCompoundLiteralExpr(
      E->getLParenLoc(), NewT,
      /*FIXME:*/ E->getInitializer()->getEndLoc(), Init.get());
}
/// Transform an extended-vector element access (v.xyzw), rewriting the base.
template<typename Derived>
ExprResult
TreeTransform<Derived>::TransformExtVectorElementExpr(ExtVectorElementExpr *E) {
  ExprResult Base = getDerived().TransformExpr(E->getBase());
  if (Base.isInvalid())
    return ExprError();
  if (!getDerived().AlwaysRebuild() &&
      Base.get() == E->getBase())
    return E;
  // FIXME: Bad source location
  SourceLocation FakeOperatorLoc =
      SemaRef.getLocForEndOfToken(E->getBase()->getEndLoc());
  return getDerived().RebuildExtVectorElementExpr(Base.get(), FakeOperatorLoc,
                                                  E->getAccessorLoc(),
                                                  E->getAccessor());
}
/// Transform an initializer list. Works on the syntactic form when one
/// exists, and always rebuilds (see the comment below for why the existing
/// form cannot generally be reused).
template<typename Derived>
ExprResult
TreeTransform<Derived>::TransformInitListExpr(InitListExpr *E) {
  if (InitListExpr *Syntactic = E->getSyntacticForm())
    E = Syntactic;
  bool InitChanged = false;
  EnterExpressionEvaluationContext Context(
      getSema(), EnterExpressionEvaluationContext::InitList);
  SmallVector<Expr*, 4> Inits;
  if (getDerived().TransformExprs(E->getInits(), E->getNumInits(), false,
                                  Inits, &InitChanged))
    return ExprError();
  if (!getDerived().AlwaysRebuild() && !InitChanged) {
    // FIXME: Attempt to reuse the existing syntactic form of the InitListExpr
    // in some cases. We can't reuse it in general, because the syntactic and
    // semantic forms are linked, and we can't know that semantic form will
    // match even if the syntactic form does.
  }
  return getDerived().RebuildInitList(E->getLBraceLoc(), Inits,
                                      E->getRBraceLoc());
}
/// Transform a C99 designated initializer: rewrite the initializer value and
/// each designator (field, array index, or GNU array range).
///
/// Fix: the array-designator change check previously compared Init (the
/// initializer) against the original array index — a copy-paste of the
/// initializer check — which both forced spurious rebuilds and failed to
/// track index changes. It now compares the transformed Index, mirroring the
/// array-range case below.
template<typename Derived>
ExprResult
TreeTransform<Derived>::TransformDesignatedInitExpr(DesignatedInitExpr *E) {
  Designation Desig;
  // Transform the initializer value.
  ExprResult Init = getDerived().TransformExpr(E->getInit());
  if (Init.isInvalid())
    return ExprError();
  // Transform the designators.
  SmallVector<Expr*, 4> ArrayExprs;
  bool ExprChanged = false;
  for (const DesignatedInitExpr::Designator &D : E->designators()) {
    if (D.isFieldDesignator()) {
      Desig.AddDesignator(Designator::getField(D.getFieldName(),
                                               D.getDotLoc(),
                                               D.getFieldLoc()));
      if (D.getField()) {
        FieldDecl *Field = cast_or_null<FieldDecl>(
            getDerived().TransformDecl(D.getFieldLoc(), D.getField()));
        if (Field != D.getField())
          // Rebuild the expression when the transformed FieldDecl is
          // different to the already assigned FieldDecl.
          ExprChanged = true;
      } else {
        // Ensure that the designator expression is rebuilt when there isn't
        // a resolved FieldDecl in the designator as we don't want to assign
        // a FieldDecl to a pattern designator that will be instantiated again.
        ExprChanged = true;
      }
      continue;
    }
    if (D.isArrayDesignator()) {
      ExprResult Index = getDerived().TransformExpr(E->getArrayIndex(D));
      if (Index.isInvalid())
        return ExprError();
      Desig.AddDesignator(
          Designator::getArray(Index.get(), D.getLBracketLoc()));
      // Compare the transformed index against the original index (not the
      // initializer) to decide whether this designator forces a rebuild.
      ExprChanged = ExprChanged || Index.get() != E->getArrayIndex(D);
      ArrayExprs.push_back(Index.get());
      continue;
    }
    assert(D.isArrayRangeDesignator() && "New kind of designator?");
    ExprResult Start
      = getDerived().TransformExpr(E->getArrayRangeStart(D));
    if (Start.isInvalid())
      return ExprError();
    ExprResult End = getDerived().TransformExpr(E->getArrayRangeEnd(D));
    if (End.isInvalid())
      return ExprError();
    Desig.AddDesignator(Designator::getArrayRange(Start.get(),
                                                  End.get(),
                                                  D.getLBracketLoc(),
                                                  D.getEllipsisLoc()));
    ExprChanged = ExprChanged || Start.get() != E->getArrayRangeStart(D) ||
                  End.get() != E->getArrayRangeEnd(D);
    ArrayExprs.push_back(Start.get());
    ArrayExprs.push_back(End.get());
  }
  // If nothing changed, retain the existing expression.
  if (!getDerived().AlwaysRebuild() &&
      Init.get() == E->getInit() &&
      !ExprChanged)
    return E;
  return getDerived().RebuildDesignatedInitExpr(Desig, ArrayExprs,
                                                E->getEqualOrColonLoc(),
                                                E->usesGNUSyntax(), Init.get());
}
// Seems that if TransformInitListExpr() only works on the syntactic form of an
// InitListExpr, then a DesignatedInitUpdateExpr is not encountered.
// Reaching this transform therefore indicates an internal invariant
// violation, not a user error.
template<typename Derived>
ExprResult
TreeTransform<Derived>::TransformDesignatedInitUpdateExpr(
    DesignatedInitUpdateExpr *E) {
  llvm_unreachable("Unexpected DesignatedInitUpdateExpr in syntactic form of "
                   "initializer");
  return ExprError();
}
// NoInitExpr only appears in semantic forms of initializer lists, which this
// transform never walks; reaching here is an internal invariant violation.
template<typename Derived>
ExprResult
TreeTransform<Derived>::TransformNoInitExpr(
    NoInitExpr *E) {
  llvm_unreachable("Unexpected NoInitExpr in syntactic form of initializer");
  return ExprError();
}
// ArrayInitLoopExpr is an implicit node that should never be visited here;
// reaching this transform is an internal invariant violation.
template<typename Derived>
ExprResult
TreeTransform<Derived>::TransformArrayInitLoopExpr(ArrayInitLoopExpr *E) {
  llvm_unreachable("Unexpected ArrayInitLoopExpr outside of initializer");
  return ExprError();
}
// ArrayInitIndexExpr is an implicit node that should never be visited here;
// reaching this transform is an internal invariant violation.
template<typename Derived>
ExprResult
TreeTransform<Derived>::TransformArrayInitIndexExpr(ArrayInitIndexExpr *E) {
  llvm_unreachable("Unexpected ArrayInitIndexExpr outside of initializer");
  return ExprError();
}
// Transform the type of an implicit value-initialization and rebuild when it
// changed (or when rebuilding is forced).
template<typename Derived>
ExprResult
TreeTransform<Derived>::TransformImplicitValueInitExpr(
    ImplicitValueInitExpr *E) {
  TemporaryBase Rebase(*this, E->getBeginLoc(), DeclarationName());
  // FIXME: Will we ever have proper type location here? Will we actually
  // need to transform the type?
  QualType NewTy = getDerived().TransformType(E->getType());
  if (NewTy.isNull())
    return ExprError();
  if (NewTy == E->getType() && !getDerived().AlwaysRebuild())
    return E;
  return getDerived().RebuildImplicitValueInitExpr(NewTy);
}
// Transform the written type and operand of a va_arg expression, rebuilding
// only when one of them changed.
template<typename Derived>
ExprResult
TreeTransform<Derived>::TransformVAArgExpr(VAArgExpr *E) {
  TypeSourceInfo *NewTInfo =
      getDerived().TransformType(E->getWrittenTypeInfo());
  if (!NewTInfo)
    return ExprError();
  ExprResult Operand = getDerived().TransformExpr(E->getSubExpr());
  if (Operand.isInvalid())
    return ExprError();
  if (!getDerived().AlwaysRebuild() && NewTInfo == E->getWrittenTypeInfo() &&
      Operand.get() == E->getSubExpr())
    return E;
  return getDerived().RebuildVAArgExpr(E->getBuiltinLoc(), Operand.get(),
                                       NewTInfo, E->getRParenLoc());
}
/// Transform each subexpression of a paren list and rebuild the list.
///
/// Fix: dispatch TransformExprs through getDerived() so derived transforms
/// that override it are honored, matching every other caller in this file
/// (e.g. TransformCallExpr, TransformInitListExpr).
template<typename Derived>
ExprResult
TreeTransform<Derived>::TransformParenListExpr(ParenListExpr *E) {
  bool ArgumentChanged = false;
  SmallVector<Expr*, 4> Inits;
  if (getDerived().TransformExprs(E->getExprs(), E->getNumExprs(), true, Inits,
                                  &ArgumentChanged))
    return ExprError();
  return getDerived().RebuildParenListExpr(E->getLParenLoc(),
                                           Inits,
                                           E->getRParenLoc());
}
/// Transform an address-of-label expression.
///
/// By default, the transformation of an address-of-label expression always
/// rebuilds the expression, so that the label identifier can be resolved to
/// the corresponding label statement by semantic analysis.
template<typename Derived>
ExprResult
TreeTransform<Derived>::TransformAddrLabelExpr(AddrLabelExpr *E) {
  Decl *NewLabel = getDerived().TransformDecl(E->getLabel()->getLocation(),
                                              E->getLabel());
  if (!NewLabel)
    return ExprError();
  return getDerived().RebuildAddrLabelExpr(E->getAmpAmpLoc(), E->getLabelLoc(),
                                           cast<LabelDecl>(NewLabel));
}
/// Transform a GNU statement expression ({ ... }). Sema's statement-expression
/// bookkeeping must be opened before the body is transformed and explicitly
/// closed on every path that does not rebuild.
template<typename Derived>
ExprResult
TreeTransform<Derived>::TransformStmtExpr(StmtExpr *E) {
  SemaRef.ActOnStartStmtExpr();
  StmtResult SubStmt
    = getDerived().TransformCompoundStmt(E->getSubStmt(), true);
  if (SubStmt.isInvalid()) {
    SemaRef.ActOnStmtExprError();
    return ExprError();
  }
  if (!getDerived().AlwaysRebuild() &&
      SubStmt.get() == E->getSubStmt()) {
    // Calling this an 'error' is unintuitive, but it does the right thing.
    SemaRef.ActOnStmtExprError();
    return SemaRef.MaybeBindToTemporary(E);
  }
  return getDerived().RebuildStmtExpr(E->getLParenLoc(),
                                      SubStmt.get(),
                                      E->getRParenLoc());
}
// Transform the condition and both arms of __builtin_choose_expr, rebuilding
// when any operand changed (or rebuilding is forced).
template<typename Derived>
ExprResult
TreeTransform<Derived>::TransformChooseExpr(ChooseExpr *E) {
  ExprResult NewCond = getDerived().TransformExpr(E->getCond());
  if (NewCond.isInvalid())
    return ExprError();
  ExprResult NewLHS = getDerived().TransformExpr(E->getLHS());
  if (NewLHS.isInvalid())
    return ExprError();
  ExprResult NewRHS = getDerived().TransformExpr(E->getRHS());
  if (NewRHS.isInvalid())
    return ExprError();
  if (!getDerived().AlwaysRebuild() && NewCond.get() == E->getCond() &&
      NewLHS.get() == E->getLHS() && NewRHS.get() == E->getRHS())
    return E;
  return getDerived().RebuildChooseExpr(E->getBuiltinLoc(),
                                        NewCond.get(), NewLHS.get(),
                                        NewRHS.get(), E->getRParenLoc());
}
}
template<typename Derived>
ExprResult
TreeTransform<Derived>::TransformGNUNullExpr(GNUNullExpr *E) {
  // GNU __null has no subexpressions to transform; reuse the node as-is.
  return E;
}
/// Transform a call to an overloaded operator.
///
/// operator() calls are rebuilt as ordinary calls on the transformed object;
/// all other overloadable operators (including operator[]) go through the
/// common unary/binary rebuild path at the bottom.
template<typename Derived>
ExprResult
TreeTransform<Derived>::TransformCXXOperatorCallExpr(CXXOperatorCallExpr *E) {
  switch (E->getOperator()) {
  case OO_New:
  case OO_Delete:
  case OO_Array_New:
  case OO_Array_Delete:
    llvm_unreachable("new and delete operators cannot use CXXOperatorCallExpr");
  case OO_Call: {
    // This is a call to an object's operator().
    assert(E->getNumArgs() >= 1 && "Object call is missing arguments");
    // Transform the object itself.
    ExprResult Object = getDerived().TransformExpr(E->getArg(0));
    if (Object.isInvalid())
      return ExprError();
    // FIXME: Poor location information
    SourceLocation FakeLParenLoc = SemaRef.getLocForEndOfToken(
        static_cast<Expr *>(Object.get())->getEndLoc());
    // Transform the call arguments (everything after the object argument).
    SmallVector<Expr*, 8> Args;
    if (getDerived().TransformExprs(E->getArgs() + 1, E->getNumArgs() - 1, true,
                                    Args))
      return ExprError();
    return getDerived().RebuildCallExpr(Object.get(), FakeLParenLoc, Args,
                                        E->getEndLoc());
  }
  // Every remaining overloadable operator falls through to the common path.
#define OVERLOADED_OPERATOR(Name,Spelling,Token,Unary,Binary,MemberOnly) \
  case OO_##Name:
#define OVERLOADED_OPERATOR_MULTI(Name,Spelling,Unary,Binary,MemberOnly)
#include "clang/Basic/OperatorKinds.def"
  case OO_Subscript:
    // Handled below.
    break;
  case OO_Conditional:
    llvm_unreachable("conditional operator is not actually overloadable");
  case OO_None:
  case NUM_OVERLOADED_OPERATORS:
    llvm_unreachable("not an overloaded operator?");
  }
  // Transform the callee (the function or overload set being invoked).
  ExprResult Callee = getDerived().TransformExpr(E->getCallee());
  if (Callee.isInvalid())
    return ExprError();
  // Transform the first operand. Unary '&' goes through the dedicated
  // address-of-operand path.
  ExprResult First;
  if (E->getOperator() == OO_Amp)
    First = getDerived().TransformAddressOfOperand(E->getArg(0));
  else
    First = getDerived().TransformExpr(E->getArg(0));
  if (First.isInvalid())
    return ExprError();
  // Transform the second operand, if this is a binary operator.
  ExprResult Second;
  if (E->getNumArgs() == 2) {
    Second = getDerived().TransformExpr(E->getArg(1));
    if (Second.isInvalid())
      return ExprError();
  }
  if (!getDerived().AlwaysRebuild() &&
      Callee.get() == E->getCallee() &&
      First.get() == E->getArg(0) &&
      (E->getNumArgs() != 2 || Second.get() == E->getArg(1)))
    return SemaRef.MaybeBindToTemporary(E);
  // Rebuild under the original expression's floating-point feature state;
  // the RAII object restores Sema's state afterwards.
  Sema::FPContractStateRAII FPContractState(getSema());
  getSema().FPFeatures = E->getFPFeatures();
  return getDerived().RebuildCXXOperatorCallExpr(E->getOperator(),
                                                 E->getOperatorLoc(),
                                                 Callee.get(),
                                                 First.get(),
                                                 Second.get());
}
template<typename Derived>
ExprResult
TreeTransform<Derived>::TransformCXXMemberCallExpr(CXXMemberCallExpr *E) {
  // Member calls need no special handling: delegate to the generic
  // call-expression transformation.
  return getDerived().TransformCallExpr(E);
}
template <typename Derived>
ExprResult TreeTransform<Derived>::TransformSourceLocExpr(SourceLocExpr *E) {
  // __builtin_FUNCTION() depends on the enclosing context, so it must be
  // rebuilt whenever the current context differs from where it was written.
  bool FuncContextChanged = E->getIdentKind() == SourceLocExpr::Function &&
                            getSema().CurContext != E->getParentContext();
  if (getDerived().AlwaysRebuild() || FuncContextChanged)
    return getDerived().RebuildSourceLocExpr(E->getIdentKind(),
                                             E->getBeginLoc(), E->getEndLoc(),
                                             getSema().CurContext);
  return E;
}
/// Transform a CUDA kernel launch, e.g. kernel<<<grid, block>>>(args).
template<typename Derived>
ExprResult
TreeTransform<Derived>::TransformCUDAKernelCallExpr(CUDAKernelCallExpr *E) {
  // Transform the callee.
  ExprResult Callee = getDerived().TransformExpr(E->getCallee());
  if (Callee.isInvalid())
    return ExprError();
  // Transform exec config.
  ExprResult EC = getDerived().TransformCallExpr(E->getConfig());
  if (EC.isInvalid())
    return ExprError();
  // Transform arguments.
  bool ArgChanged = false;
  SmallVector<Expr*, 8> Args;
  if (getDerived().TransformExprs(E->getArgs(), E->getNumArgs(), true, Args,
                                  &ArgChanged))
    return ExprError();
  // NOTE(review): the exec config (EC) is not part of this unchanged-check,
  // so a changed config alone does not force a rebuild — confirm intended.
  if (!getDerived().AlwaysRebuild() &&
      Callee.get() == E->getCallee() &&
      !ArgChanged)
    return SemaRef.MaybeBindToTemporary(E);
  // FIXME: Wrong source location information for the '('.
  SourceLocation FakeLParenLoc
    = ((Expr *)Callee.get())->getSourceRange().getBegin();
  return getDerived().RebuildCallExpr(Callee.get(), FakeLParenLoc,
                                      Args,
                                      E->getRParenLoc(), EC.get());
}
/// Transform a C++ named cast (static_cast, dynamic_cast, reinterpret_cast,
/// const_cast); the four concrete cast transforms below all delegate here.
template<typename Derived>
ExprResult
TreeTransform<Derived>::TransformCXXNamedCastExpr(CXXNamedCastExpr *E) {
  // Transform the destination type as written inside the angle brackets.
  TypeSourceInfo *Type = getDerived().TransformType(E->getTypeInfoAsWritten());
  if (!Type)
    return ExprError();
  // Transform the operand as written, skipping implicit conversions that
  // semantic analysis wrapped around it.
  ExprResult SubExpr
    = getDerived().TransformExpr(E->getSubExprAsWritten());
  if (SubExpr.isInvalid())
    return ExprError();
  if (!getDerived().AlwaysRebuild() &&
      Type == E->getTypeInfoAsWritten() &&
      SubExpr.get() == E->getSubExpr())
    return E;
  return getDerived().RebuildCXXNamedCastExpr(
      E->getOperatorLoc(), E->getStmtClass(), E->getAngleBrackets().getBegin(),
      Type, E->getAngleBrackets().getEnd(),
      // FIXME. this should be '(' location
      E->getAngleBrackets().getEnd(), SubExpr.get(), E->getRParenLoc());
}
template<typename Derived>
ExprResult
TreeTransform<Derived>::TransformCXXStaticCastExpr(CXXStaticCastExpr *E) {
  // static_cast shares the common named-cast transformation.
  return getDerived().TransformCXXNamedCastExpr(E);
}
template<typename Derived>
ExprResult
TreeTransform<Derived>::TransformCXXDynamicCastExpr(CXXDynamicCastExpr *E) {
  // dynamic_cast shares the common named-cast transformation.
  return getDerived().TransformCXXNamedCastExpr(E);
}
template<typename Derived>
ExprResult
TreeTransform<Derived>::TransformCXXReinterpretCastExpr(
                                                      CXXReinterpretCastExpr *E) {
  // reinterpret_cast shares the common named-cast transformation.
  return getDerived().TransformCXXNamedCastExpr(E);
}
template<typename Derived>
ExprResult
TreeTransform<Derived>::TransformCXXConstCastExpr(CXXConstCastExpr *E) {
  // const_cast shares the common named-cast transformation.
  return getDerived().TransformCXXNamedCastExpr(E);
}
/// Transform a functional-notation cast, e.g. T(expr).
template<typename Derived>
ExprResult
TreeTransform<Derived>::TransformCXXFunctionalCastExpr(
                                                     CXXFunctionalCastExpr *E) {
  // The written type may be a class-template name deduced via CTAD, so use
  // the deduced-TST-aware type transformation.
  TypeSourceInfo *Type =
      getDerived().TransformTypeWithDeducedTST(E->getTypeInfoAsWritten());
  if (!Type)
    return ExprError();
  // Transform the operand as written, without implicit conversions.
  ExprResult SubExpr
    = getDerived().TransformExpr(E->getSubExprAsWritten());
  if (SubExpr.isInvalid())
    return ExprError();
  if (!getDerived().AlwaysRebuild() &&
      Type == E->getTypeInfoAsWritten() &&
      SubExpr.get() == E->getSubExpr())
    return E;
  return getDerived().RebuildCXXFunctionalCastExpr(Type,
                                                   E->getLParenLoc(),
                                                   SubExpr.get(),
                                                   E->getRParenLoc(),
                                                   E->isListInitialization());
}
/// Transform a typeid expression with either a type or expression operand.
template<typename Derived>
ExprResult
TreeTransform<Derived>::TransformCXXTypeidExpr(CXXTypeidExpr *E) {
  if (E->isTypeOperand()) {
    // typeid(type): transform the operand type only.
    TypeSourceInfo *TInfo
      = getDerived().TransformType(E->getTypeOperandSourceInfo());
    if (!TInfo)
      return ExprError();
    if (!getDerived().AlwaysRebuild() &&
        TInfo == E->getTypeOperandSourceInfo())
      return E;
    return getDerived().RebuildCXXTypeidExpr(E->getType(), E->getBeginLoc(),
                                             TInfo, E->getEndLoc());
  }
  // We don't know whether the subexpression is potentially evaluated until
  // after we perform semantic analysis.  We speculatively assume it is
  // unevaluated; it will get fixed later if the subexpression is in fact
  // potentially evaluated.
  EnterExpressionEvaluationContext Unevaluated(
      SemaRef, Sema::ExpressionEvaluationContext::Unevaluated,
      Sema::ReuseLambdaContextDecl);
  ExprResult SubExpr = getDerived().TransformExpr(E->getExprOperand());
  if (SubExpr.isInvalid())
    return ExprError();
  if (!getDerived().AlwaysRebuild() &&
      SubExpr.get() == E->getExprOperand())
    return E;
  return getDerived().RebuildCXXTypeidExpr(E->getType(), E->getBeginLoc(),
                                           SubExpr.get(), E->getEndLoc());
}
/// Transform a Microsoft __uuidof expression with either a type or
/// expression operand.
template<typename Derived>
ExprResult
TreeTransform<Derived>::TransformCXXUuidofExpr(CXXUuidofExpr *E) {
  if (E->isTypeOperand()) {
    // __uuidof(type): transform the operand type only.
    TypeSourceInfo *TInfo
      = getDerived().TransformType(E->getTypeOperandSourceInfo());
    if (!TInfo)
      return ExprError();
    if (!getDerived().AlwaysRebuild() &&
        TInfo == E->getTypeOperandSourceInfo())
      return E;
    return getDerived().RebuildCXXUuidofExpr(E->getType(), E->getBeginLoc(),
                                             TInfo, E->getEndLoc());
  }
  // The expression operand of __uuidof is never evaluated.
  EnterExpressionEvaluationContext Unevaluated(
      SemaRef, Sema::ExpressionEvaluationContext::Unevaluated);
  ExprResult SubExpr = getDerived().TransformExpr(E->getExprOperand());
  if (SubExpr.isInvalid())
    return ExprError();
  if (!getDerived().AlwaysRebuild() &&
      SubExpr.get() == E->getExprOperand())
    return E;
  return getDerived().RebuildCXXUuidofExpr(E->getType(), E->getBeginLoc(),
                                           SubExpr.get(), E->getEndLoc());
}
template<typename Derived>
ExprResult
TreeTransform<Derived>::TransformCXXBoolLiteralExpr(CXXBoolLiteralExpr *E) {
  // Boolean literals have no subexpressions; reuse the node as-is.
  return E;
}
template<typename Derived>
ExprResult
TreeTransform<Derived>::TransformCXXNullPtrLiteralExpr(
                                                     CXXNullPtrLiteralExpr *E) {
  // nullptr literals have no subexpressions; reuse the node as-is.
  return E;
}
template<typename Derived>
ExprResult
TreeTransform<Derived>::TransformCXXThisExpr(CXXThisExpr *E) {
  // Rebuild 'this' with the type of the current context when it differs
  // from the original node's type.
  QualType ThisTy = getSema().getCurrentThisType();
  if (getDerived().AlwaysRebuild() || ThisTy != E->getType())
    return getDerived().RebuildCXXThisExpr(E->getBeginLoc(), ThisTy,
                                           E->isImplicit());
  // Mark it referenced in the new context regardless.
  // FIXME: this is a bit instantiation-specific.
  getSema().MarkThisReferenced(E);
  return E;
}
template<typename Derived>
ExprResult
TreeTransform<Derived>::TransformCXXThrowExpr(CXXThrowExpr *E) {
  // Transform the thrown operand (TransformExpr tolerates a null operand
  // for a bare rethrow).
  ExprResult Operand = getDerived().TransformExpr(E->getSubExpr());
  if (Operand.isInvalid())
    return ExprError();
  if (Operand.get() == E->getSubExpr() && !getDerived().AlwaysRebuild())
    return E;
  return getDerived().RebuildCXXThrowExpr(E->getThrowLoc(), Operand.get(),
                                          E->isThrownVariableInScope());
}
template<typename Derived>
ExprResult
TreeTransform<Derived>::TransformCXXDefaultArgExpr(CXXDefaultArgExpr *E) {
  // Transform the parameter whose default argument is being used.
  ParmVarDecl *Param = cast_or_null<ParmVarDecl>(
      getDerived().TransformDecl(E->getBeginLoc(), E->getParam()));
  if (!Param)
    return ExprError();
  bool Unchanged = Param == E->getParam() &&
                   E->getUsedContext() == SemaRef.CurContext;
  if (Unchanged && !getDerived().AlwaysRebuild())
    return E;
  return getDerived().RebuildCXXDefaultArgExpr(E->getUsedLocation(), Param);
}
template<typename Derived>
ExprResult
TreeTransform<Derived>::TransformCXXDefaultInitExpr(CXXDefaultInitExpr *E) {
  // Transform the field whose default member initializer is being used.
  FieldDecl *Field = cast_or_null<FieldDecl>(
      getDerived().TransformDecl(E->getBeginLoc(), E->getField()));
  if (!Field)
    return ExprError();
  bool Unchanged = Field == E->getField() &&
                   E->getUsedContext() == SemaRef.CurContext;
  if (Unchanged && !getDerived().AlwaysRebuild())
    return E;
  return getDerived().RebuildCXXDefaultInitExpr(E->getExprLoc(), Field);
}
template<typename Derived>
ExprResult
TreeTransform<Derived>::TransformCXXScalarValueInitExpr(
                                                    CXXScalarValueInitExpr *E) {
  // Transform the written type of the value-initialization, e.g. T in "T()".
  TypeSourceInfo *TSI = getDerived().TransformType(E->getTypeSourceInfo());
  if (!TSI)
    return ExprError();
  if (TSI == E->getTypeSourceInfo() && !getDerived().AlwaysRebuild())
    return E;
  return getDerived().RebuildCXXScalarValueInitExpr(
      TSI, /*FIXME:*/ TSI->getTypeLoc().getEndLoc(), E->getRParenLoc());
}
/// Transform a C++ new-expression: its allocated type, array bound,
/// placement arguments, initializer, and the resolved operator new/delete.
template<typename Derived>
ExprResult
TreeTransform<Derived>::TransformCXXNewExpr(CXXNewExpr *E) {
  // Transform the type that we're allocating
  TypeSourceInfo *AllocTypeInfo =
      getDerived().TransformTypeWithDeducedTST(E->getAllocatedTypeSourceInfo());
  if (!AllocTypeInfo)
    return ExprError();
  // Transform the size of the array we're allocating (if any).
  Optional<Expr *> ArraySize;
  if (Optional<Expr *> OldArraySize = E->getArraySize()) {
    ExprResult NewArraySize;
    if (*OldArraySize) {
      NewArraySize = getDerived().TransformExpr(*OldArraySize);
      if (NewArraySize.isInvalid())
        return ExprError();
    }
    ArraySize = NewArraySize.get();
  }
  // Transform the placement arguments (if any).
  bool ArgumentChanged = false;
  SmallVector<Expr*, 8> PlacementArgs;
  if (getDerived().TransformExprs(E->getPlacementArgs(),
                                  E->getNumPlacementArgs(), true,
                                  PlacementArgs, &ArgumentChanged))
    return ExprError();
  // Transform the initializer (if any).
  Expr *OldInit = E->getInitializer();
  ExprResult NewInit;
  if (OldInit)
    NewInit = getDerived().TransformInitializer(OldInit, true);
  if (NewInit.isInvalid())
    return ExprError();
  // Transform new operator and delete operator.
  FunctionDecl *OperatorNew = nullptr;
  if (E->getOperatorNew()) {
    OperatorNew = cast_or_null<FunctionDecl>(
        getDerived().TransformDecl(E->getBeginLoc(), E->getOperatorNew()));
    if (!OperatorNew)
      return ExprError();
  }
  FunctionDecl *OperatorDelete = nullptr;
  if (E->getOperatorDelete()) {
    OperatorDelete = cast_or_null<FunctionDecl>(
        getDerived().TransformDecl(E->getBeginLoc(), E->getOperatorDelete()));
    if (!OperatorDelete)
      return ExprError();
  }
  // If nothing changed, reuse the original node, but still mark the
  // allocation/deallocation functions and any destructor as referenced.
  if (!getDerived().AlwaysRebuild() &&
      AllocTypeInfo == E->getAllocatedTypeSourceInfo() &&
      ArraySize == E->getArraySize() &&
      NewInit.get() == OldInit &&
      OperatorNew == E->getOperatorNew() &&
      OperatorDelete == E->getOperatorDelete() &&
      !ArgumentChanged) {
    // Mark any declarations we need as referenced.
    // FIXME: instantiation-specific.
    if (OperatorNew)
      SemaRef.MarkFunctionReferenced(E->getBeginLoc(), OperatorNew);
    if (OperatorDelete)
      SemaRef.MarkFunctionReferenced(E->getBeginLoc(), OperatorDelete);
    if (E->isArray() && !E->getAllocatedType()->isDependentType()) {
      QualType ElementType
        = SemaRef.Context.getBaseElementType(E->getAllocatedType());
      if (const RecordType *RecordT = ElementType->getAs<RecordType>()) {
        CXXRecordDecl *Record = cast<CXXRecordDecl>(RecordT->getDecl());
        if (CXXDestructorDecl *Destructor = SemaRef.LookupDestructor(Record)) {
          SemaRef.MarkFunctionReferenced(E->getBeginLoc(), Destructor);
        }
      }
    }
    return E;
  }
  QualType AllocType = AllocTypeInfo->getType();
  if (!ArraySize) {
    // If no array size was specified, but the new expression was
    // instantiated with an array type (e.g., "new T" where T is
    // instantiated with "int[4]"), extract the outer bound from the
    // array type as our array size. We do this with constant and
    // dependently-sized array types.
    const ArrayType *ArrayT = SemaRef.Context.getAsArrayType(AllocType);
    if (!ArrayT) {
      // Do nothing
    } else if (const ConstantArrayType *ConsArrayT
                                     = dyn_cast<ConstantArrayType>(ArrayT)) {
      ArraySize = IntegerLiteral::Create(SemaRef.Context, ConsArrayT->getSize(),
                                         SemaRef.Context.getSizeType(),
                                         /*FIXME:*/ E->getBeginLoc());
      AllocType = ConsArrayT->getElementType();
    } else if (const DependentSizedArrayType *DepArrayT
                                = dyn_cast<DependentSizedArrayType>(ArrayT)) {
      if (DepArrayT->getSizeExpr()) {
        ArraySize = DepArrayT->getSizeExpr();
        AllocType = DepArrayT->getElementType();
      }
    }
  }
  return getDerived().RebuildCXXNewExpr(
      E->getBeginLoc(), E->isGlobalNew(),
      /*FIXME:*/ E->getBeginLoc(), PlacementArgs,
      /*FIXME:*/ E->getBeginLoc(), E->getTypeIdParens(), AllocType,
      AllocTypeInfo, ArraySize, E->getDirectInitRange(), NewInit.get());
}
/// Transform a C++ delete-expression and its resolved operator delete.
template<typename Derived>
ExprResult
TreeTransform<Derived>::TransformCXXDeleteExpr(CXXDeleteExpr *E) {
  // Transform the pointer being deleted.
  ExprResult Operand = getDerived().TransformExpr(E->getArgument());
  if (Operand.isInvalid())
    return ExprError();
  // Transform the delete operator, if known.
  FunctionDecl *OperatorDelete = nullptr;
  if (E->getOperatorDelete()) {
    OperatorDelete = cast_or_null<FunctionDecl>(
        getDerived().TransformDecl(E->getBeginLoc(), E->getOperatorDelete()));
    if (!OperatorDelete)
      return ExprError();
  }
  // If nothing changed, reuse the node but still mark the deallocation
  // function and the destructor as referenced.
  if (!getDerived().AlwaysRebuild() &&
      Operand.get() == E->getArgument() &&
      OperatorDelete == E->getOperatorDelete()) {
    // Mark any declarations we need as referenced.
    // FIXME: instantiation-specific.
    if (OperatorDelete)
      SemaRef.MarkFunctionReferenced(E->getBeginLoc(), OperatorDelete);
    if (!E->getArgument()->isTypeDependent()) {
      QualType Destroyed = SemaRef.Context.getBaseElementType(
                                                         E->getDestroyedType());
      if (const RecordType *DestroyedRec = Destroyed->getAs<RecordType>()) {
        CXXRecordDecl *Record = cast<CXXRecordDecl>(DestroyedRec->getDecl());
        SemaRef.MarkFunctionReferenced(E->getBeginLoc(),
                                       SemaRef.LookupDestructor(Record));
      }
    }
    return E;
  }
  return getDerived().RebuildCXXDeleteExpr(
      E->getBeginLoc(), E->isGlobalDelete(), E->isArrayForm(), Operand.get());
}
/// Transform a pseudo-destructor call, e.g. p->~T() where T is a scalar type.
template<typename Derived>
ExprResult
TreeTransform<Derived>::TransformCXXPseudoDestructorExpr(
                                                     CXXPseudoDestructorExpr *E) {
  // Transform the base object expression.
  ExprResult Base = getDerived().TransformExpr(E->getBase());
  if (Base.isInvalid())
    return ExprError();
  // Re-run the member-access start so Sema can recompute the object type
  // for the (possibly no-longer-dependent) base.
  ParsedType ObjectTypePtr;
  bool MayBePseudoDestructor = false;
  Base = SemaRef.ActOnStartCXXMemberReference(nullptr, Base.get(),
                                              E->getOperatorLoc(),
                                        E->isArrow()? tok::arrow : tok::period,
                                              ObjectTypePtr,
                                              MayBePseudoDestructor);
  if (Base.isInvalid())
    return ExprError();
  QualType ObjectType = ObjectTypePtr.get();
  // Transform the nested-name-specifier (if any) relative to the object type.
  NestedNameSpecifierLoc QualifierLoc = E->getQualifierLoc();
  if (QualifierLoc) {
    QualifierLoc
      = getDerived().TransformNestedNameSpecifierLoc(QualifierLoc, ObjectType);
    if (!QualifierLoc)
      return ExprError();
  }
  CXXScopeSpec SS;
  SS.Adopt(QualifierLoc);
  // Determine the destroyed type: a written type, a retained identifier
  // (still dependent), or a freshly looked-up destructor name.
  PseudoDestructorTypeStorage Destroyed;
  if (E->getDestroyedTypeInfo()) {
    TypeSourceInfo *DestroyedTypeInfo
      = getDerived().TransformTypeInObjectScope(E->getDestroyedTypeInfo(),
                                                ObjectType, nullptr, SS);
    if (!DestroyedTypeInfo)
      return ExprError();
    Destroyed = DestroyedTypeInfo;
  } else if (!ObjectType.isNull() && ObjectType->isDependentType()) {
    // We aren't likely to be able to resolve the identifier down to a type
    // now anyway, so just retain the identifier.
    Destroyed = PseudoDestructorTypeStorage(E->getDestroyedTypeIdentifier(),
                                            E->getDestroyedTypeLoc());
  } else {
    // Look for a destructor known with the given name.
    ParsedType T = SemaRef.getDestructorName(E->getTildeLoc(),
                                             *E->getDestroyedTypeIdentifier(),
                                             E->getDestroyedTypeLoc(),
                                             /*Scope=*/nullptr,
                                             SS, ObjectTypePtr,
                                             false);
    if (!T)
      return ExprError();
    Destroyed
      = SemaRef.Context.getTrivialTypeSourceInfo(SemaRef.GetTypeFromParser(T),
                                                 E->getDestroyedTypeLoc());
  }
  // Transform the optional scope type (the T in p->T::~T()).
  TypeSourceInfo *ScopeTypeInfo = nullptr;
  if (E->getScopeTypeInfo()) {
    CXXScopeSpec EmptySS;
    ScopeTypeInfo = getDerived().TransformTypeInObjectScope(
        E->getScopeTypeInfo(), ObjectType, nullptr, EmptySS);
    if (!ScopeTypeInfo)
      return ExprError();
  }
  return getDerived().RebuildCXXPseudoDestructorExpr(Base.get(),
                                                     E->getOperatorLoc(),
                                                     E->isArrow(),
                                                     SS,
                                                     ScopeTypeInfo,
                                                     E->getColonColonLoc(),
                                                     E->getTildeLoc(),
                                                     Destroyed);
}
/// Transform the set of declarations referenced by an overload expression,
/// expanding using-declarations and using-pack declarations, and populate
/// \p R with the results. Returns true on error.
template <typename Derived>
bool TreeTransform<Derived>::TransformOverloadExprDecls(OverloadExpr *Old,
                                                        bool RequiresADL,
                                                        LookupResult &R) {
  // Transform all the decls.
  bool AllEmptyPacks = true;
  for (auto *OldD : Old->decls()) {
    Decl *InstD = getDerived().TransformDecl(Old->getNameLoc(), OldD);
    if (!InstD) {
      // Silently ignore these if a UsingShadowDecl instantiated to nothing.
      // This can happen because of dependent hiding.
      if (isa<UsingShadowDecl>(OldD))
        continue;
      else {
        R.clear();
        return true;
      }
    }
    // Expand using pack declarations.
    NamedDecl *SingleDecl = cast<NamedDecl>(InstD);
    ArrayRef<NamedDecl*> Decls = SingleDecl;
    if (auto *UPD = dyn_cast<UsingPackDecl>(InstD))
      Decls = UPD->expansions();
    // Expand using declarations.
    for (auto *D : Decls) {
      if (auto *UD = dyn_cast<UsingDecl>(D)) {
        for (auto *SD : UD->shadows())
          R.addDecl(SD);
      } else {
        R.addDecl(D);
      }
    }
    AllEmptyPacks &= Decls.empty();
  }
  // C++ [temp.res]/8.4.2:
  //   The program is ill-formed, no diagnostic required, if [...] lookup for
  //   a name in the template definition found a using-declaration, but the
  //   lookup in the corresponding scope in the instantiation does not find
  //   any declarations because the using-declaration was a pack expansion and
  //   the corresponding pack is empty
  if (AllEmptyPacks && !RequiresADL) {
    getSema().Diag(Old->getNameLoc(), diag::err_using_pack_expansion_empty)
        << isa<UnresolvedMemberExpr>(Old) << Old->getName();
    return true;
  }
  // Resolve a kind, but don't do any further analysis. If it's
  // ambiguous, the callee needs to deal with it.
  R.resolveKind();
  return false;
}
/// Transform an unresolved lookup, re-performing the declaration lookup,
/// qualifier, naming class, and any explicit template arguments.
template<typename Derived>
ExprResult
TreeTransform<Derived>::TransformUnresolvedLookupExpr(
                                                  UnresolvedLookupExpr *Old) {
  LookupResult R(SemaRef, Old->getName(), Old->getNameLoc(),
                 Sema::LookupOrdinaryName);
  // Transform the declaration set.
  if (TransformOverloadExprDecls(Old, Old->requiresADL(), R))
    return ExprError();
  // Rebuild the nested-name qualifier, if present.
  CXXScopeSpec SS;
  if (Old->getQualifierLoc()) {
    NestedNameSpecifierLoc QualifierLoc
      = getDerived().TransformNestedNameSpecifierLoc(Old->getQualifierLoc());
    if (!QualifierLoc)
      return ExprError();
    SS.Adopt(QualifierLoc);
  }
  // Transform the naming class, which records which class's members the
  // name was found in.
  if (Old->getNamingClass()) {
    CXXRecordDecl *NamingClass
      = cast_or_null<CXXRecordDecl>(getDerived().TransformDecl(
                                                            Old->getNameLoc(),
                                                        Old->getNamingClass()));
    if (!NamingClass) {
      R.clear();
      return ExprError();
    }
    R.setNamingClass(NamingClass);
  }
  SourceLocation TemplateKWLoc = Old->getTemplateKeywordLoc();
  // If we have neither explicit template arguments, nor the template keyword,
  // it's a normal declaration name or member reference.
  if (!Old->hasExplicitTemplateArgs() && !TemplateKWLoc.isValid()) {
    NamedDecl *D = R.getAsSingle<NamedDecl>();
    // In a C++11 unevaluated context, an UnresolvedLookupExpr might refer to an
    // instance member. In other contexts, BuildPossibleImplicitMemberExpr will
    // give a good diagnostic.
    if (D && D->isCXXInstanceMember()) {
      return SemaRef.BuildPossibleImplicitMemberExpr(SS, TemplateKWLoc, R,
                                                     /*TemplateArgs=*/nullptr,
                                                     /*Scope=*/nullptr);
    }
    return getDerived().RebuildDeclarationNameExpr(SS, R, Old->requiresADL());
  }
  // If we have template arguments, rebuild them, then rebuild the
  // templateid expression.
  TemplateArgumentListInfo TransArgs(Old->getLAngleLoc(), Old->getRAngleLoc());
  if (Old->hasExplicitTemplateArgs() &&
      getDerived().TransformTemplateArguments(Old->getTemplateArgs(),
                                              Old->getNumTemplateArgs(),
                                              TransArgs)) {
    R.clear();
    return ExprError();
  }
  return getDerived().RebuildTemplateIdExpr(SS, TemplateKWLoc, R,
                                            Old->requiresADL(), &TransArgs);
}
/// Transform a type trait (e.g. __is_trivially_copyable), transforming each
/// type argument and expanding any pack expansions among them.
template<typename Derived>
ExprResult
TreeTransform<Derived>::TransformTypeTraitExpr(TypeTraitExpr *E) {
  bool ArgChanged = false;
  SmallVector<TypeSourceInfo *, 4> Args;
  for (unsigned I = 0, N = E->getNumArgs(); I != N; ++I) {
    TypeSourceInfo *From = E->getArg(I);
    TypeLoc FromTL = From->getTypeLoc();
    // Non-pack arguments: transform the type directly.
    if (!FromTL.getAs<PackExpansionTypeLoc>()) {
      TypeLocBuilder TLB;
      TLB.reserve(FromTL.getFullDataSize());
      QualType To = getDerived().TransformType(TLB, FromTL);
      if (To.isNull())
        return ExprError();
      if (To == From->getType())
        Args.push_back(From);
      else {
        Args.push_back(TLB.getTypeSourceInfo(SemaRef.Context, To));
        ArgChanged = true;
      }
      continue;
    }
    ArgChanged = true;
    // We have a pack expansion. Instantiate it.
    PackExpansionTypeLoc ExpansionTL = FromTL.castAs<PackExpansionTypeLoc>();
    TypeLoc PatternTL = ExpansionTL.getPatternLoc();
    SmallVector<UnexpandedParameterPack, 2> Unexpanded;
    SemaRef.collectUnexpandedParameterPacks(PatternTL, Unexpanded);
    // Determine whether the set of unexpanded parameter packs can and should
    // be expanded.
    bool Expand = true;
    bool RetainExpansion = false;
    Optional<unsigned> OrigNumExpansions =
        ExpansionTL.getTypePtr()->getNumExpansions();
    Optional<unsigned> NumExpansions = OrigNumExpansions;
    if (getDerived().TryExpandParameterPacks(ExpansionTL.getEllipsisLoc(),
                                             PatternTL.getSourceRange(),
                                             Unexpanded,
                                             Expand, RetainExpansion,
                                             NumExpansions))
      return ExprError();
    if (!Expand) {
      // The transform has determined that we should perform a simple
      // transformation on the pack expansion, producing another pack
      // expansion.
      Sema::ArgumentPackSubstitutionIndexRAII SubstIndex(getSema(), -1);
      TypeLocBuilder TLB;
      TLB.reserve(From->getTypeLoc().getFullDataSize());
      QualType To = getDerived().TransformType(TLB, PatternTL);
      if (To.isNull())
        return ExprError();
      To = getDerived().RebuildPackExpansionType(To,
                                                 PatternTL.getSourceRange(),
                                                 ExpansionTL.getEllipsisLoc(),
                                                 NumExpansions);
      if (To.isNull())
        return ExprError();
      PackExpansionTypeLoc ToExpansionTL
        = TLB.push<PackExpansionTypeLoc>(To);
      ToExpansionTL.setEllipsisLoc(ExpansionTL.getEllipsisLoc());
      Args.push_back(TLB.getTypeSourceInfo(SemaRef.Context, To));
      continue;
    }
    // Expand the pack expansion by substituting for each argument in the
    // pack(s).
    for (unsigned I = 0; I != *NumExpansions; ++I) {
      Sema::ArgumentPackSubstitutionIndexRAII SubstIndex(SemaRef, I);
      TypeLocBuilder TLB;
      TLB.reserve(PatternTL.getFullDataSize());
      QualType To = getDerived().TransformType(TLB, PatternTL);
      if (To.isNull())
        return ExprError();
      // If the substituted pattern still mentions an unexpanded pack, keep it
      // wrapped as a pack expansion.
      if (To->containsUnexpandedParameterPack()) {
        To = getDerived().RebuildPackExpansionType(To,
                                                   PatternTL.getSourceRange(),
                                                   ExpansionTL.getEllipsisLoc(),
                                                   NumExpansions);
        if (To.isNull())
          return ExprError();
        PackExpansionTypeLoc ToExpansionTL
          = TLB.push<PackExpansionTypeLoc>(To);
        ToExpansionTL.setEllipsisLoc(ExpansionTL.getEllipsisLoc());
      }
      Args.push_back(TLB.getTypeSourceInfo(SemaRef.Context, To));
    }
    if (!RetainExpansion)
      continue;
    // If we're supposed to retain a pack expansion, do so by temporarily
    // forgetting the partially-substituted parameter pack.
    ForgetPartiallySubstitutedPackRAII Forget(getDerived());
    TypeLocBuilder TLB;
    TLB.reserve(From->getTypeLoc().getFullDataSize());
    QualType To = getDerived().TransformType(TLB, PatternTL);
    if (To.isNull())
      return ExprError();
    To = getDerived().RebuildPackExpansionType(To,
                                               PatternTL.getSourceRange(),
                                               ExpansionTL.getEllipsisLoc(),
                                               NumExpansions);
    if (To.isNull())
      return ExprError();
    PackExpansionTypeLoc ToExpansionTL
      = TLB.push<PackExpansionTypeLoc>(To);
    ToExpansionTL.setEllipsisLoc(ExpansionTL.getEllipsisLoc());
    Args.push_back(TLB.getTypeSourceInfo(SemaRef.Context, To));
  }
  if (!getDerived().AlwaysRebuild() && !ArgChanged)
    return E;
  return getDerived().RebuildTypeTrait(E->getTrait(), E->getBeginLoc(), Args,
                                       E->getEndLoc());
}
/// Transform an array type trait, e.g. __array_rank or __array_extent.
template<typename Derived>
ExprResult
TreeTransform<Derived>::TransformArrayTypeTraitExpr(ArrayTypeTraitExpr *E) {
  TypeSourceInfo *T = getDerived().TransformType(E->getQueriedTypeSourceInfo());
  if (!T)
    return ExprError();
  // NOTE(review): this early return is taken when only the queried type is
  // unchanged, without transforming the dimension expression — confirm the
  // dimension expression can never change when the type does not.
  if (!getDerived().AlwaysRebuild() &&
      T == E->getQueriedTypeSourceInfo())
    return E;
  ExprResult SubExpr;
  {
    // The dimension operand is never evaluated.
    EnterExpressionEvaluationContext Unevaluated(
        SemaRef, Sema::ExpressionEvaluationContext::Unevaluated);
    SubExpr = getDerived().TransformExpr(E->getDimensionExpression());
    if (SubExpr.isInvalid())
      return ExprError();
    if (!getDerived().AlwaysRebuild() && SubExpr.get() == E->getDimensionExpression())
      return E;
  }
  return getDerived().RebuildArrayTypeTrait(E->getTrait(), E->getBeginLoc(), T,
                                            SubExpr.get(), E->getEndLoc());
}
template<typename Derived>
ExprResult
TreeTransform<Derived>::TransformExpressionTraitExpr(ExpressionTraitExpr *E) {
  // Transform the queried expression in an unevaluated context; the operand
  // of an expression trait is never evaluated.
  ExprResult Queried;
  {
    EnterExpressionEvaluationContext Unevaluated(
        SemaRef, Sema::ExpressionEvaluationContext::Unevaluated);
    Queried = getDerived().TransformExpr(E->getQueriedExpression());
    if (Queried.isInvalid())
      return ExprError();
    if (Queried.get() == E->getQueriedExpression() &&
        !getDerived().AlwaysRebuild())
      return E;
  }
  return getDerived().RebuildExpressionTrait(E->getTrait(), E->getBeginLoc(),
                                             Queried.get(), E->getEndLoc());
}
/// Transform a parenthesized dependent-scope declaration reference,
/// preserving the parentheses around the transformed inner expression.
template <typename Derived>
ExprResult TreeTransform<Derived>::TransformParenDependentScopeDeclRefExpr(
    ParenExpr *PE, DependentScopeDeclRefExpr *DRE, bool AddrTaken,
    TypeSourceInfo **RecoveryTSI) {
  ExprResult NewDRE = getDerived().TransformDependentScopeDeclRefExpr(
      DRE, AddrTaken, RecoveryTSI);
  // Propagate both errors and recovered types, which return ExprEmpty.
  if (!NewDRE.isUsable())
    return NewDRE;
  // We got an expr, wrap it up in parens.
  if (!getDerived().AlwaysRebuild() && NewDRE.get() == DRE)
    return PE;
  return getDerived().RebuildParenExpr(NewDRE.get(), PE->getLParen(),
                                       PE->getRParen());
}
template <typename Derived>
ExprResult TreeTransform<Derived>::TransformDependentScopeDeclRefExpr(
    DependentScopeDeclRefExpr *E) {
  // Delegate to the full overload with no address-of context and no
  // recovery-type out-parameter.
  return TransformDependentScopeDeclRefExpr(E, /*IsAddressOfOperand=*/false,
                                            nullptr);
}
/// Transform a qualified name that could not be resolved in the template
/// definition (e.g. T::member), rebuilding the qualifier, name, and any
/// explicit template arguments.
template<typename Derived>
ExprResult
TreeTransform<Derived>::TransformDependentScopeDeclRefExpr(
                                               DependentScopeDeclRefExpr *E,
                                               bool IsAddressOfOperand,
                                               TypeSourceInfo **RecoveryTSI) {
  assert(E->getQualifierLoc());
  NestedNameSpecifierLoc QualifierLoc
    = getDerived().TransformNestedNameSpecifierLoc(E->getQualifierLoc());
  if (!QualifierLoc)
    return ExprError();
  SourceLocation TemplateKWLoc = E->getTemplateKeywordLoc();
  // TODO: If this is a conversion-function-id, verify that the
  // destination type name (if present) resolves the same way after
  // instantiation as it did in the local scope.
  DeclarationNameInfo NameInfo
    = getDerived().TransformDeclarationNameInfo(E->getNameInfo());
  if (!NameInfo.getName())
    return ExprError();
  if (!E->hasExplicitTemplateArgs()) {
    if (!getDerived().AlwaysRebuild() &&
        QualifierLoc == E->getQualifierLoc() &&
        // Note: it is sufficient to compare the Name component of NameInfo:
        // if name has not changed, DNLoc has not changed either.
        NameInfo.getName() == E->getDeclName())
      return E;
    return getDerived().RebuildDependentScopeDeclRefExpr(
        QualifierLoc, TemplateKWLoc, NameInfo, /*TemplateArgs=*/nullptr,
        IsAddressOfOperand, RecoveryTSI);
  }
  // Transform the explicit template arguments and rebuild as a template-id.
  TemplateArgumentListInfo TransArgs(E->getLAngleLoc(), E->getRAngleLoc());
  if (getDerived().TransformTemplateArguments(E->getTemplateArgs(),
                                              E->getNumTemplateArgs(),
                                              TransArgs))
    return ExprError();
  return getDerived().RebuildDependentScopeDeclRefExpr(
      QualifierLoc, TemplateKWLoc, NameInfo, &TransArgs, IsAddressOfOperand,
      RecoveryTSI);
}
/// Transform a (usually implicit) constructor call.
template<typename Derived>
ExprResult
TreeTransform<Derived>::TransformCXXConstructExpr(CXXConstructExpr *E) {
  // CXXConstructExprs other than for list-initialization and
  // CXXTemporaryObjectExpr are always implicit, so when we have
  // a 1-argument construction we just transform that argument.
  if ((E->getNumArgs() == 1 ||
       (E->getNumArgs() > 1 && getDerived().DropCallArgument(E->getArg(1)))) &&
      (!getDerived().DropCallArgument(E->getArg(0))) &&
      !E->isListInitialization())
    return getDerived().TransformExpr(E->getArg(0));
  TemporaryBase Rebase(*this, /*FIXME*/ E->getBeginLoc(), DeclarationName());
  QualType T = getDerived().TransformType(E->getType());
  if (T.isNull())
    return ExprError();
  CXXConstructorDecl *Constructor = cast_or_null<CXXConstructorDecl>(
      getDerived().TransformDecl(E->getBeginLoc(), E->getConstructor()));
  if (!Constructor)
    return ExprError();
  // Transform the constructor arguments inside an init-list evaluation
  // context when this is a list-initialization.
  bool ArgumentChanged = false;
  SmallVector<Expr*, 8> Args;
  {
    EnterExpressionEvaluationContext Context(
        getSema(), EnterExpressionEvaluationContext::InitList,
        E->isListInitialization());
    if (getDerived().TransformExprs(E->getArgs(), E->getNumArgs(), true, Args,
                                    &ArgumentChanged))
      return ExprError();
  }
  if (!getDerived().AlwaysRebuild() &&
      T == E->getType() &&
      Constructor == E->getConstructor() &&
      !ArgumentChanged) {
    // Mark the constructor as referenced.
    // FIXME: Instantiation-specific
    SemaRef.MarkFunctionReferenced(E->getBeginLoc(), Constructor);
    return E;
  }
  return getDerived().RebuildCXXConstructExpr(
      T, /*FIXME:*/ E->getBeginLoc(), Constructor, E->isElidable(), Args,
      E->hadMultipleCandidates(), E->isListInitialization(),
      E->isStdInitListInitialization(), E->requiresZeroInitialization(),
      E->getConstructionKind(), E->getParenOrBraceRange());
}
/// Transform a call to an inherited (using-declared) constructor.
template<typename Derived>
ExprResult TreeTransform<Derived>::TransformCXXInheritedCtorInitExpr(
    CXXInheritedCtorInitExpr *E) {
  QualType T = getDerived().TransformType(E->getType());
  if (T.isNull())
    return ExprError();
  CXXConstructorDecl *Constructor = cast_or_null<CXXConstructorDecl>(
      getDerived().TransformDecl(E->getBeginLoc(), E->getConstructor()));
  if (!Constructor)
    return ExprError();
  if (!getDerived().AlwaysRebuild() &&
      T == E->getType() &&
      Constructor == E->getConstructor()) {
    // Mark the constructor as referenced.
    // FIXME: Instantiation-specific
    SemaRef.MarkFunctionReferenced(E->getBeginLoc(), Constructor);
    return E;
  }
  return getDerived().RebuildCXXInheritedCtorInitExpr(
      T, E->getLocation(), Constructor,
      E->constructsVBase(), E->inheritedFromVBase());
}
/// Transform a C++ temporary-binding expression.
///
/// Since CXXBindTemporaryExpr nodes are implicitly generated, we just
/// transform the subexpression and return that; Sema will re-wrap the
/// result as needed when the transformed expression is rebuilt.
template<typename Derived>
ExprResult
TreeTransform<Derived>::TransformCXXBindTemporaryExpr(CXXBindTemporaryExpr *E) {
  return getDerived().TransformExpr(E->getSubExpr());
}
/// Transform a C++ expression that contains cleanups that should
/// be run after the expression is evaluated.
///
/// Since ExprWithCleanups nodes are implicitly generated, we
/// just transform the subexpression and return that; Sema will re-attach
/// cleanups when the transformed expression is rebuilt.
template<typename Derived>
ExprResult
TreeTransform<Derived>::TransformExprWithCleanups(ExprWithCleanups *E) {
  return getDerived().TransformExpr(E->getSubExpr());
}
/// Transform a functional-style temporary object creation, e.g. T(a, b).
template<typename Derived>
ExprResult
TreeTransform<Derived>::TransformCXXTemporaryObjectExpr(
                                                    CXXTemporaryObjectExpr *E) {
  // The written type may involve class template argument deduction.
  TypeSourceInfo *T =
      getDerived().TransformTypeWithDeducedTST(E->getTypeSourceInfo());
  if (!T)
    return ExprError();
  CXXConstructorDecl *Constructor = cast_or_null<CXXConstructorDecl>(
      getDerived().TransformDecl(E->getBeginLoc(), E->getConstructor()));
  if (!Constructor)
    return ExprError();
  // Transform the constructor arguments inside an init-list evaluation
  // context when this is a list-initialization.
  bool ArgumentChanged = false;
  SmallVector<Expr*, 8> Args;
  Args.reserve(E->getNumArgs());
  {
    EnterExpressionEvaluationContext Context(
        getSema(), EnterExpressionEvaluationContext::InitList,
        E->isListInitialization());
    // Call through getDerived() so derived transforms can override argument
    // transformation, consistent with every other call site in this file.
    if (getDerived().TransformExprs(E->getArgs(), E->getNumArgs(), true, Args,
                                    &ArgumentChanged))
      return ExprError();
  }
  if (!getDerived().AlwaysRebuild() &&
      T == E->getTypeSourceInfo() &&
      Constructor == E->getConstructor() &&
      !ArgumentChanged) {
    // FIXME: Instantiation-specific
    SemaRef.MarkFunctionReferenced(E->getBeginLoc(), Constructor);
    return SemaRef.MaybeBindToTemporary(E);
  }
  // FIXME: We should just pass E->isListInitialization(), but we're not
  // prepared to handle list-initialization without a child InitListExpr.
  SourceLocation LParenLoc = T->getTypeLoc().getEndLoc();
  return getDerived().RebuildCXXTemporaryObjectExpr(
      T, LParenLoc, Args, E->getEndLoc(),
      /*ListInitialization=*/LParenLoc.isInvalid());
}
/// Transform a lambda expression.
///
/// Rebuilds the closure type, the call operator (including its template
/// parameter list and exception specification), every capture
/// (init-captures, pack-expansion captures, and plain variable captures),
/// and finally the body, re-running Sema's lambda-building machinery on
/// the transformed pieces.
template<typename Derived>
ExprResult
TreeTransform<Derived>::TransformLambdaExpr(LambdaExpr *E) {
  // Transform any init-capture expressions before entering the scope of the
  // lambda body, because they are not semantically within that scope.
  typedef std::pair<ExprResult, QualType> InitCaptureInfoTy;
  struct TransformedInitCapture {
    // The location of the ... if the result is retaining a pack expansion.
    SourceLocation EllipsisLoc;
    // Zero or more expansions of the init-capture.
    SmallVector<InitCaptureInfoTy, 4> Expansions;
  };
  SmallVector<TransformedInitCapture, 4> InitCaptures;
  InitCaptures.resize(E->explicit_capture_end() - E->explicit_capture_begin());
  for (LambdaExpr::capture_iterator C = E->capture_begin(),
                                    CEnd = E->capture_end();
       C != CEnd; ++C) {
    if (!E->isInitCapture(C))
      continue;
    TransformedInitCapture &Result = InitCaptures[C - E->capture_begin()];
    VarDecl *OldVD = C->getCapturedVar();

    // Transform one expansion of the init-capture's initializer and append
    // the resulting (expression, type) pair; an invalid pair records an
    // error for later diagnosis in the capture-rebuilding loop below.
    auto SubstInitCapture = [&](SourceLocation EllipsisLoc,
                                Optional<unsigned> NumExpansions) {
      ExprResult NewExprInitResult = getDerived().TransformInitializer(
          OldVD->getInit(), OldVD->getInitStyle() == VarDecl::CallInit);

      if (NewExprInitResult.isInvalid()) {
        Result.Expansions.push_back(InitCaptureInfoTy(ExprError(), QualType()));
        return;
      }
      Expr *NewExprInit = NewExprInitResult.get();

      QualType NewInitCaptureType =
          getSema().buildLambdaInitCaptureInitialization(
              C->getLocation(), OldVD->getType()->isReferenceType(),
              EllipsisLoc, NumExpansions, OldVD->getIdentifier(),
              C->getCapturedVar()->getInitStyle() != VarDecl::CInit,
              NewExprInit);
      Result.Expansions.push_back(
          InitCaptureInfoTy(NewExprInit, NewInitCaptureType));
    };

    // If this is an init-capture pack, consider expanding the pack now.
    if (OldVD->isParameterPack()) {
      PackExpansionTypeLoc ExpansionTL = OldVD->getTypeSourceInfo()
                                             ->getTypeLoc()
                                             .castAs<PackExpansionTypeLoc>();
      SmallVector<UnexpandedParameterPack, 2> Unexpanded;
      SemaRef.collectUnexpandedParameterPacks(OldVD->getInit(), Unexpanded);

      // Determine whether the set of unexpanded parameter packs can and should
      // be expanded.
      bool Expand = true;
      bool RetainExpansion = false;
      Optional<unsigned> OrigNumExpansions =
          ExpansionTL.getTypePtr()->getNumExpansions();
      Optional<unsigned> NumExpansions = OrigNumExpansions;
      if (getDerived().TryExpandParameterPacks(
              ExpansionTL.getEllipsisLoc(),
              OldVD->getInit()->getSourceRange(), Unexpanded, Expand,
              RetainExpansion, NumExpansions))
        return ExprError();
      if (Expand) {
        // Substitute each pack element in turn.
        for (unsigned I = 0; I != *NumExpansions; ++I) {
          Sema::ArgumentPackSubstitutionIndexRAII SubstIndex(getSema(), I);
          SubstInitCapture(SourceLocation(), None);
        }
      }
      if (!Expand || RetainExpansion) {
        // Keep an unexpanded (or trailing) pack-expansion form.
        ForgetPartiallySubstitutedPackRAII Forget(getDerived());
        SubstInitCapture(ExpansionTL.getEllipsisLoc(), NumExpansions);
        Result.EllipsisLoc = ExpansionTL.getEllipsisLoc();
      }
    } else {
      SubstInitCapture(SourceLocation(), None);
    }
  }

  // Transform the template parameters, and add them to the current
  // instantiation scope. The null case is handled correctly.
  auto TPL = getDerived().TransformTemplateParameterList(
      E->getTemplateParameterList());

  // Transform the type of the original lambda's call operator.
  // The transformation MUST be done in the CurrentInstantiationScope since
  // it introduces a mapping of the original to the newly created
  // transformed parameters.
  TypeSourceInfo *NewCallOpTSI = nullptr;
  {
    TypeSourceInfo *OldCallOpTSI = E->getCallOperator()->getTypeSourceInfo();
    FunctionProtoTypeLoc OldCallOpFPTL =
        OldCallOpTSI->getTypeLoc().getAs<FunctionProtoTypeLoc>();

    TypeLocBuilder NewCallOpTLBuilder;
    SmallVector<QualType, 4> ExceptionStorage;
    TreeTransform *This = this; // Work around gcc.gnu.org/PR56135.
    QualType NewCallOpType = TransformFunctionProtoType(
        NewCallOpTLBuilder, OldCallOpFPTL, nullptr, Qualifiers(),
        [&](FunctionProtoType::ExceptionSpecInfo &ESI, bool &Changed) {
          return This->TransformExceptionSpec(OldCallOpFPTL.getBeginLoc(), ESI,
                                              ExceptionStorage, Changed);
        });
    if (NewCallOpType.isNull())
      return ExprError();
    NewCallOpTSI = NewCallOpTLBuilder.getTypeSourceInfo(getSema().Context,
                                                        NewCallOpType);
  }

  LambdaScopeInfo *LSI = getSema().PushLambdaScope();
  Sema::FunctionScopeRAII FuncScopeCleanup(getSema());
  LSI->GLTemplateParameterList = TPL;

  // Create the local class that will describe the lambda.
  CXXRecordDecl *OldClass = E->getLambdaClass();
  CXXRecordDecl *Class
    = getSema().createLambdaClosureType(E->getIntroducerRange(),
                                        NewCallOpTSI,
                                        /*KnownDependent=*/false,
                                        E->getCaptureDefault());
  getDerived().transformedLocalDecl(OldClass, {Class});

  // Preserve the original mangling number/context when replacing the
  // original lambda, so the transformed closure mangles identically.
  Optional<std::pair<unsigned, Decl*>> Mangling;
  if (getDerived().ReplacingOriginal())
    Mangling = std::make_pair(OldClass->getLambdaManglingNumber(),
                              OldClass->getLambdaContextDecl());

  // Build the call operator.
  CXXMethodDecl *NewCallOperator = getSema().startLambdaDefinition(
      Class, E->getIntroducerRange(), NewCallOpTSI,
      E->getCallOperator()->getEndLoc(),
      NewCallOpTSI->getTypeLoc().castAs<FunctionProtoTypeLoc>().getParams(),
      E->getCallOperator()->getConstexprKind(), Mangling);

  LSI->CallOperator = NewCallOperator;

  // Re-instantiate any uninstantiated default arguments of the call
  // operator's parameters.
  for (unsigned I = 0, NumParams = NewCallOperator->getNumParams();
       I != NumParams; ++I) {
    auto *P = NewCallOperator->getParamDecl(I);
    if (P->hasUninstantiatedDefaultArg()) {
      EnterExpressionEvaluationContext Eval(
          getSema(),
          Sema::ExpressionEvaluationContext::PotentiallyEvaluatedIfUsed, P);
      ExprResult R = getDerived().TransformExpr(
          E->getCallOperator()->getParamDecl(I)->getDefaultArg());
      // NOTE(review): R.isInvalid() is not checked here before setDefaultArg;
      // confirm whether an invalid default argument should abort the
      // transformation.
      P->setDefaultArg(R.get());
    }
  }

  getDerived().transformAttrs(E->getCallOperator(), NewCallOperator);
  getDerived().transformedLocalDecl(E->getCallOperator(), {NewCallOperator});

  // Introduce the context of the call operator.
  Sema::ContextRAII SavedContext(getSema(), NewCallOperator,
                                 /*NewThisContext*/false);

  // Enter the scope of the lambda.
  getSema().buildLambdaScope(LSI, NewCallOperator,
                             E->getIntroducerRange(),
                             E->getCaptureDefault(),
                             E->getCaptureDefaultLoc(),
                             E->hasExplicitParameters(),
                             E->hasExplicitResultType(),
                             E->isMutable());

  bool Invalid = false;

  // Transform captures.
  for (LambdaExpr::capture_iterator C = E->capture_begin(),
                                    CEnd = E->capture_end();
       C != CEnd; ++C) {
    // When we hit the first implicit capture, tell Sema that we've finished
    // the list of explicit captures.
    if (C->isImplicit())
      break;

    // Capturing 'this' is trivial.
    if (C->capturesThis()) {
      getSema().CheckCXXThisCapture(C->getLocation(), C->isExplicit(),
                                    /*BuildAndDiagnose*/ true, nullptr,
                                    C->getCaptureKind() == LCK_StarThis);
      continue;
    }
    // Captured expression will be recaptured during captured variables
    // rebuilding.
    if (C->capturesVLAType())
      continue;

    // Rebuild init-captures, including the implied field declaration.
    if (E->isInitCapture(C)) {
      TransformedInitCapture &NewC = InitCaptures[C - E->capture_begin()];

      VarDecl *OldVD = C->getCapturedVar();
      llvm::SmallVector<Decl*, 4> NewVDs;

      for (InitCaptureInfoTy &Info : NewC.Expansions) {
        ExprResult Init = Info.first;
        QualType InitQualType = Info.second;
        if (Init.isInvalid() || InitQualType.isNull()) {
          Invalid = true;
          break;
        }
        VarDecl *NewVD = getSema().createLambdaInitCaptureVarDecl(
            OldVD->getLocation(), InitQualType, NewC.EllipsisLoc,
            OldVD->getIdentifier(), OldVD->getInitStyle(), Init.get());
        if (!NewVD) {
          Invalid = true;
          break;
        }
        NewVDs.push_back(NewVD);
        getSema().addInitCapture(LSI, NewVD);
      }

      if (Invalid)
        break;

      getDerived().transformedLocalDecl(OldVD, NewVDs);
      continue;
    }

    assert(C->capturesVariable() && "unexpected kind of lambda capture");

    // Determine the capture kind for Sema.
    Sema::TryCaptureKind Kind
      = C->isImplicit()? Sema::TryCapture_Implicit
                       : C->getCaptureKind() == LCK_ByCopy
                           ? Sema::TryCapture_ExplicitByVal
                           : Sema::TryCapture_ExplicitByRef;
    SourceLocation EllipsisLoc;
    if (C->isPackExpansion()) {
      UnexpandedParameterPack Unexpanded(C->getCapturedVar(), C->getLocation());
      bool ShouldExpand = false;
      bool RetainExpansion = false;
      Optional<unsigned> NumExpansions;
      if (getDerived().TryExpandParameterPacks(C->getEllipsisLoc(),
                                               C->getLocation(),
                                               Unexpanded,
                                               ShouldExpand, RetainExpansion,
                                               NumExpansions)) {
        Invalid = true;
        continue;
      }

      if (ShouldExpand) {
        // The transform has determined that we should perform an expansion;
        // transform and capture each of the arguments.
        // expansion of the pattern. Do so.
        VarDecl *Pack = C->getCapturedVar();
        for (unsigned I = 0; I != *NumExpansions; ++I) {
          Sema::ArgumentPackSubstitutionIndexRAII SubstIndex(getSema(), I);
          VarDecl *CapturedVar
            = cast_or_null<VarDecl>(getDerived().TransformDecl(C->getLocation(),
                                                               Pack));
          if (!CapturedVar) {
            Invalid = true;
            continue;
          }

          // Capture the transformed variable.
          getSema().tryCaptureVariable(CapturedVar, C->getLocation(), Kind);
        }

        // FIXME: Retain a pack expansion if RetainExpansion is true.

        continue;
      }

      EllipsisLoc = C->getEllipsisLoc();
    }

    // Transform the captured variable.
    VarDecl *CapturedVar
      = cast_or_null<VarDecl>(getDerived().TransformDecl(C->getLocation(),
                                                         C->getCapturedVar()));
    if (!CapturedVar || CapturedVar->isInvalidDecl()) {
      Invalid = true;
      continue;
    }

    // Capture the transformed variable.
    getSema().tryCaptureVariable(CapturedVar, C->getLocation(), Kind,
                                 EllipsisLoc);
  }
  getSema().finishLambdaExplicitCaptures(LSI);

  // FIXME: Sema's lambda-building mechanism expects us to push an expression
  // evaluation context even if we're not transforming the function body.
  getSema().PushExpressionEvaluationContext(
      Sema::ExpressionEvaluationContext::PotentiallyEvaluated);

  // Instantiate the body of the lambda expression.
  StmtResult Body =
      Invalid ? StmtError() : getDerived().TransformLambdaBody(E, E->getBody());

  // ActOnLambda* will pop the function scope for us.
  FuncScopeCleanup.disable();

  if (Body.isInvalid()) {
    SavedContext.pop();
    getSema().ActOnLambdaError(E->getBeginLoc(), /*CurScope=*/nullptr,
                               /*IsInstantiation=*/true);
    return ExprError();
  }

  // Copy the LSI before ActOnFinishFunctionBody removes it.
  // FIXME: This is dumb. Store the lambda information somewhere that outlives
  // the call operator.
  auto LSICopy = *LSI;
  getSema().ActOnFinishFunctionBody(NewCallOperator, Body.get(),
                                    /*IsInstantiation*/ true);
  SavedContext.pop();

  return getSema().BuildLambdaExpr(E->getBeginLoc(), Body.get()->getEndLoc(),
                                   &LSICopy);
}
/// Default transformation of a lambda's body: delegate to the generic
/// statement transformer. Derived transforms may substitute SkipLambdaBody
/// or another strategy instead.
template <typename Derived>
StmtResult
TreeTransform<Derived>::TransformLambdaBody(LambdaExpr *E, Stmt *S) {
  StmtResult Body = TransformStmt(S);
  return Body;
}
/// Re-establish a lambda's implicit captures in the current context without
/// transforming the body statements themselves; returns the body unchanged.
template<typename Derived>
StmtResult
TreeTransform<Derived>::SkipLambdaBody(LambdaExpr *E, Stmt *S) {
  // Transform captures.
  for (LambdaExpr::capture_iterator C = E->capture_begin(),
                                    CEnd = E->capture_end();
       C != CEnd; ++C) {
    // Only implicit captures are re-established here; explicit captures
    // were already handled before the body was (not) transformed.
    if (!C->isImplicit())
      continue;

    // Capturing 'this' is trivial.
    if (C->capturesThis()) {
      getSema().CheckCXXThisCapture(C->getLocation(), C->isExplicit(),
                                    /*BuildAndDiagnose*/ true, nullptr,
                                    C->getCaptureKind() == LCK_StarThis);
      continue;
    }
    // Captured expression will be recaptured during captured variables
    // rebuilding.
    if (C->capturesVLAType())
      continue;

    assert(C->capturesVariable() && "unexpected kind of lambda capture");
    assert(!E->isInitCapture(C) && "implicit init-capture?");

    // Transform the captured variable.
    VarDecl *CapturedVar = cast_or_null<VarDecl>(
        getDerived().TransformDecl(C->getLocation(), C->getCapturedVar()));
    if (!CapturedVar || CapturedVar->isInvalidDecl())
      return StmtError();

    // Capture the transformed variable.
    getSema().tryCaptureVariable(CapturedVar, C->getLocation());
  }

  return S;
}
/// Transform a type-dependent construction like 'T(a, b)' where T is
/// dependent and no constructor could be resolved yet.
template<typename Derived>
ExprResult
TreeTransform<Derived>::TransformCXXUnresolvedConstructExpr(
                                           CXXUnresolvedConstructExpr *E) {
  // Transform the written type; this also handles a deduced template
  // specialization type (CTAD).
  TypeSourceInfo *T =
      getDerived().TransformTypeWithDeducedTST(E->getTypeSourceInfo());
  if (!T)
    return ExprError();

  bool ArgumentChanged = false;
  SmallVector<Expr*, 8> Args;
  Args.reserve(E->arg_size());
  {
    // Transform arguments in an InitList context when the original was
    // list-initialized.
    EnterExpressionEvaluationContext Context(
        getSema(), EnterExpressionEvaluationContext::InitList,
        E->isListInitialization());
    if (getDerived().TransformExprs(E->arg_begin(), E->arg_size(), true, Args,
                                    &ArgumentChanged))
      return ExprError();
  }

  if (!getDerived().AlwaysRebuild() &&
      T == E->getTypeSourceInfo() &&
      !ArgumentChanged)
    return E;

  // FIXME: we're faking the locations of the commas
  return getDerived().RebuildCXXUnresolvedConstructExpr(
      T, E->getLParenLoc(), Args, E->getRParenLoc(), E->isListInitialization());
}
/// Transform a member access whose base (or member) is type-dependent,
/// e.g. 'x.foo' or 'x->template bar<T>()' inside a template.
template<typename Derived>
ExprResult
TreeTransform<Derived>::TransformCXXDependentScopeMemberExpr(
                                               CXXDependentScopeMemberExpr *E) {
  // Transform the base of the expression.
  ExprResult Base((Expr*) nullptr);
  Expr *OldBase;
  QualType BaseType;
  QualType ObjectType;
  if (!E->isImplicitAccess()) {
    OldBase = E->getBase();
    Base = getDerived().TransformExpr(OldBase);
    if (Base.isInvalid())
      return ExprError();

    // Start the member reference and compute the object's type.
    ParsedType ObjectTy;
    bool MayBePseudoDestructor = false;
    Base = SemaRef.ActOnStartCXXMemberReference(nullptr, Base.get(),
                                                E->getOperatorLoc(),
                                      E->isArrow()? tok::arrow : tok::period,
                                                ObjectTy,
                                                MayBePseudoDestructor);
    if (Base.isInvalid())
      return ExprError();

    ObjectType = ObjectTy.get();
    BaseType = ((Expr*) Base.get())->getType();
  } else {
    OldBase = nullptr;
    BaseType = getDerived().TransformType(E->getBaseType());
    // For an implicit member access the base type is expected to be the
    // 'this' pointer type, so the PointerType cast below should succeed.
    // NOTE(review): would null-deref if BaseType were ever non-pointer.
    ObjectType = BaseType->getAs<PointerType>()->getPointeeType();
  }

  // Transform the first part of the nested-name-specifier that qualifies
  // the member name.
  NamedDecl *FirstQualifierInScope
    = getDerived().TransformFirstQualifierInScope(
                                            E->getFirstQualifierFoundInScope(),
                                            E->getQualifierLoc().getBeginLoc());

  NestedNameSpecifierLoc QualifierLoc;
  if (E->getQualifier()) {
    QualifierLoc
      = getDerived().TransformNestedNameSpecifierLoc(E->getQualifierLoc(),
                                                     ObjectType,
                                                     FirstQualifierInScope);
    if (!QualifierLoc)
      return ExprError();
  }

  SourceLocation TemplateKWLoc = E->getTemplateKeywordLoc();

  // TODO: If this is a conversion-function-id, verify that the
  // destination type name (if present) resolves the same way after
  // instantiation as it did in the local scope.

  DeclarationNameInfo NameInfo
    = getDerived().TransformDeclarationNameInfo(E->getMemberNameInfo());
  if (!NameInfo.getName())
    return ExprError();

  if (!E->hasExplicitTemplateArgs()) {
    // This is a reference to a member without an explicitly-specified
    // template argument list. Optimize for this common case.
    if (!getDerived().AlwaysRebuild() &&
        Base.get() == OldBase &&
        BaseType == E->getBaseType() &&
        QualifierLoc == E->getQualifierLoc() &&
        NameInfo.getName() == E->getMember() &&
        FirstQualifierInScope == E->getFirstQualifierFoundInScope())
      return E;

    return getDerived().RebuildCXXDependentScopeMemberExpr(Base.get(),
                                                       BaseType,
                                                       E->isArrow(),
                                                       E->getOperatorLoc(),
                                                       QualifierLoc,
                                                       TemplateKWLoc,
                                                       FirstQualifierInScope,
                                                       NameInfo,
                                                       /*TemplateArgs*/nullptr);
  }

  // Transform the explicit template argument list.
  TemplateArgumentListInfo TransArgs(E->getLAngleLoc(), E->getRAngleLoc());
  if (getDerived().TransformTemplateArguments(E->getTemplateArgs(),
                                              E->getNumTemplateArgs(),
                                              TransArgs))
    return ExprError();

  return getDerived().RebuildCXXDependentScopeMemberExpr(Base.get(),
                                                         BaseType,
                                                         E->isArrow(),
                                                         E->getOperatorLoc(),
                                                         QualifierLoc,
                                                         TemplateKWLoc,
                                                         FirstQualifierInScope,
                                                         NameInfo,
                                                         &TransArgs);
}
/// Transform a member access that refers to an overload set whose
/// resolution is deferred (e.g. a call to an overloaded member function).
template<typename Derived>
ExprResult
TreeTransform<Derived>::TransformUnresolvedMemberExpr(UnresolvedMemberExpr *Old) {
  // Transform the base of the expression.
  ExprResult Base((Expr*) nullptr);
  QualType BaseType;
  if (!Old->isImplicitAccess()) {
    Base = getDerived().TransformExpr(Old->getBase());
    if (Base.isInvalid())
      return ExprError();
    Base = getSema().PerformMemberExprBaseConversion(Base.get(),
                                                     Old->isArrow());
    if (Base.isInvalid())
      return ExprError();
    BaseType = Base.get()->getType();
  } else {
    BaseType = getDerived().TransformType(Old->getBaseType());
  }

  NestedNameSpecifierLoc QualifierLoc;
  if (Old->getQualifierLoc()) {
    QualifierLoc
      = getDerived().TransformNestedNameSpecifierLoc(Old->getQualifierLoc());
    if (!QualifierLoc)
      return ExprError();
  }

  SourceLocation TemplateKWLoc = Old->getTemplateKeywordLoc();

  LookupResult R(SemaRef, Old->getMemberNameInfo(),
                 Sema::LookupOrdinaryName);

  // Transform the declaration set.
  if (TransformOverloadExprDecls(Old, /*RequiresADL*/false, R))
    return ExprError();

  // Determine the naming class.
  if (Old->getNamingClass()) {
    CXXRecordDecl *NamingClass
      = cast_or_null<CXXRecordDecl>(getDerived().TransformDecl(
                                                          Old->getMemberLoc(),
                                                        Old->getNamingClass()));
    if (!NamingClass)
      return ExprError();

    R.setNamingClass(NamingClass);
  }

  // Transform any explicit template arguments.
  TemplateArgumentListInfo TransArgs;
  if (Old->hasExplicitTemplateArgs()) {
    TransArgs.setLAngleLoc(Old->getLAngleLoc());
    TransArgs.setRAngleLoc(Old->getRAngleLoc());
    if (getDerived().TransformTemplateArguments(Old->getTemplateArgs(),
                                                Old->getNumTemplateArgs(),
                                                TransArgs))
      return ExprError();
  }

  // FIXME: to do this check properly, we will need to preserve the
  // first-qualifier-in-scope here, just in case we had a dependent
  // base (and therefore couldn't do the check) and a
  // nested-name-qualifier (and therefore could do the lookup).
  NamedDecl *FirstQualifierInScope = nullptr;

  return getDerived().RebuildUnresolvedMemberExpr(Base.get(),
                                                  BaseType,
                                                  Old->getOperatorLoc(),
                                                  Old->isArrow(),
                                                  QualifierLoc,
                                                  TemplateKWLoc,
                                                  FirstQualifierInScope,
                                                  R,
                                              (Old->hasExplicitTemplateArgs()
                                                  ? &TransArgs : nullptr));
}
/// Transform a noexcept(expr) operator.
///
/// The operand is transformed in an unevaluated context, matching the
/// language rule that the operand of noexcept is never evaluated.
template <typename Derived>
ExprResult
TreeTransform<Derived>::TransformCXXNoexceptExpr(CXXNoexceptExpr *E) {
  EnterExpressionEvaluationContext Unevaluated(
      SemaRef, Sema::ExpressionEvaluationContext::Unevaluated);

  ExprResult Operand = getDerived().TransformExpr(E->getOperand());
  if (Operand.isInvalid())
    return ExprError();

  // Reuse the original node when the operand is unchanged.
  bool MustRebuild =
      getDerived().AlwaysRebuild() || Operand.get() != E->getOperand();
  if (!MustRebuild)
    return E;

  return getDerived().RebuildCXXNoexceptExpr(E->getSourceRange(),
                                             Operand.get());
}
/// Transform a pack-expansion expression (pattern followed by '...').
template <typename Derived>
ExprResult
TreeTransform<Derived>::TransformPackExpansionExpr(PackExpansionExpr *E) {
  // Transform the pattern that the ellipsis expands.
  ExprResult NewPattern = getDerived().TransformExpr(E->getPattern());
  if (NewPattern.isInvalid())
    return ExprError();

  // Reuse the original node when the pattern is unchanged.
  bool Changed = NewPattern.get() != E->getPattern();
  if (!Changed && !getDerived().AlwaysRebuild())
    return E;

  return getDerived().RebuildPackExpansion(NewPattern.get(),
                                           E->getEllipsisLoc(),
                                           E->getNumExpansions());
}
/// Transform a sizeof...(pack) expression, computing the pack size without
/// fully substituting the pack when possible.
template<typename Derived>
ExprResult
TreeTransform<Derived>::TransformSizeOfPackExpr(SizeOfPackExpr *E) {
  // If E is not value-dependent, then nothing will change when we transform it.
  // Note: This is an instantiation-centric view.
  if (!E->isValueDependent())
    return E;

  // The operand of sizeof... is an unevaluated context.
  EnterExpressionEvaluationContext Unevaluated(
      getSema(), Sema::ExpressionEvaluationContext::Unevaluated);

  ArrayRef<TemplateArgument> PackArgs;
  TemplateArgument ArgStorage;

  // Find the argument list to transform.
  if (E->isPartiallySubstituted()) {
    PackArgs = E->getPartialArguments();
  } else if (E->isValueDependent()) {
    UnexpandedParameterPack Unexpanded(E->getPack(), E->getPackLoc());
    bool ShouldExpand = false;
    bool RetainExpansion = false;
    Optional<unsigned> NumExpansions;
    if (getDerived().TryExpandParameterPacks(E->getOperatorLoc(), E->getPackLoc(),
                                             Unexpanded,
                                             ShouldExpand, RetainExpansion,
                                             NumExpansions))
      return ExprError();

    // If we need to expand the pack, build a template argument from it and
    // expand that.
    if (ShouldExpand) {
      auto *Pack = E->getPack();
      if (auto *TTPD = dyn_cast<TemplateTypeParmDecl>(Pack)) {
        ArgStorage = getSema().Context.getPackExpansionType(
            getSema().Context.getTypeDeclType(TTPD), None);
      } else if (auto *TTPD = dyn_cast<TemplateTemplateParmDecl>(Pack)) {
        ArgStorage = TemplateArgument(TemplateName(TTPD), None);
      } else {
        auto *VD = cast<ValueDecl>(Pack);
        ExprResult DRE = getSema().BuildDeclRefExpr(
            VD, VD->getType().getNonLValueExprType(getSema().Context),
            VD->getType()->isReferenceType() ? VK_LValue : VK_RValue,
            E->getPackLoc());
        if (DRE.isInvalid())
          return ExprError();
        ArgStorage = new (getSema().Context) PackExpansionExpr(
            getSema().Context.DependentTy, DRE.get(), E->getPackLoc(), None);
      }
      PackArgs = ArgStorage;
    }
  }

  // If we're not expanding the pack, just transform the decl.
  if (!PackArgs.size()) {
    auto *Pack = cast_or_null<NamedDecl>(
        getDerived().TransformDecl(E->getPackLoc(), E->getPack()));
    if (!Pack)
      return ExprError();
    return getDerived().RebuildSizeOfPackExpr(E->getOperatorLoc(), Pack,
                                              E->getPackLoc(),
                                              E->getRParenLoc(), None, None);
  }

  // Try to compute the result without performing a partial substitution.
  Optional<unsigned> Result = 0;
  for (const TemplateArgument &Arg : PackArgs) {
    // Non-expansion arguments each contribute exactly one element.
    if (!Arg.isPackExpansion()) {
      Result = *Result + 1;
      continue;
    }

    TemplateArgumentLoc ArgLoc;
    InventTemplateArgumentLoc(Arg, ArgLoc);

    // Find the pattern of the pack expansion.
    SourceLocation Ellipsis;
    Optional<unsigned> OrigNumExpansions;
    TemplateArgumentLoc Pattern =
        getSema().getTemplateArgumentPackExpansionPattern(ArgLoc, Ellipsis,
                                                          OrigNumExpansions);

    // Substitute under the pack expansion. Do not expand the pack (yet).
    TemplateArgumentLoc OutPattern;
    Sema::ArgumentPackSubstitutionIndexRAII SubstIndex(getSema(), -1);
    if (getDerived().TransformTemplateArgument(Pattern, OutPattern,
                                               /*Uneval*/ true))
      // NOTE(review): 'return true' implicitly constructs an invalid
      // ExprResult (same effect as ExprError()); kept for fidelity but
      // inconsistent with the surrounding explicit ExprError() style.
      return true;

    // See if we can determine the number of arguments from the result.
    Optional<unsigned> NumExpansions =
        getSema().getFullyPackExpandedSize(OutPattern.getArgument());
    if (!NumExpansions) {
      // No: we must be in an alias template expansion, and we're going to need
      // to actually expand the packs.
      Result = None;
      break;
    }

    Result = *Result + *NumExpansions;
  }

  // Common case: we could determine the number of expansions without
  // substituting.
  if (Result)
    return getDerived().RebuildSizeOfPackExpr(E->getOperatorLoc(), E->getPack(),
                                              E->getPackLoc(),
                                              E->getRParenLoc(), *Result, None);

  // Fall back to transforming the pack arguments themselves.
  TemplateArgumentListInfo TransformedPackArgs(E->getPackLoc(),
                                               E->getPackLoc());
  {
    TemporaryBase Rebase(*this, E->getPackLoc(), getBaseEntity());
    typedef TemplateArgumentLocInventIterator<
        Derived, const TemplateArgument*> PackLocIterator;
    if (TransformTemplateArguments(PackLocIterator(*this, PackArgs.begin()),
                                   PackLocIterator(*this, PackArgs.end()),
                                   TransformedPackArgs, /*Uneval*/true))
      return ExprError();
  }

  // Check whether we managed to fully-expand the pack.
  // FIXME: Is it possible for us to do so and not hit the early exit path?
  SmallVector<TemplateArgument, 8> Args;
  bool PartialSubstitution = false;
  for (auto &Loc : TransformedPackArgs.arguments()) {
    Args.push_back(Loc.getArgument());
    if (Loc.getArgument().isPackExpansion())
      PartialSubstitution = true;
  }

  if (PartialSubstitution)
    return getDerived().RebuildSizeOfPackExpr(E->getOperatorLoc(), E->getPack(),
                                              E->getPackLoc(),
                                              E->getRParenLoc(), None, Args);

  return getDerived().RebuildSizeOfPackExpr(E->getOperatorLoc(), E->getPack(),
                                            E->getPackLoc(), E->getRParenLoc(),
                                            Args.size(), None);
}
/// Transform a substituted non-type template parameter pack.
///
/// The base transform leaves the node untouched; derived transforms may
/// override this hook to perform the substitution.
template<typename Derived>
ExprResult
TreeTransform<Derived>::TransformSubstNonTypeTemplateParmPackExpr(
                                          SubstNonTypeTemplateParmPackExpr *E) {
  // Default behavior is to do nothing with this transformation.
  return E;
}
/// Transform a substituted non-type template parameter.
///
/// The base transform leaves the node untouched; derived transforms may
/// override this hook to perform the substitution.
template<typename Derived>
ExprResult
TreeTransform<Derived>::TransformSubstNonTypeTemplateParmExpr(
                                          SubstNonTypeTemplateParmExpr *E) {
  // Default behavior is to do nothing with this transformation.
  return E;
}
/// Transform a function parameter pack reference.
///
/// The base transform leaves the node untouched; derived transforms may
/// override this hook to perform the substitution.
template<typename Derived>
ExprResult
TreeTransform<Derived>::TransformFunctionParmPackExpr(FunctionParmPackExpr *E) {
  // Default behavior is to do nothing with this transformation.
  return E;
}
/// Transform a materialized temporary.
///
/// Materialization nodes are implicit, so only the wrapped temporary
/// expression is transformed; it will be re-materialized on rebuild.
template <typename Derived>
ExprResult
TreeTransform<Derived>::TransformMaterializeTemporaryExpr(
    MaterializeTemporaryExpr *E) {
  Expr *Temporary = E->GetTemporaryExpr();
  return getDerived().TransformExpr(Temporary);
}
/// Transform a C++17 fold expression, either keeping it folded (when the
/// packs cannot be expanded yet) or expanding it elementwise into a chain
/// of binary operators.
template<typename Derived>
ExprResult
TreeTransform<Derived>::TransformCXXFoldExpr(CXXFoldExpr *E) {
  Expr *Pattern = E->getPattern();

  SmallVector<UnexpandedParameterPack, 2> Unexpanded;
  getSema().collectUnexpandedParameterPacks(Pattern, Unexpanded);
  assert(!Unexpanded.empty() && "Pack expansion without parameter packs?");

  // Determine whether the set of unexpanded parameter packs can and should
  // be expanded.
  bool Expand = true;
  bool RetainExpansion = false;
  Optional<unsigned> OrigNumExpansions = E->getNumExpansions(),
                     NumExpansions = OrigNumExpansions;
  if (getDerived().TryExpandParameterPacks(E->getEllipsisLoc(),
                                           Pattern->getSourceRange(),
                                           Unexpanded,
                                           Expand, RetainExpansion,
                                           NumExpansions))
    return true;

  if (!Expand) {
    // Do not expand any packs here, just transform and rebuild a fold
    // expression.
    Sema::ArgumentPackSubstitutionIndexRAII SubstIndex(getSema(), -1);
    ExprResult LHS =
        E->getLHS() ? getDerived().TransformExpr(E->getLHS()) : ExprResult();
    if (LHS.isInvalid())
      return true;

    ExprResult RHS =
        E->getRHS() ? getDerived().TransformExpr(E->getRHS()) : ExprResult();
    if (RHS.isInvalid())
      return true;

    if (!getDerived().AlwaysRebuild() &&
        LHS.get() == E->getLHS() && RHS.get() == E->getRHS())
      return E;

    return getDerived().RebuildCXXFoldExpr(
        E->getBeginLoc(), LHS.get(), E->getOperator(), E->getEllipsisLoc(),
        RHS.get(), E->getEndLoc(), NumExpansions);
  }

  // The transform has determined that we should perform an elementwise
  // expansion of the pattern. Do so.
  // 'Result' accumulates the chain; it starts as the init operand (if any).
  ExprResult Result = getDerived().TransformExpr(E->getInit());
  if (Result.isInvalid())
    return true;
  bool LeftFold = E->isLeftFold();

  // If we're retaining an expansion for a right fold, it is the innermost
  // component and takes the init (if any).
  if (!LeftFold && RetainExpansion) {
    ForgetPartiallySubstitutedPackRAII Forget(getDerived());

    ExprResult Out = getDerived().TransformExpr(Pattern);
    if (Out.isInvalid())
      return true;

    Result = getDerived().RebuildCXXFoldExpr(
        E->getBeginLoc(), Out.get(), E->getOperator(), E->getEllipsisLoc(),
        Result.get(), E->getEndLoc(), OrigNumExpansions);
    if (Result.isInvalid())
      return true;
  }

  // Expand element by element; left folds walk the pack forward, right
  // folds walk it backward so associativity comes out correctly.
  for (unsigned I = 0; I != *NumExpansions; ++I) {
    Sema::ArgumentPackSubstitutionIndexRAII SubstIndex(
        getSema(), LeftFold ? I : *NumExpansions - I - 1);
    ExprResult Out = getDerived().TransformExpr(Pattern);
    if (Out.isInvalid())
      return true;

    if (Out.get()->containsUnexpandedParameterPack()) {
      // We still have a pack; retain a pack expansion for this slice.
      Result = getDerived().RebuildCXXFoldExpr(
          E->getBeginLoc(), LeftFold ? Result.get() : Out.get(),
          E->getOperator(), E->getEllipsisLoc(),
          LeftFold ? Out.get() : Result.get(), E->getEndLoc(),
          OrigNumExpansions);
    } else if (Result.isUsable()) {
      // We've got down to a single element; build a binary operator.
      Result = getDerived().RebuildBinaryOperator(
          E->getEllipsisLoc(), E->getOperator(),
          LeftFold ? Result.get() : Out.get(),
          LeftFold ? Out.get() : Result.get());
    } else
      Result = Out;

    if (Result.isInvalid())
      return true;
  }

  // If we're retaining an expansion for a left fold, it is the outermost
  // component and takes the complete expansion so far as its init (if any).
  if (LeftFold && RetainExpansion) {
    ForgetPartiallySubstitutedPackRAII Forget(getDerived());

    ExprResult Out = getDerived().TransformExpr(Pattern);
    if (Out.isInvalid())
      return true;

    Result = getDerived().RebuildCXXFoldExpr(
        E->getBeginLoc(), Result.get(), E->getOperator(), E->getEllipsisLoc(),
        Out.get(), E->getEndLoc(), OrigNumExpansions);
    if (Result.isInvalid())
      return true;
  }

  // If we had no init and an empty pack, and we're not retaining an expansion,
  // then produce a fallback value or error.
  if (Result.isUnset())
    return getDerived().RebuildEmptyCXXFoldExpr(E->getEllipsisLoc(),
                                                E->getOperator());

  return Result;
}
/// Transform a std::initializer_list construction.
///
/// These nodes are synthesized by Sema, so only the underlying
/// subexpression needs transforming; the wrapper is rebuilt as needed.
template <typename Derived>
ExprResult
TreeTransform<Derived>::TransformCXXStdInitializerListExpr(
    CXXStdInitializerListExpr *E) {
  Expr *Underlying = E->getSubExpr();
  return getDerived().TransformExpr(Underlying);
}
/// Objective-C string literals contain nothing to transform; simply
/// rebind the literal to a temporary.
template <typename Derived>
ExprResult
TreeTransform<Derived>::TransformObjCStringLiteral(ObjCStringLiteral *E) {
  return getSema().MaybeBindToTemporary(E);
}
/// Objective-C boolean literals (@YES/@NO) contain nothing to transform.
template<typename Derived>
ExprResult
TreeTransform<Derived>::TransformObjCBoolLiteralExpr(ObjCBoolLiteralExpr *E) {
  return E;
}
/// Transform an Objective-C boxed expression, e.g. '@(x + y)'.
template <typename Derived>
ExprResult
TreeTransform<Derived>::TransformObjCBoxedExpr(ObjCBoxedExpr *E) {
  // Transform the boxed subexpression.
  ExprResult Boxed = getDerived().TransformExpr(E->getSubExpr());
  if (Boxed.isInvalid())
    return ExprError();

  // Reuse the original node when the subexpression is unchanged.
  bool Changed = Boxed.get() != E->getSubExpr();
  if (!Changed && !getDerived().AlwaysRebuild())
    return E;

  return getDerived().RebuildObjCBoxedExpr(E->getSourceRange(), Boxed.get());
}
/// Transform an Objective-C array literal, e.g. '@[a, b, c]'.
template <typename Derived>
ExprResult
TreeTransform<Derived>::TransformObjCArrayLiteral(ObjCArrayLiteral *E) {
  // Transform each element of the literal in order.
  SmallVector<Expr *, 8> NewElements;
  bool AnyElementChanged = false;
  if (getDerived().TransformExprs(E->getElements(), E->getNumElements(),
                                  /*IsCall=*/false, NewElements,
                                  &AnyElementChanged))
    return ExprError();

  // Reuse the original literal when nothing changed.
  if (!AnyElementChanged && !getDerived().AlwaysRebuild())
    return SemaRef.MaybeBindToTemporary(E);

  return getDerived().RebuildObjCArrayLiteral(E->getSourceRange(),
                                              NewElements.data(),
                                              NewElements.size());
}
/// Transform an Objective-C dictionary literal, e.g. '@{k : v, ...}',
/// expanding any pack-expansion key/value elements as needed.
template<typename Derived>
ExprResult
TreeTransform<Derived>::TransformObjCDictionaryLiteral(
                                                    ObjCDictionaryLiteral *E) {
  // Transform each of the elements.
  SmallVector<ObjCDictionaryElement, 8> Elements;
  bool ArgChanged = false;
  for (unsigned I = 0, N = E->getNumElements(); I != N; ++I) {
    ObjCDictionaryElement OrigElement = E->getKeyValueElement(I);

    if (OrigElement.isPackExpansion()) {
      // This key/value element is a pack expansion.
      SmallVector<UnexpandedParameterPack, 2> Unexpanded;
      getSema().collectUnexpandedParameterPacks(OrigElement.Key, Unexpanded);
      getSema().collectUnexpandedParameterPacks(OrigElement.Value, Unexpanded);
      assert(!Unexpanded.empty() && "Pack expansion without parameter packs?");

      // Determine whether the set of unexpanded parameter packs can
      // and should be expanded.
      bool Expand = true;
      bool RetainExpansion = false;
      Optional<unsigned> OrigNumExpansions = OrigElement.NumExpansions;
      Optional<unsigned> NumExpansions = OrigNumExpansions;
      SourceRange PatternRange(OrigElement.Key->getBeginLoc(),
                               OrigElement.Value->getEndLoc());
      if (getDerived().TryExpandParameterPacks(OrigElement.EllipsisLoc,
                                               PatternRange, Unexpanded, Expand,
                                               RetainExpansion, NumExpansions))
        return ExprError();

      if (!Expand) {
        // The transform has determined that we should perform a simple
        // transformation on the pack expansion, producing another pack
        // expansion.
        Sema::ArgumentPackSubstitutionIndexRAII SubstIndex(getSema(), -1);
        ExprResult Key = getDerived().TransformExpr(OrigElement.Key);
        if (Key.isInvalid())
          return ExprError();

        if (Key.get() != OrigElement.Key)
          ArgChanged = true;

        ExprResult Value = getDerived().TransformExpr(OrigElement.Value);
        if (Value.isInvalid())
          return ExprError();

        if (Value.get() != OrigElement.Value)
          ArgChanged = true;

        ObjCDictionaryElement Expansion = {
          Key.get(), Value.get(), OrigElement.EllipsisLoc, NumExpansions
        };
        Elements.push_back(Expansion);
        continue;
      }

      // Record right away that the argument was changed. This needs
      // to happen even if the array expands to nothing.
      ArgChanged = true;

      // The transform has determined that we should perform an elementwise
      // expansion of the pattern. Do so.
      for (unsigned I = 0; I != *NumExpansions; ++I) {
        Sema::ArgumentPackSubstitutionIndexRAII SubstIndex(getSema(), I);
        ExprResult Key = getDerived().TransformExpr(OrigElement.Key);
        if (Key.isInvalid())
          return ExprError();

        ExprResult Value = getDerived().TransformExpr(OrigElement.Value);
        if (Value.isInvalid())
          return ExprError();

        ObjCDictionaryElement Element = {
          Key.get(), Value.get(), SourceLocation(), NumExpansions
        };

        // If any unexpanded parameter packs remain, we still have a
        // pack expansion.
        // FIXME: Can this really happen?
        if (Key.get()->containsUnexpandedParameterPack() ||
            Value.get()->containsUnexpandedParameterPack())
          Element.EllipsisLoc = OrigElement.EllipsisLoc;

        Elements.push_back(Element);
      }

      // FIXME: Retain a pack expansion if RetainExpansion is true.

      // We've finished with this pack expansion.
      continue;
    }

    // Transform and check key.
    ExprResult Key = getDerived().TransformExpr(OrigElement.Key);
    if (Key.isInvalid())
      return ExprError();

    if (Key.get() != OrigElement.Key)
      ArgChanged = true;

    // Transform and check value.
    ExprResult Value
      = getDerived().TransformExpr(OrigElement.Value);
    if (Value.isInvalid())
      return ExprError();

    if (Value.get() != OrigElement.Value)
      ArgChanged = true;

    ObjCDictionaryElement Element = {
      Key.get(), Value.get(), SourceLocation(), None
    };
    Elements.push_back(Element);
  }

  // Reuse the original literal when nothing changed.
  if (!getDerived().AlwaysRebuild() && !ArgChanged)
    return SemaRef.MaybeBindToTemporary(E);

  return getDerived().RebuildObjCDictionaryLiteral(E->getSourceRange(),
                                                   Elements);
}
// Transform an @encode(type) expression by retransforming the named type.
template <typename Derived>
ExprResult
TreeTransform<Derived>::TransformObjCEncodeExpr(ObjCEncodeExpr *E) {
  // The only transformable piece is the type written inside @encode(...).
  TypeSourceInfo *EncodedTypeInfo =
      getDerived().TransformType(E->getEncodedTypeSourceInfo());
  if (!EncodedTypeInfo)
    return ExprError();

  // Unchanged type: reuse the original expression.
  if (EncodedTypeInfo == E->getEncodedTypeSourceInfo() &&
      !getDerived().AlwaysRebuild())
    return E;

  return getDerived().RebuildObjCEncodeExpr(E->getAtLoc(), EncodedTypeInfo,
                                            E->getRParenLoc());
}
// Drop the copy-restore wrapper entirely and retransform its operand; the
// wrapper is re-synthesized (if still needed) when the argument is rebuilt.
template<typename Derived>
ExprResult TreeTransform<Derived>::
TransformObjCIndirectCopyRestoreExpr(ObjCIndirectCopyRestoreExpr *E) {
  // This is a kind of implicit conversion, and it needs to get dropped
  // and recomputed for the same general reasons that ImplicitCastExprs
  // do, as well a more specific one: this expression is only valid when
  // it appears *immediately* as an argument expression.
  return getDerived().TransformExpr(E->getSubExpr());
}
// Transform a bridged cast: retransform the written destination type and
// the operand, then rebuild via Sema only when something actually changed.
template <typename Derived>
ExprResult TreeTransform<Derived>::
TransformObjCBridgedCastExpr(ObjCBridgedCastExpr *E) {
  TypeSourceInfo *TSInfo =
      getDerived().TransformType(E->getTypeInfoAsWritten());
  if (!TSInfo)
    return ExprError();

  ExprResult Result = getDerived().TransformExpr(E->getSubExpr());
  if (Result.isInvalid())
    return ExprError();

  // Both pieces unchanged: keep the original node.
  bool Unchanged = TSInfo == E->getTypeInfoAsWritten() &&
                   Result.get() == E->getSubExpr();
  if (Unchanged && !getDerived().AlwaysRebuild())
    return E;

  return SemaRef.BuildObjCBridgedCast(E->getLParenLoc(), E->getBridgeKind(),
                                      E->getBridgeKeywordLoc(), TSInfo,
                                      Result.get());
}
// @available(...) carries only platform/version specs -- no subexpressions
// or types that could depend on template parameters -- so it is returned
// unchanged.
template <typename Derived>
ExprResult TreeTransform<Derived>::TransformObjCAvailabilityCheckExpr(
    ObjCAvailabilityCheckExpr *E) {
  return E;
}
// Transform an Objective-C message send. The arguments are transformed
// first; the receiver is then handled per receiver kind (class type,
// super, or instance), reusing the original expression when neither the
// receiver nor any argument changed.
template<typename Derived>
ExprResult
TreeTransform<Derived>::TransformObjCMessageExpr(ObjCMessageExpr *E) {
  // Transform arguments.
  bool ArgChanged = false;
  SmallVector<Expr*, 8> Args;
  Args.reserve(E->getNumArgs());
  if (getDerived().TransformExprs(E->getArgs(), E->getNumArgs(), false, Args,
                                  &ArgChanged))
    return ExprError();
  if (E->getReceiverKind() == ObjCMessageExpr::Class) {
    // Class message: transform the receiver type.
    TypeSourceInfo *ReceiverTypeInfo
      = getDerived().TransformType(E->getClassReceiverTypeInfo());
    if (!ReceiverTypeInfo)
      return ExprError();
    // If nothing changed, just retain the existing message send.
    if (!getDerived().AlwaysRebuild() &&
        ReceiverTypeInfo == E->getClassReceiverTypeInfo() && !ArgChanged)
      return SemaRef.MaybeBindToTemporary(E);
    // Build a new class message send.
    SmallVector<SourceLocation, 16> SelLocs;
    E->getSelectorLocs(SelLocs);
    return getDerived().RebuildObjCMessageExpr(ReceiverTypeInfo,
                                               E->getSelector(),
                                               SelLocs,
                                               E->getMethodDecl(),
                                               E->getLeftLoc(),
                                               Args,
                                               E->getRightLoc());
  }
  else if (E->getReceiverKind() == ObjCMessageExpr::SuperClass ||
           E->getReceiverKind() == ObjCMessageExpr::SuperInstance) {
    if (!E->getMethodDecl())
      return ExprError();
    // Build a new class message send to 'super'.
    SmallVector<SourceLocation, 16> SelLocs;
    E->getSelectorLocs(SelLocs);
    return getDerived().RebuildObjCMessageExpr(E->getSuperLoc(),
                                               E->getSelector(),
                                               SelLocs,
                                               E->getReceiverType(),
                                               E->getMethodDecl(),
                                               E->getLeftLoc(),
                                               Args,
                                               E->getRightLoc());
  }
  // Instance message: transform the receiver
  assert(E->getReceiverKind() == ObjCMessageExpr::Instance &&
         "Only class and instance messages may be instantiated");
  ExprResult Receiver
    = getDerived().TransformExpr(E->getInstanceReceiver());
  if (Receiver.isInvalid())
    return ExprError();
  // If nothing changed, just retain the existing message send.
  if (!getDerived().AlwaysRebuild() &&
      Receiver.get() == E->getInstanceReceiver() && !ArgChanged)
    return SemaRef.MaybeBindToTemporary(E);
  // Build a new instance message send.
  SmallVector<SourceLocation, 16> SelLocs;
  E->getSelectorLocs(SelLocs);
  return getDerived().RebuildObjCMessageExpr(Receiver.get(),
                                             E->getSelector(),
                                             SelLocs,
                                             E->getMethodDecl(),
                                             E->getLeftLoc(),
                                             Args,
                                             E->getRightLoc());
}
// @selector(...) contains no subexpressions or types; nothing to transform.
template<typename Derived>
ExprResult
TreeTransform<Derived>::TransformObjCSelectorExpr(ObjCSelectorExpr *E) {
  return E;
}
// @protocol(...) names a fixed protocol declaration; nothing to transform.
template<typename Derived>
ExprResult
TreeTransform<Derived>::TransformObjCProtocolExpr(ObjCProtocolExpr *E) {
  return E;
}
// Transform an instance-variable reference; only the base can change.
template <typename Derived>
ExprResult
TreeTransform<Derived>::TransformObjCIvarRefExpr(ObjCIvarRefExpr *E) {
  ExprResult Base = getDerived().TransformExpr(E->getBase());
  if (Base.isInvalid())
    return ExprError();

  // The ivar declaration itself never changes; with an unchanged base the
  // original expression can be reused.
  if (Base.get() == E->getBase() && !getDerived().AlwaysRebuild())
    return E;

  return getDerived().RebuildObjCIvarRefExpr(Base.get(), E->getDecl(),
                                             E->getLocation(), E->isArrow(),
                                             E->isFreeIvar());
}
// Transform a property reference. Only object receivers carry a
// transformable base expression; super/class receivers and the property
// declaration itself never change. Explicit and implicit properties are
// rebuilt through different overloads.
template<typename Derived>
ExprResult
TreeTransform<Derived>::TransformObjCPropertyRefExpr(ObjCPropertyRefExpr *E) {
  // 'super' and types never change. Property never changes. Just
  // retain the existing expression.
  if (!E->isObjectReceiver())
    return E;
  // Transform the base expression.
  ExprResult Base = getDerived().TransformExpr(E->getBase());
  if (Base.isInvalid())
    return ExprError();
  // We don't need to transform the property; it will never change.
  // If nothing changed, just retain the existing expression.
  if (!getDerived().AlwaysRebuild() &&
      Base.get() == E->getBase())
    return E;
  if (E->isExplicitProperty())
    return getDerived().RebuildObjCPropertyRefExpr(Base.get(),
                                                   E->getExplicitProperty(),
                                                   E->getLocation());
  // Implicit property: rebuilt from its getter/setter method pair.
  return getDerived().RebuildObjCPropertyRefExpr(Base.get(),
                                                 SemaRef.Context.PseudoObjectTy,
                                                 E->getImplicitPropertyGetter(),
                                                 E->getImplicitPropertySetter(),
                                                 E->getLocation());
}
// Transform an Objective-C subscript (obj[idx]) pseudo-object expression.
template<typename Derived>
ExprResult
TreeTransform<Derived>::TransformObjCSubscriptRefExpr(ObjCSubscriptRefExpr *E) {
  // Transform the base expression.
  ExprResult Base = getDerived().TransformExpr(E->getBaseExpr());
  if (Base.isInvalid())
    return ExprError();
  // Transform the key expression.
  ExprResult Key = getDerived().TransformExpr(E->getKeyExpr());
  if (Key.isInvalid())
    return ExprError();
  // If nothing changed, just retain the existing expression.
  if (!getDerived().AlwaysRebuild() &&
      Key.get() == E->getKeyExpr() && Base.get() == E->getBaseExpr())
    return E;
  // Note: getAtIndexMethodDecl()/setAtIndexMethodDecl() are accessors for
  // the subscript's getter and setter *methods*; despite the name,
  // setAtIndexMethodDecl() is not a mutator.
  return getDerived().RebuildObjCSubscriptRefExpr(E->getRBracket(),
                                                  Base.get(), Key.get(),
                                                  E->getAtIndexMethodDecl(),
                                                  E->setAtIndexMethodDecl());
}
// Transform an obj->isa / obj.isa access; only the base can change.
template <typename Derived>
ExprResult
TreeTransform<Derived>::TransformObjCIsaExpr(ObjCIsaExpr *E) {
  ExprResult Base = getDerived().TransformExpr(E->getBase());
  if (Base.isInvalid())
    return ExprError();

  // Unchanged base: keep the original expression.
  if (Base.get() == E->getBase() && !getDerived().AlwaysRebuild())
    return E;

  return getDerived().RebuildObjCIsaExpr(Base.get(), E->getIsaMemberLoc(),
                                         E->getOpLoc(), E->isArrow());
}
// Transform a __builtin_shufflevector call: every operand (the source
// vectors and the shuffle indices) is retransformed together.
template <typename Derived>
ExprResult
TreeTransform<Derived>::TransformShuffleVectorExpr(ShuffleVectorExpr *E) {
  bool ArgumentChanged = false;
  SmallVector<Expr *, 8> SubExprs;
  SubExprs.reserve(E->getNumSubExprs());
  if (getDerived().TransformExprs(E->getSubExprs(), E->getNumSubExprs(),
                                  /*IsCall=*/false, SubExprs,
                                  &ArgumentChanged))
    return ExprError();

  if (!ArgumentChanged && !getDerived().AlwaysRebuild())
    return E;

  return getDerived().RebuildShuffleVectorExpr(E->getBuiltinLoc(), SubExprs,
                                               E->getRParenLoc());
}
// Transform a __builtin_convertvector(src, type) call.
template <typename Derived>
ExprResult
TreeTransform<Derived>::TransformConvertVectorExpr(ConvertVectorExpr *E) {
  ExprResult SrcExpr = getDerived().TransformExpr(E->getSrcExpr());
  if (SrcExpr.isInvalid())
    return ExprError();

  TypeSourceInfo *Type = getDerived().TransformType(E->getTypeSourceInfo());
  if (!Type)
    return ExprError();

  // Neither the source nor the destination type changed: keep the original.
  if (Type == E->getTypeSourceInfo() && SrcExpr.get() == E->getSrcExpr() &&
      !getDerived().AlwaysRebuild())
    return E;

  return getDerived().RebuildConvertVectorExpr(E->getBuiltinLoc(),
                                               SrcExpr.get(), Type,
                                               E->getRParenLoc());
}
// Transform a block literal. Sema's block-scope bookkeeping is stateful:
// ActOnBlockStart opens the scope, every failure path must call
// ActOnBlockError before returning, and success ends in ActOnBlockStmtExpr.
template<typename Derived>
ExprResult
TreeTransform<Derived>::TransformBlockExpr(BlockExpr *E) {
  BlockDecl *oldBlock = E->getBlockDecl();
  SemaRef.ActOnBlockStart(E->getCaretLocation(), /*Scope=*/nullptr);
  BlockScopeInfo *blockScope = SemaRef.getCurBlock();
  blockScope->TheDecl->setIsVariadic(oldBlock->isVariadic());
  blockScope->TheDecl->setBlockMissingReturnType(
                         oldBlock->blockMissingReturnType());
  SmallVector<ParmVarDecl*, 4> params;
  SmallVector<QualType, 4> paramTypes;
  const FunctionProtoType *exprFunctionType = E->getFunctionType();
  // Parameter substitution.
  Sema::ExtParameterInfoBuilder extParamInfos;
  if (getDerived().TransformFunctionTypeParams(
          E->getCaretLocation(), oldBlock->parameters(), nullptr,
          exprFunctionType->getExtParameterInfosOrNull(), paramTypes, &params,
          extParamInfos)) {
    getSema().ActOnBlockError(E->getCaretLocation(), /*Scope=*/nullptr);
    return ExprError();
  }
  // Rebuild the block's function type from the transformed return and
  // parameter types, preserving the original's extended prototype info.
  QualType exprResultType =
      getDerived().TransformType(exprFunctionType->getReturnType());
  auto epi = exprFunctionType->getExtProtoInfo();
  epi.ExtParameterInfos = extParamInfos.getPointerOrNull(paramTypes.size());
  QualType functionType =
      getDerived().RebuildFunctionProtoType(exprResultType, paramTypes, epi);
  blockScope->FunctionType = functionType;
  // Set the parameters on the block decl.
  if (!params.empty())
    blockScope->TheDecl->setParams(params);
  if (!oldBlock->blockMissingReturnType()) {
    blockScope->HasImplicitReturnType = false;
    blockScope->ReturnType = exprResultType;
  }
  // Transform the body
  StmtResult body = getDerived().TransformStmt(E->getBody());
  if (body.isInvalid()) {
    getSema().ActOnBlockError(E->getCaretLocation(), /*Scope=*/nullptr);
    return ExprError();
  }
#ifndef NDEBUG
  // In builds with assertions, make sure that we captured everything we
  // captured before.
  if (!SemaRef.getDiagnostics().hasErrorOccurred()) {
    for (const auto &I : oldBlock->captures()) {
      VarDecl *oldCapture = I.getVariable();
      // Ignore parameter packs.
      if (oldCapture->isParameterPack())
        continue;
      VarDecl *newCapture =
        cast<VarDecl>(getDerived().TransformDecl(E->getCaretLocation(),
                                                 oldCapture));
      assert(blockScope->CaptureMap.count(newCapture));
    }
    assert(oldBlock->capturesCXXThis() == blockScope->isCXXThisCaptured());
  }
#endif
  return SemaRef.ActOnBlockStmtExpr(E->getCaretLocation(), body.get(),
                                    /*Scope=*/nullptr);
}
// Transforming __builtin_astype is unimplemented; reaching this is a bug
// in the caller.
template<typename Derived>
ExprResult
TreeTransform<Derived>::TransformAsTypeExpr(AsTypeExpr *E) {
  llvm_unreachable("Cannot transform asType expressions yet");
}
// Transform an atomic builtin (__c11_atomic_* / __atomic_*) expression:
// retransform the result type and every operand, rebuilding only when
// something changed.
template<typename Derived>
ExprResult
TreeTransform<Derived>::TransformAtomicExpr(AtomicExpr *E) {
  QualType RetTy = getDerived().TransformType(E->getType());
  // Fix: a failed type transform returns a null QualType; the original code
  // passed it straight into RebuildAtomicExpr.
  if (RetTy.isNull())
    return ExprError();
  bool ArgumentChanged = false;
  SmallVector<Expr*, 8> SubExprs;
  SubExprs.reserve(E->getNumSubExprs());
  if (getDerived().TransformExprs(E->getSubExprs(), E->getNumSubExprs(), false,
                                  SubExprs, &ArgumentChanged))
    return ExprError();
  // No operand changed: retain the original expression.
  if (!getDerived().AlwaysRebuild() &&
      !ArgumentChanged)
    return E;
  return getDerived().RebuildAtomicExpr(E->getBuiltinLoc(), SubExprs,
                                        RetTy, E->getOp(), E->getRParenLoc());
}
//===----------------------------------------------------------------------===//
// Type reconstruction
//===----------------------------------------------------------------------===//
// Delegate pointer-type construction to Sema for semantic checking.
template<typename Derived>
QualType TreeTransform<Derived>::RebuildPointerType(QualType PointeeType,
                                                   SourceLocation Star) {
  return SemaRef.BuildPointerType(PointeeType, Star,
                                  getDerived().getBaseEntity());
}
// Delegate block-pointer-type construction to Sema for semantic checking.
template<typename Derived>
QualType TreeTransform<Derived>::RebuildBlockPointerType(QualType PointeeType,
                                                         SourceLocation Star) {
  return SemaRef.BuildBlockPointerType(PointeeType, Star,
                                       getDerived().getBaseEntity());
}
// Delegate reference-type construction to Sema; WrittenAsLValue selects
// lvalue (&) versus rvalue (&&) reference as spelled in source.
template<typename Derived>
QualType
TreeTransform<Derived>::RebuildReferenceType(QualType ReferentType,
                                             bool WrittenAsLValue,
                                             SourceLocation Sigil) {
  return SemaRef.BuildReferenceType(ReferentType, WrittenAsLValue,
                                    Sigil, getDerived().getBaseEntity());
}
// Delegate member-pointer-type (T Class::*) construction to Sema.
template<typename Derived>
QualType
TreeTransform<Derived>::RebuildMemberPointerType(QualType PointeeType,
                                                 QualType ClassType,
                                                 SourceLocation Sigil) {
  return SemaRef.BuildMemberPointerType(PointeeType, ClassType, Sigil,
                                        getDerived().getBaseEntity());
}
// Rebuild an Objective-C type-parameter type with its protocol
// qualifiers; failures are reported as hard errors (FailOnError=true).
template<typename Derived>
QualType TreeTransform<Derived>::RebuildObjCTypeParamType(
           const ObjCTypeParamDecl *Decl,
           SourceLocation ProtocolLAngleLoc,
           ArrayRef<ObjCProtocolDecl *> Protocols,
           ArrayRef<SourceLocation> ProtocolLocs,
           SourceLocation ProtocolRAngleLoc) {
  return SemaRef.BuildObjCTypeParamType(Decl,
                                        ProtocolLAngleLoc, Protocols,
                                        ProtocolLocs, ProtocolRAngleLoc,
                                        /*FailOnError=*/true);
}
// Rebuild a specialized/qualified Objective-C object type (base type plus
// optional type arguments and protocol qualifiers); FailOnError=true makes
// Sema emit diagnostics rather than silently recovering.
template<typename Derived>
QualType TreeTransform<Derived>::RebuildObjCObjectType(
           QualType BaseType,
           SourceLocation Loc,
           SourceLocation TypeArgsLAngleLoc,
           ArrayRef<TypeSourceInfo *> TypeArgs,
           SourceLocation TypeArgsRAngleLoc,
           SourceLocation ProtocolLAngleLoc,
           ArrayRef<ObjCProtocolDecl *> Protocols,
           ArrayRef<SourceLocation> ProtocolLocs,
           SourceLocation ProtocolRAngleLoc) {
  return SemaRef.BuildObjCObjectType(BaseType, Loc, TypeArgsLAngleLoc,
                                     TypeArgs, TypeArgsRAngleLoc,
                                     ProtocolLAngleLoc, Protocols, ProtocolLocs,
                                     ProtocolRAngleLoc,
                                     /*FailOnError=*/true);
}
// Rebuild an Objective-C object pointer directly on the ASTContext.
// The Star location is accepted for interface symmetry but not used here.
template<typename Derived>
QualType TreeTransform<Derived>::RebuildObjCObjectPointerType(
           QualType PointeeType,
           SourceLocation Star) {
  return SemaRef.Context.getObjCObjectPointerType(PointeeType);
}
// Common worker for all array-type rebuilds. If a size expression is given
// (or there is no constant size at all), delegate directly; otherwise a
// size expression must be synthesized for Sema: pick the unsigned integer
// type whose width matches the APInt and wrap the value in an
// IntegerLiteral.
template<typename Derived>
QualType
TreeTransform<Derived>::RebuildArrayType(QualType ElementType,
                                         ArrayType::ArraySizeModifier SizeMod,
                                         const llvm::APInt *Size,
                                         Expr *SizeExpr,
                                         unsigned IndexTypeQuals,
                                         SourceRange BracketsRange) {
  if (SizeExpr || !Size)
    return SemaRef.BuildArrayType(ElementType, SizeMod, SizeExpr,
                                  IndexTypeQuals, BracketsRange,
                                  getDerived().getBaseEntity());
  QualType Types[] = {
    SemaRef.Context.UnsignedCharTy, SemaRef.Context.UnsignedShortTy,
    SemaRef.Context.UnsignedIntTy, SemaRef.Context.UnsignedLongTy,
    SemaRef.Context.UnsignedLongLongTy, SemaRef.Context.UnsignedInt128Ty
  };
  const unsigned NumTypes = llvm::array_lengthof(Types);
  QualType SizeType;
  for (unsigned I = 0; I != NumTypes; ++I)
    if (Size->getBitWidth() == SemaRef.Context.getIntWidth(Types[I])) {
      SizeType = Types[I];
      break;
    }
  // NOTE(review): if no unsigned type matches the bit width, SizeType stays
  // null here -- presumably callers only pass widths of standard integer
  // types; confirm.
  // Note that we can return a VariableArrayType here in the case where
  // the element type was a dependent VariableArrayType.
  IntegerLiteral *ArraySize
      = IntegerLiteral::Create(SemaRef.Context, *Size, SizeType,
                               /*FIXME*/BracketsRange.getBegin());
  return SemaRef.BuildArrayType(ElementType, SizeMod, ArraySize,
                                IndexTypeQuals, BracketsRange,
                                getDerived().getBaseEntity());
}
// Constant-size array: forward to the common array worker with the size
// value and no size expression.
template<typename Derived>
QualType
TreeTransform<Derived>::RebuildConstantArrayType(QualType ElementType,
                                                 ArrayType::ArraySizeModifier SizeMod,
                                                 const llvm::APInt &Size,
                                                 unsigned IndexTypeQuals,
                                                 SourceRange BracketsRange) {
  return getDerived().RebuildArrayType(ElementType, SizeMod, &Size, nullptr,
                                        IndexTypeQuals, BracketsRange);
}
// Incomplete array (T[]): neither a size value nor a size expression.
template<typename Derived>
QualType
TreeTransform<Derived>::RebuildIncompleteArrayType(QualType ElementType,
                                                   ArrayType::ArraySizeModifier SizeMod,
                                                   unsigned IndexTypeQuals,
                                                   SourceRange BracketsRange) {
  return getDerived().RebuildArrayType(ElementType, SizeMod, nullptr, nullptr,
                                       IndexTypeQuals, BracketsRange);
}
// Variable-length array: size is given only as a (runtime) expression.
template<typename Derived>
QualType
TreeTransform<Derived>::RebuildVariableArrayType(QualType ElementType,
                                                 ArrayType::ArraySizeModifier SizeMod,
                                                 Expr *SizeExpr,
                                                 unsigned IndexTypeQuals,
                                                 SourceRange BracketsRange) {
  return getDerived().RebuildArrayType(ElementType, SizeMod, nullptr,
                                       SizeExpr,
                                       IndexTypeQuals, BracketsRange);
}
// Dependent-size array: the size expression is still value-dependent.
template<typename Derived>
QualType
TreeTransform<Derived>::RebuildDependentSizedArrayType(QualType ElementType,
                                                       ArrayType::ArraySizeModifier SizeMod,
                                                       Expr *SizeExpr,
                                                       unsigned IndexTypeQuals,
                                                       SourceRange BracketsRange) {
  return getDerived().RebuildArrayType(ElementType, SizeMod, nullptr,
                                       SizeExpr,
                                       IndexTypeQuals, BracketsRange);
}
// Rebuild a type with a dependent address_space attribute via Sema.
template <typename Derived>
QualType TreeTransform<Derived>::RebuildDependentAddressSpaceType(
    QualType PointeeType, Expr *AddrSpaceExpr, SourceLocation AttributeLoc) {
  return SemaRef.BuildAddressSpaceAttr(PointeeType, AddrSpaceExpr,
                                       AttributeLoc);
}
// Rebuild a non-dependent vector type directly on the ASTContext.
template <typename Derived>
QualType
TreeTransform<Derived>::RebuildVectorType(QualType ElementType,
                                          unsigned NumElements,
                                          VectorType::VectorKind VecKind) {
  // FIXME: semantic checking!
  return SemaRef.Context.getVectorType(ElementType, NumElements, VecKind);
}
// Rebuild a vector type whose size is still a dependent expression.
// NOTE(review): VecKind is accepted but not forwarded to BuildVectorType --
// confirm whether the vector kind can be lost on this path.
template <typename Derived>
QualType TreeTransform<Derived>::RebuildDependentVectorType(
    QualType ElementType, Expr *SizeExpr, SourceLocation AttributeLoc,
    VectorType::VectorKind VecKind) {
  return SemaRef.BuildVectorType(ElementType, SizeExpr, AttributeLoc);
}
// Rebuild an ext_vector type. Sema's entry point takes the element count
// as an expression, so the constant is wrapped in a synthesized
// IntegerLiteral of type 'int'.
template <typename Derived>
QualType TreeTransform<Derived>::RebuildExtVectorType(QualType ElementType,
                                                      unsigned NumElements,
                                                 SourceLocation AttributeLoc) {
  const unsigned IntWidth = SemaRef.Context.getIntWidth(SemaRef.Context.IntTy);
  llvm::APInt NumElts(IntWidth, NumElements, /*isSigned=*/true);
  IntegerLiteral *VectorSize = IntegerLiteral::Create(
      SemaRef.Context, NumElts, SemaRef.Context.IntTy, AttributeLoc);
  return SemaRef.BuildExtVectorType(ElementType, VectorSize, AttributeLoc);
}
// Rebuild an ext_vector type whose size is still a dependent expression.
template<typename Derived>
QualType
TreeTransform<Derived>::RebuildDependentSizedExtVectorType(QualType ElementType,
                                                           Expr *SizeExpr,
                                                  SourceLocation AttributeLoc) {
  return SemaRef.BuildExtVectorType(ElementType, SizeExpr, AttributeLoc);
}
// Rebuild a prototyped function type via Sema, using the derived class's
// base location/entity for any diagnostics.
template<typename Derived>
QualType TreeTransform<Derived>::RebuildFunctionProtoType(
    QualType T,
    MutableArrayRef<QualType> ParamTypes,
    const FunctionProtoType::ExtProtoInfo &EPI) {
  return SemaRef.BuildFunctionType(T, ParamTypes,
                                   getDerived().getBaseLocation(),
                                   getDerived().getBaseEntity(),
                                   EPI);
}
// Rebuild a K&R-style (no-prototype) function type on the ASTContext.
template<typename Derived>
QualType TreeTransform<Derived>::RebuildFunctionNoProtoType(QualType T) {
  return SemaRef.Context.getFunctionNoProtoType(T);
}
// Map a (possibly now-resolved) using declaration back to a type. Three
// cases: a UsingPackDecl (pick a resolved expansion type, asserting they
// all agree), a resolved using-typename UsingDecl (exactly one shadow
// decl), or a still-unresolved UsingTypenameDecl (the decl itself is the
// type).
template<typename Derived>
QualType TreeTransform<Derived>::RebuildUnresolvedUsingType(SourceLocation Loc,
                                                            Decl *D) {
  assert(D && "no decl found");
  if (D->isInvalidDecl()) return QualType();
  // FIXME: Doesn't account for ObjCInterfaceDecl!
  TypeDecl *Ty;
  if (auto *UPD = dyn_cast<UsingPackDecl>(D)) {
    // A valid resolved using typename pack expansion decl can have multiple
    // UsingDecls, but they must each have exactly one type, and it must be
    // the same type in every case. But we must have at least one expansion!
    if (UPD->expansions().empty()) {
      getSema().Diag(Loc, diag::err_using_pack_expansion_empty)
          << UPD->isCXXClassMember() << UPD;
      return QualType();
    }
    // We might still have some unresolved types. Try to pick a resolved type
    // if we can. The final instantiation will check that the remaining
    // unresolved types instantiate to the type we pick.
    QualType FallbackT;
    QualType T;
    for (auto *E : UPD->expansions()) {
      QualType ThisT = RebuildUnresolvedUsingType(Loc, E);
      if (ThisT.isNull())
        continue;
      else if (ThisT->getAs<UnresolvedUsingType>())
        FallbackT = ThisT;
      else if (T.isNull())
        T = ThisT;
      else
        assert(getSema().Context.hasSameType(ThisT, T) &&
               "mismatched resolved types in using pack expansion");
    }
    return T.isNull() ? FallbackT : T;
  } else if (auto *Using = dyn_cast<UsingDecl>(D)) {
    assert(Using->hasTypename() &&
           "UnresolvedUsingTypenameDecl transformed to non-typename using");
    // A valid resolved using typename decl points to exactly one type decl.
    assert(++Using->shadow_begin() == Using->shadow_end());
    Ty = cast<TypeDecl>((*Using->shadow_begin())->getTargetDecl());
  } else {
    assert(isa<UnresolvedUsingTypenameDecl>(D) &&
           "UnresolvedUsingTypenameDecl transformed to non-using decl");
    Ty = cast<UnresolvedUsingTypenameDecl>(D);
  }
  return SemaRef.Context.getTypeDeclType(Ty);
}
// Rebuild typeof(expr) via Sema (re-checks the operand expression).
template<typename Derived>
QualType TreeTransform<Derived>::RebuildTypeOfExprType(Expr *E,
                                                       SourceLocation Loc) {
  return SemaRef.BuildTypeofExprType(E, Loc);
}
// Rebuild typeof(type) directly on the ASTContext.
template<typename Derived>
QualType TreeTransform<Derived>::RebuildTypeOfType(QualType Underlying) {
  return SemaRef.Context.getTypeOfType(Underlying);
}
// Rebuild decltype(expr) via Sema (re-checks the operand expression).
template<typename Derived>
QualType TreeTransform<Derived>::RebuildDecltypeType(Expr *E,
                                                     SourceLocation Loc) {
  return SemaRef.BuildDecltypeType(E, Loc);
}
// Rebuild a unary type transform (e.g. __underlying_type) via Sema.
template<typename Derived>
QualType TreeTransform<Derived>::RebuildUnaryTransformType(QualType BaseType,
                                            UnaryTransformType::UTTKind UKind,
                                            SourceLocation Loc) {
  return SemaRef.BuildUnaryTransformType(BaseType, UKind, Loc);
}
// Rebuild a template-id type (Template<Args...>) via Sema's checker.
template<typename Derived>
QualType TreeTransform<Derived>::RebuildTemplateSpecializationType(
                                                      TemplateName Template,
                                             SourceLocation TemplateNameLoc,
                                     TemplateArgumentListInfo &TemplateArgs) {
  return SemaRef.CheckTemplateIdType(Template, TemplateNameLoc, TemplateArgs);
}
// Rebuild _Atomic(T) via Sema for semantic checking.
template<typename Derived>
QualType TreeTransform<Derived>::RebuildAtomicType(QualType ValueType,
                                                   SourceLocation KWLoc) {
  return SemaRef.BuildAtomicType(ValueType, KWLoc);
}
// Rebuild an OpenCL pipe type, dispatching on read vs. write access.
template<typename Derived>
QualType TreeTransform<Derived>::RebuildPipeType(QualType ValueType,
                                                 SourceLocation KWLoc,
                                                 bool isReadPipe) {
  return isReadPipe ? SemaRef.BuildReadPipeType(ValueType, KWLoc)
                    : SemaRef.BuildWritePipeType(ValueType, KWLoc);
}
// Rebuild a qualified template name (NNS::template-name) for a known
// template declaration.
template<typename Derived>
TemplateName
TreeTransform<Derived>::RebuildTemplateName(CXXScopeSpec &SS,
                                            bool TemplateKW,
                                            TemplateDecl *Template) {
  return SemaRef.Context.getQualifiedTemplateName(SS.getScopeRep(), TemplateKW,
                                                  Template);
}
// Rebuild a dependent template name spelled with an identifier, going
// through Sema's ActOnDependentTemplateName for lookup and checking.
// NOTE(review): the TemplateNameKind result of ActOnDependentTemplateName
// is discarded; on failure Template.get() is presumably a null
// TemplateName -- confirm callers handle that.
template<typename Derived>
TemplateName
TreeTransform<Derived>::RebuildTemplateName(CXXScopeSpec &SS,
                                            SourceLocation TemplateKWLoc,
                                            const IdentifierInfo &Name,
                                            SourceLocation NameLoc,
                                            QualType ObjectType,
                                            NamedDecl *FirstQualifierInScope,
                                            bool AllowInjectedClassName) {
  UnqualifiedId TemplateName;
  TemplateName.setIdentifier(&Name, NameLoc);
  Sema::TemplateTy Template;
  getSema().ActOnDependentTemplateName(/*Scope=*/nullptr,
                                       SS, TemplateKWLoc, TemplateName,
                                       ParsedType::make(ObjectType),
                                       /*EnteringContext=*/false,
                                       Template, AllowInjectedClassName);
  return Template.get();
}
// Rebuild a dependent template name spelled with an overloaded operator
// (e.g. T::template operator+), via Sema's ActOnDependentTemplateName.
template<typename Derived>
TemplateName
TreeTransform<Derived>::RebuildTemplateName(CXXScopeSpec &SS,
                                            SourceLocation TemplateKWLoc,
                                            OverloadedOperatorKind Operator,
                                            SourceLocation NameLoc,
                                            QualType ObjectType,
                                            bool AllowInjectedClassName) {
  UnqualifiedId Name;
  // FIXME: Bogus location information.
  SourceLocation SymbolLocations[3] = { NameLoc, NameLoc, NameLoc };
  Name.setOperatorFunctionId(NameLoc, Operator, SymbolLocations);
  Sema::TemplateTy Template;
  getSema().ActOnDependentTemplateName(/*Scope=*/nullptr,
                                       SS, TemplateKWLoc, Name,
                                       ParsedType::make(ObjectType),
                                       /*EnteringContext=*/false,
                                       Template, AllowInjectedClassName);
  return Template.get();
}
// Rebuild a call to an overloaded operator, re-running overload
// resolution. Builtin fast paths are taken when no operand has an
// overloadable (class/enum) type; otherwise candidates are gathered from
// the callee and Sema's CreateOverloaded* entry points perform the
// resolution, with ADL when the original lookup required it.
template<typename Derived>
ExprResult
TreeTransform<Derived>::RebuildCXXOperatorCallExpr(OverloadedOperatorKind Op,
                                                   SourceLocation OpLoc,
                                                   Expr *OrigCallee,
                                                   Expr *First,
                                                   Expr *Second) {
  Expr *Callee = OrigCallee->IgnoreParenCasts();
  bool isPostIncDec = Second && (Op == OO_PlusPlus || Op == OO_MinusMinus);
  // ObjC property operands must be resolved to pseudo-object loads/stores
  // before overload resolution can run.
  if (First->getObjectKind() == OK_ObjCProperty) {
    BinaryOperatorKind Opc = BinaryOperator::getOverloadedOpcode(Op);
    if (BinaryOperator::isAssignmentOp(Opc))
      return SemaRef.checkPseudoObjectAssignment(/*Scope=*/nullptr, OpLoc, Opc,
                                                 First, Second);
    ExprResult Result = SemaRef.CheckPlaceholderExpr(First);
    if (Result.isInvalid())
      return ExprError();
    First = Result.get();
  }
  if (Second && Second->getObjectKind() == OK_ObjCProperty) {
    ExprResult Result = SemaRef.CheckPlaceholderExpr(Second);
    if (Result.isInvalid())
      return ExprError();
    Second = Result.get();
  }
  // Determine whether this should be a builtin operation.
  if (Op == OO_Subscript) {
    if (!First->getType()->isOverloadableType() &&
        !Second->getType()->isOverloadableType())
      return getSema().CreateBuiltinArraySubscriptExpr(
          First, Callee->getBeginLoc(), Second, OpLoc);
  } else if (Op == OO_Arrow) {
    // -> is never a builtin operation.
    return SemaRef.BuildOverloadedArrowExpr(nullptr, First, OpLoc);
  } else if (Second == nullptr || isPostIncDec) {
    if (!First->getType()->isOverloadableType() ||
        (Op == OO_Amp && getSema().isQualifiedMemberAccess(First))) {
      // The argument is not of overloadable type, or this is an expression
      // of the form &Class::member, so try to create a built-in unary
      // operation.
      UnaryOperatorKind Opc
        = UnaryOperator::getOverloadedOpcode(Op, isPostIncDec);
      return getSema().CreateBuiltinUnaryOp(OpLoc, Opc, First);
    }
  } else {
    if (!First->getType()->isOverloadableType() &&
        !Second->getType()->isOverloadableType()) {
      // Neither of the arguments is an overloadable type, so try to
      // create a built-in binary operation.
      BinaryOperatorKind Opc = BinaryOperator::getOverloadedOpcode(Op);
      ExprResult Result
        = SemaRef.CreateBuiltinBinOp(OpLoc, Opc, First, Second);
      if (Result.isInvalid())
        return ExprError();
      return Result;
    }
  }
  // Compute the transformed set of functions (and function templates) to be
  // used during overload resolution.
  UnresolvedSet<16> Functions;
  bool RequiresADL;
  if (UnresolvedLookupExpr *ULE = dyn_cast<UnresolvedLookupExpr>(Callee)) {
    Functions.append(ULE->decls_begin(), ULE->decls_end());
    // If the overload could not be resolved in the template definition
    // (because we had a dependent argument), ADL is performed as part of
    // template instantiation.
    RequiresADL = ULE->requiresADL();
  } else {
    // If we've resolved this to a particular non-member function, just call
    // that function. If we resolved it to a member function,
    // CreateOverloaded* will find that function for us.
    NamedDecl *ND = cast<DeclRefExpr>(Callee)->getDecl();
    if (!isa<CXXMethodDecl>(ND))
      Functions.addDecl(ND);
    RequiresADL = false;
  }
  // Add any functions found via argument-dependent lookup.
  Expr *Args[2] = { First, Second };
  unsigned NumArgs = 1 + (Second != nullptr);
  // Create the overloaded operator invocation for unary operators.
  if (NumArgs == 1 || isPostIncDec) {
    UnaryOperatorKind Opc
      = UnaryOperator::getOverloadedOpcode(Op, isPostIncDec);
    return SemaRef.CreateOverloadedUnaryOp(OpLoc, Opc, Functions, First,
                                           RequiresADL);
  }
  if (Op == OO_Subscript) {
    SourceLocation LBrace;
    SourceLocation RBrace;
    // Recover the bracket locations from the callee's name info when the
    // callee is a resolved operator[]; otherwise approximate them.
    if (DeclRefExpr *DRE = dyn_cast<DeclRefExpr>(Callee)) {
        DeclarationNameLoc NameLoc = DRE->getNameInfo().getInfo();
        LBrace = SourceLocation::getFromRawEncoding(
                    NameLoc.CXXOperatorName.BeginOpNameLoc);
        RBrace = SourceLocation::getFromRawEncoding(
                    NameLoc.CXXOperatorName.EndOpNameLoc);
    } else {
        LBrace = Callee->getBeginLoc();
        RBrace = OpLoc;
    }
    return SemaRef.CreateOverloadedArraySubscriptExpr(LBrace, RBrace,
                                                      First, Second);
  }
  // Create the overloaded operator invocation for binary operators.
  BinaryOperatorKind Opc = BinaryOperator::getOverloadedOpcode(Op);
  ExprResult Result = SemaRef.CreateOverloadedBinOp(
      OpLoc, Opc, Functions, Args[0], Args[1], RequiresADL);
  if (Result.isInvalid())
    return ExprError();
  return Result;
}
// Rebuild a pseudo-destructor expression (x.~T() / p->~T()). If the base
// is still dependent or the destroyed type is not a known record type,
// rebuild it as a pseudo-destructor; otherwise build a real member
// reference to the destructor of the now-known record type.
template<typename Derived>
ExprResult
TreeTransform<Derived>::RebuildCXXPseudoDestructorExpr(Expr *Base,
                                                     SourceLocation OperatorLoc,
                                                       bool isArrow,
                                                       CXXScopeSpec &SS,
                                                     TypeSourceInfo *ScopeType,
                                                       SourceLocation CCLoc,
                                                       SourceLocation TildeLoc,
                                        PseudoDestructorTypeStorage Destroyed) {
  QualType BaseType = Base->getType();
  if (Base->isTypeDependent() || Destroyed.getIdentifier() ||
      (!isArrow && !BaseType->getAs<RecordType>()) ||
      (isArrow && BaseType->getAs<PointerType>() &&
       !BaseType->getAs<PointerType>()->getPointeeType()
                                              ->template getAs<RecordType>())){
    // This pseudo-destructor expression is still a pseudo-destructor.
    return SemaRef.BuildPseudoDestructorExpr(
        Base, OperatorLoc, isArrow ? tok::arrow : tok::period, SS, ScopeType,
        CCLoc, TildeLoc, Destroyed);
  }
  // Build the destructor's name from the canonical destroyed type.
  TypeSourceInfo *DestroyedType = Destroyed.getTypeSourceInfo();
  DeclarationName Name(SemaRef.Context.DeclarationNames.getCXXDestructorName(
                 SemaRef.Context.getCanonicalType(DestroyedType->getType())));
  DeclarationNameInfo NameInfo(Name, Destroyed.getLocation());
  NameInfo.setNamedTypeInfo(DestroyedType);
  // The scope type is now known to be a valid nested name specifier
  // component. Tack it on to the end of the nested name specifier.
  if (ScopeType) {
    if (!ScopeType->getType()->getAs<TagType>()) {
      getSema().Diag(ScopeType->getTypeLoc().getBeginLoc(),
                     diag::err_expected_class_or_namespace)
          << ScopeType->getType() << getSema().getLangOpts().CPlusPlus;
      return ExprError();
    }
    SS.Extend(SemaRef.Context, SourceLocation(), ScopeType->getTypeLoc(),
              CCLoc);
  }
  SourceLocation TemplateKWLoc; // FIXME: retrieve it from caller.
  return getSema().BuildMemberReferenceExpr(Base, BaseType,
                                            OperatorLoc, isArrow,
                                            SS, TemplateKWLoc,
                                            /*FIXME: FirstQualifier*/ nullptr,
                                            NameInfo,
                                            /*TemplateArgs*/ nullptr,
                                            /*S*/nullptr);
}
// Transform a captured statement by re-opening a captured region in Sema,
// transforming the body inside it, and closing the region. The context
// parameter keeps its original position; its slot is filled with an empty
// name/type pair -- presumably a marker recognized by
// ActOnCapturedRegionStart (confirm).
template<typename Derived>
StmtResult
TreeTransform<Derived>::TransformCapturedStmt(CapturedStmt *S) {
  SourceLocation Loc = S->getBeginLoc();
  CapturedDecl *CD = S->getCapturedDecl();
  unsigned NumParams = CD->getNumParams();
  unsigned ContextParamPos = CD->getContextParamPosition();
  SmallVector<Sema::CapturedParamNameType, 4> Params;
  for (unsigned I = 0; I < NumParams; ++I) {
    if (I != ContextParamPos) {
      Params.push_back(
             std::make_pair(
                  CD->getParam(I)->getName(),
                  getDerived().TransformType(CD->getParam(I)->getType())));
    } else {
      Params.push_back(std::make_pair(StringRef(), QualType()));
    }
  }
  getSema().ActOnCapturedRegionStart(Loc, /*CurScope*/nullptr,
                                     S->getCapturedRegionKind(), Params);
  StmtResult Body;
  {
    Sema::CompoundScopeRAII CompoundScope(getSema());
    Body = getDerived().TransformStmt(S->getCapturedStmt());
  }
  // On failure the region must be explicitly torn down before returning.
  if (Body.isInvalid()) {
    getSema().ActOnCapturedRegionError();
    return StmtError();
  }
  return getSema().ActOnCapturedRegionEnd(Body.get());
}
} // end namespace clang
#endif // LLVM_CLANG_LIB_SEMA_TREETRANSFORM_H
|
var _ = require('underscore')
var mergeTrees = require('broccoli-merge-trees')
var sieve = require('broccoli-file-sieve')
var makeDepsGlobs = require('./makeDepsGlobs')
/**
 * Prepares a tree for a tech builder: copies tech files matching the deps
 * from each level directory into a new merged tree. Destination directories
 * are named by the level index ('0-...', '1-...', ...).
 * Callable with or without `new`.
 * @param {Array.<string>} levels Paths of the level directories.
 * @param {Deps} deps BEM deps object.
 * @param {Array.<string>} suffixes File suffixes of the tech.
 */
function LevelsReader(levels, deps, suffixes) {
  if (!(this instanceof LevelsReader)) {
    return new LevelsReader(levels, deps, suffixes)
  }
  this.levels = levels
  this.deps = deps
  this.suffixes = suffixes
}
/**
 * Builds the merged Broccoli tree for all levels and hands it to readTree.
 * Each level tree is first narrowed to files carrying the tech suffixes
 * (cheap pre-filter), then to files matching the deps globs, and copied
 * under a '<index>-<name>' destination directory.
 * @param {Function} readTree Broccoli readTree callback.
 * @returns {*} Result of readTree applied to the merged tree.
 */
LevelsReader.prototype.read = function(readTree) {
  var levelTrees = _.map(this.levels, function(level, index) {
    // First find all suffix files for performance optimization.
    var suffixGlobs = this.suffixes.map(function(suffix) {
      return '**/*.' + suffix
    })
    var suffixTree = sieve(level, {files: suffixGlobs})
    var depsGlobs = makeDepsGlobs(this.deps, this.suffixes, true)
    // Derive a filesystem-safe name from the tail of the level path:
    // strip ':' and collapse '..' into the single two-dot-leader character.
    var name = level.slice(-30)
      .replace(/\\|\//g, '-')
      .replace(/:/g, '')
      .replace(/\.\./g, '\u2025')
    if (level.length > 30) name = '~' + name
    return sieve(suffixTree, {
      files: depsGlobs,
      destDir: String(index) + '-' + name
    })
  }, this)
  return readTree(mergeTrees(levelTrees))
}
// Intentionally empty: this reader holds no temporary state to dispose
// (presumably the hook is required by the Broccoli plugin API -- confirm).
LevelsReader.prototype.cleanup = function() {}
module.exports = LevelsReader
|
# -*- coding: utf-8 -*-
import sys
from mmdet.apis import inference_detector, init_detector
import json
import os
import numpy as np
import argparse
from tqdm import tqdm
class MyEncoder(json.JSONEncoder):
    """JSON encoder that converts NumPy scalars and arrays to native types."""

    def default(self, obj):
        # Map NumPy types onto built-ins that json knows how to serialize.
        if isinstance(obj, np.integer):
            return int(obj)
        if isinstance(obj, np.floating):
            return float(obj)
        if isinstance(obj, np.ndarray):
            return obj.tolist()
        # Anything else: defer to the base class, which raises TypeError.
        return super(MyEncoder, self).default(obj)
#generate result
def result_from_dir():
    """Run inference on every image in ``pic_path`` and write a COCO-style
    result file ({'images': [...], 'annotations': [...]}) to ``json_out_path``.

    Relies on module-level globals assigned in ``__main__``:
    ``config2make_json``, ``model2make_json``, ``pic_path``, ``json_out_path``.
    """
    # index = {1: 1, 2: 9, 3: 5, 4: 3, 5: 4, 6: 2, 7: 8, 8: 6, 9: 10, 10: 7} ###
    # build the model from a config file and a checkpoint file
    model = init_detector(config2make_json, model2make_json, device='cuda:0')
    pics = os.listdir(pic_path)
    meta = {}
    images = []
    annotations = []
    num = 1
    for im in tqdm(pics):
        # NOTE(review): ids advance by 2 (3, 5, 7, ...); this looks like it was
        # meant to be ``num += 1`` — confirm against the consumer of this JSON.
        num += 2
        img = os.path.join(pic_path,im)
        result_ = inference_detector(model, img)
        images_anno = {}
        images_anno['file_name'] = im
        images_anno['id'] = num
        images.append(images_anno)
        # Detection classes are 1-indexed here.
        for i ,boxes in enumerate(result_,1):
            if i == 11: # get rid of neg class 11
                continue
            if len(boxes):
                defect_label = i
                for box in boxes:
                    anno = {}
                    anno['image_id'] = num
                    anno['category_id'] = defect_label
                    # box is [x1, y1, x2, y2, score]; converted below to COCO
                    # [x, y, w, h] in place.
                    anno['bbox'] = [round(float(i), 2) for i in box[0:4]]
                    w = anno['bbox'][2]-anno['bbox'][0]
                    anno['bbox'][2] = round(w, 2)
                    h = anno['bbox'][3]-anno['bbox'][1]
                    anno['bbox'][3] = round(h, 2)
                    anno['score'] = round(float(box[4]), 2)
                    annotations.append(anno)
    meta['images'] = images
    meta['annotations'] = annotations
    with open(json_out_path, 'w') as fp:
        #json.dump(meta, fp, cls=MyEncoder, indent=4, separators=(',', ': '))
        json.dump(meta, fp, cls=MyEncoder)
if __name__ == "__main__":
    # Parse CLI arguments and expose them as module-level globals consumed by
    # result_from_dir().
    parser = argparse.ArgumentParser(description="Generate result")
    parser.add_argument("-m", "--model",help="Model path",type=str,)
    parser.add_argument("-c", "--config",help="Config path",type=str,)
    parser.add_argument("-im", "--im_dir",help="Image path",type=str,)
    parser.add_argument('-o', "--out",help="Save path", type=str,)
    args = parser.parse_args()
    model2make_json = args.model
    config2make_json = args.config
    json_out_path = args.out
    pic_path = args.im_dir
    result_from_dir()
|
////////////////////////////////////////////////////////////////////////////////
// The Loki Library
// Copyright (c) 2001 by Andrei Alexandrescu
// This code accompanies the book:
// Alexandrescu, Andrei. "Modern C++ Design: Generic Programming and Design
// Patterns Applied". Copyright (c) 2001. Addison-Wesley.
// Permission to use, copy, modify, distribute and sell this software for any
// purpose is hereby granted without fee, provided that the above copyright
// notice appear in all copies and that both that copyright notice and this
// permission notice appear in supporting documentation.
// The author or Addison-Welsey Longman make no representations about the
// suitability of this software for any purpose. It is provided "as is"
// without express or implied warranty.
////////////////////////////////////////////////////////////////////////////////
// Last update: May 19, 2002
#ifndef FACTORY_INC_
#define FACTORY_INC_
#include "LokiTypeInfo.h"
#include "AssocVector.h"
#include <exception>
namespace Loki
{
////////////////////////////////////////////////////////////////////////////////
// class template DefaultFactoryError
// Manages the "Unknown Type" error in an object factory
////////////////////////////////////////////////////////////////////////////////
// Default error policy for Factory/CloneFactory: when CreateObject is called
// with an unregistered identifier, throw an exception.
template <typename IdentifierType, class AbstractProduct>
struct DefaultFactoryError
{
    struct Exception : public std::exception
    {
        const char* what() const throw() { return "Unknown Type"; }
    };

    // Invoked by the factory on lookup failure; never returns normally.
    static AbstractProduct* OnUnknownType(IdentifierType)
    {
        throw Exception();
    }
};
////////////////////////////////////////////////////////////////////////////////
// class template Factory
// Implements a generic object factory
////////////////////////////////////////////////////////////////////////////////
// Generic object factory: maps identifiers to creator callbacks and builds
// AbstractProduct instances on demand. Unknown ids are delegated to the
// FactoryErrorPolicy (DefaultFactoryError throws).
template
<
    class AbstractProduct,
    typename IdentifierType,
    typename ProductCreator = AbstractProduct* (*)(),
    template<typename, class>
    class FactoryErrorPolicy = DefaultFactoryError
>
class Factory
    : public FactoryErrorPolicy<IdentifierType, AbstractProduct>
{
public:
    // Associates id with creator; returns false if id was already registered.
    bool Register(const IdentifierType& id, ProductCreator creator)
    {
        // FIX: 'typename' is required before IdToProductMap::value_type —
        // IdToProductMap depends on the template parameters, so value_type is
        // a dependent name; conforming compilers reject the unqualified form.
        return associations_.insert(
            typename IdToProductMap::value_type(id, creator)).second;
    }

    // Removes id; returns true if exactly one entry was erased.
    bool Unregister(const IdentifierType& id)
    {
        return associations_.erase(id) == 1;
    }

    // Builds the product registered for id, or defers to the error policy.
    AbstractProduct* CreateObject(const IdentifierType& id)
    {
        typename IdToProductMap::iterator i = associations_.find(id);
        if (i != associations_.end())
        {
            return (i->second)();
        }
        return this->OnUnknownType(id);
    }

private:
    typedef AssocVector<IdentifierType, ProductCreator> IdToProductMap;
    IdToProductMap associations_;
};
////////////////////////////////////////////////////////////////////////////////
// class template CloneFactory
// Implements a generic cloning factory
////////////////////////////////////////////////////////////////////////////////
// Cloning factory: maps the dynamic type of a prototype object (via TypeInfo)
// to a callback that clones it.
template
<
    class AbstractProduct,
    class ProductCreator =
        AbstractProduct* (*)(const AbstractProduct*),
    template<typename, class>
    class FactoryErrorPolicy = DefaultFactoryError
>
class CloneFactory
    : public FactoryErrorPolicy<TypeInfo, AbstractProduct>
{
public:
    // Associates the type described by ti with a clone callback; returns
    // false if that type was already registered.
    bool Register(const TypeInfo& ti, ProductCreator creator)
    {
        // FIX: 'typename' is required before IdToProductMap::value_type —
        // the typedef depends on the template parameters, so value_type is a
        // dependent name; conforming compilers reject the unqualified form.
        return associations_.insert(
            typename IdToProductMap::value_type(ti, creator)).second;
    }

    // Removes the entry for id; returns true if exactly one was erased.
    bool Unregister(const TypeInfo& id)
    {
        return associations_.erase(id) == 1;
    }

    // Clones model via the callback registered for its dynamic type; a null
    // model yields null, an unregistered type defers to the error policy.
    AbstractProduct* CreateObject(const AbstractProduct* model)
    {
        if (model == 0) return 0;

        typename IdToProductMap::iterator i =
            associations_.find(typeid(*model));
        if (i != associations_.end())
        {
            return (i->second)(model);
        }
        return this->OnUnknownType(typeid(*model));
    }

private:
    typedef AssocVector<TypeInfo, ProductCreator> IdToProductMap;
    IdToProductMap associations_;
};
} // namespace Loki
////////////////////////////////////////////////////////////////////////////////
// Change log:
// June 20, 2001: ported by Nick Thurn to gcc 2.95.3. Kudos, Nick!!!
// May 10, 2002: ported by Rani Sharoni to VC7 (RTM - 9466)
////////////////////////////////////////////////////////////////////////////////
#endif // FACTORY_INC_
|
import os
import cv2
import numpy as np
import torch
import torch.nn as nn
from PIL import Image
from torchvision import models
from torchvision import transforms as T
def load_img(imgpath):
    """Load image.

    Args:
        imgpath (string): The path of the image to load.

    Returns:
        ((int, int), torch.Tensor): The size of original image, and the
        normalized image tensor of shape (3, 224, 224).
    """
    # FIX: force 3 channels — palette/RGBA/grayscale PNGs would otherwise
    # break the 3-channel Normalize below.
    img = Image.open(imgpath).convert('RGB')
    ori_size = img.size
    transforms = T.Compose([
        T.Resize((224, 224)),
        T.ToTensor(),
        T.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]),
    ])
    img_tensor = transforms(img)
    return (ori_size, img_tensor)
def save_img(grad, path):
    """Save result image.

    Shifts/scales the gradient into [0, 255] and writes it to file.

    Args:
        grad (torch.Tensor): The image tensor used by the model, shape (3, 224, 224).
        path (string): The output file path.

    Returns:
        None.
    """
    img = grad.detach().cpu().permute(1, 2, 0).numpy()
    img -= img.min()
    peak = img.max()
    # FIX: a constant gradient previously caused a divide-by-zero here,
    # producing a NaN image; leave the all-zero image unscaled instead.
    if peak > 0:
        img /= peak
    img = np.uint8(255 * img)
    cv2.imwrite(f"{path}", img)
    print(f"Save {path} complete")
class GBP(nn.Module):
    """An easy implementation of GBP (Guided Backpropagation), using a
    pretrained ResNet-18 backbone (downloads weights on first use).
    """
    def __init__(self):
        super(GBP, self).__init__()
        self.bone = models.resnet18(pretrained=True)
        self.bone.eval()
        self.set_backprop()

    def set_backprop(self):
        """Setting up backpropagation of Guided Backpropagation"""
        # Hook function. Filter out all the negative gradients and pass through.
        # NOTE(review): register_backward_hook passes (module, grad_input,
        # grad_output) — the parameter names here are swapped relative to that
        # convention, so this actually clamps grad_input. Confirm against the
        # PyTorch docs for the installed version; the API is deprecated in
        # favour of register_full_backward_hook.
        def relu_backward_hook(module, grad_out, grad_in):
            modified_grad_out = nn.functional.relu(grad_out[0])
            return (modified_grad_out, )
        # Register the backward hook function for all ReLU layers.
        for idx, item in enumerate(self.bone.modules()):
            if isinstance(item, nn.ReLU):
                item.register_backward_hook(relu_backward_hook)

    def generate_gradient(self, input, target):
        """Backpropagate a one-hot gradient for `target` and return d(output)/d(input)."""
        # Forward through network
        input.requires_grad = True
        model_output = self.bone(input)
        self.bone.zero_grad()
        # Build initial gradient: 1.0 at the target class, 0 elsewhere.
        init_grad = torch.zeros_like(model_output).float()
        init_grad[0][target] = 1.
        # Backward through network
        model_output.backward(gradient=init_grad)
        # Return the gradient
        return input.grad

    def forward(self, input, target):
        # Forward pass is defined as gradient generation for convenience.
        return self.generate_gradient(input, target)
def main():
    """Run guided backpropagation for one demo image and save the result."""
    img_id = "demo"
    class_idx = 243  # Mastiff (ImageNet class index)
    img_path = os.path.join("data", img_id + '.png')
    _, img_tensor = load_img(img_path)
    model = GBP()
    img_tensor.unsqueeze_(0)  # add batch dimension
    grad = model(img_tensor, class_idx)
    # FIX: os.path.join with a single argument was a no-op; the output name is
    # used directly (written to the current directory, as before).
    save_img(grad[0], "{}-{}.png".format(img_id, str(class_idx)))


if __name__ == "__main__":
    # FIX: guard the entry point so importing this module no longer triggers a
    # model download and a full inference run.
    main()
|
/**
******************************************************************************
* @file stm8l15x_gpio.h
* @author MCD Application Team
* @version V1.6.1
* @date 30-September-2014
* @brief This file contains all the functions prototypes for the GPIO firmware
* library.
******************************************************************************
* @attention
*
* <h2><center>© COPYRIGHT 2014 STMicroelectronics</center></h2>
*
* Licensed under MCD-ST Liberty SW License Agreement V2, (the "License");
* You may not use this file except in compliance with the License.
* You may obtain a copy of the License at:
*
* http://www.st.com/software_license_agreement_liberty_v2
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************
*/
/* Define to prevent recursive inclusion -------------------------------------*/
#ifndef __STM8L15x_GPIO_H
#define __STM8L15x_GPIO_H
/* Includes ------------------------------------------------------------------*/
#include "stm8l15x.h"
/** @addtogroup STM8L15x_StdPeriph_Driver
* @{
*/
/** @addtogroup GPIO
  * @{
  */
/* Exported types ------------------------------------------------------------*/
/** @addtogroup GPIO_Exported_Types
* @{
*/
/**
* @defgroup GPIO_Modes
*
* @brief
*
 * Bits definitions:
 * - Bit 7: 0 = INPUT mode
 *          1 = OUTPUT mode
 * - Bit 6: 0 = FLOATING (input) or OPEN-DRAIN (output)
 *          1 = PULL-UP (input) or PUSH-PULL (output)
 * - Bit 5: 0 = No external interrupt (input) or No slope control (output)
 *          1 = External interrupt (input) or Slope control enabled (output)
 * - Bit 4: 0 = Low level (output)
 *          1 = High level (output push-pull) or HI-Z (output open-drain)
* @{
*/
/* Each value encodes the configuration bits described in the GPIO_Modes
   comment block above: bit7 = direction, bit6 = pull-up/push-pull vs
   floating/open-drain, bit5 = interrupt (input) / speed (output),
   bit4 = initial output level. */
typedef enum
{
  GPIO_Mode_In_FL_No_IT      = (uint8_t)0x00,   /*!< Input floating, no external interrupt */
  GPIO_Mode_In_PU_No_IT      = (uint8_t)0x40,   /*!< Input pull-up, no external interrupt */
  GPIO_Mode_In_FL_IT         = (uint8_t)0x20,   /*!< Input floating, external interrupt */
  GPIO_Mode_In_PU_IT         = (uint8_t)0x60,   /*!< Input pull-up, external interrupt */
  GPIO_Mode_Out_OD_Low_Fast  = (uint8_t)0xA0,   /*!< Output open-drain, low level, 10MHz */
  GPIO_Mode_Out_PP_Low_Fast  = (uint8_t)0xE0,   /*!< Output push-pull, low level, 10MHz */
  GPIO_Mode_Out_OD_Low_Slow  = (uint8_t)0x80,   /*!< Output open-drain, low level, 2MHz */
  GPIO_Mode_Out_PP_Low_Slow  = (uint8_t)0xC0,   /*!< Output push-pull, low level, 2MHz */
  GPIO_Mode_Out_OD_HiZ_Fast  = (uint8_t)0xB0,   /*!< Output open-drain, high-impedance level, 10MHz */
  GPIO_Mode_Out_PP_High_Fast = (uint8_t)0xF0,   /*!< Output push-pull, high level, 10MHz */
  GPIO_Mode_Out_OD_HiZ_Slow  = (uint8_t)0x90,   /*!< Output open-drain, high-impedance level, 2MHz */
  GPIO_Mode_Out_PP_High_Slow = (uint8_t)0xD0    /*!< Output push-pull, high level, 2MHz */
}GPIO_Mode_TypeDef;
/**
* @}
*/
/** @defgroup GPIO_Pin
* @{
*/
/* One-hot bit masks; values may be OR-combined to address several pins at
   once (LNib/HNib/All are predefined combinations). */
typedef enum
{
  GPIO_Pin_0    = ((uint8_t)0x01),  /*!< Pin 0 selected */
  GPIO_Pin_1    = ((uint8_t)0x02),  /*!< Pin 1 selected */
  GPIO_Pin_2    = ((uint8_t)0x04),  /*!< Pin 2 selected */
  GPIO_Pin_3    = ((uint8_t)0x08),  /*!< Pin 3 selected */
  GPIO_Pin_4    = ((uint8_t)0x10),  /*!< Pin 4 selected */
  GPIO_Pin_5    = ((uint8_t)0x20),  /*!< Pin 5 selected */
  GPIO_Pin_6    = ((uint8_t)0x40),  /*!< Pin 6 selected */
  GPIO_Pin_7    = ((uint8_t)0x80),  /*!< Pin 7 selected */
  GPIO_Pin_LNib = ((uint8_t)0x0F),  /*!< Low nibble pins selected */
  GPIO_Pin_HNib = ((uint8_t)0xF0),  /*!< High nibble pins selected */
  GPIO_Pin_All  = ((uint8_t)0xFF)   /*!< All pins selected */
}GPIO_Pin_TypeDef;
/**
* @}
*/
/**
* @}
*/
/* Exported constants --------------------------------------------------------*/
/* Exported macros -----------------------------------------------------------*/
/** @addtogroup GPIO_Exported_Macros
* @{
*/
/**
* @brief Macro used by the assert function to check the different functions parameters.
*/
/**
* @brief Macro used by the assert function in order to check the different
* values of GPIOMode_TypeDef.
*/
/* Exhaustive comparison against every valid GPIO_Mode_TypeDef enumerator
   (the values are bit patterns, so a simple range check is not possible). */
#define IS_GPIO_MODE(MODE) \
  (((MODE) == GPIO_Mode_In_FL_No_IT) || \
   ((MODE) == GPIO_Mode_In_PU_No_IT) || \
   ((MODE) == GPIO_Mode_In_FL_IT) || \
   ((MODE) == GPIO_Mode_In_PU_IT) || \
   ((MODE) == GPIO_Mode_Out_OD_Low_Fast) || \
   ((MODE) == GPIO_Mode_Out_PP_Low_Fast) || \
   ((MODE) == GPIO_Mode_Out_OD_Low_Slow) || \
   ((MODE) == GPIO_Mode_Out_PP_Low_Slow) || \
   ((MODE) == GPIO_Mode_Out_OD_HiZ_Fast) || \
   ((MODE) == GPIO_Mode_Out_PP_High_Fast) || \
   ((MODE) == GPIO_Mode_Out_OD_HiZ_Slow) || \
   ((MODE) == GPIO_Mode_Out_PP_High_Slow))
/**
* @brief Macro used by the assert function in order to check the different
* values of GPIO_Pins.
*/
#define IS_GPIO_PIN(PIN) ((PIN) != (uint8_t)0x00)
/**
* @}
*/
/* Exported functions ------------------------------------------------------- */
/* Initialization and Configuration *******************************************/
void GPIO_DeInit(GPIO_TypeDef* GPIOx);
void GPIO_Init(GPIO_TypeDef* GPIOx, uint8_t GPIO_Pin, GPIO_Mode_TypeDef GPIO_Mode);
void GPIO_ExternalPullUpConfig(GPIO_TypeDef* GPIOx, uint8_t GPIO_Pin, FunctionalState NewState);
/* GPIO Read and Write ********************************************************/
void GPIO_Write(GPIO_TypeDef* GPIOx, uint8_t GPIO_PortVal);
void GPIO_WriteBit(GPIO_TypeDef* GPIOx, GPIO_Pin_TypeDef GPIO_Pin, BitAction GPIO_BitVal);
void GPIO_SetBits(GPIO_TypeDef* GPIOx, uint8_t GPIO_Pin);
void GPIO_ResetBits(GPIO_TypeDef* GPIOx, uint8_t GPIO_Pin);
void GPIO_ToggleBits(GPIO_TypeDef* GPIOx, uint8_t GPIO_Pin);
uint8_t GPIO_ReadInputData(GPIO_TypeDef* GPIOx);
uint8_t GPIO_ReadOutputData(GPIO_TypeDef* GPIOx);
BitStatus GPIO_ReadInputDataBit(GPIO_TypeDef* GPIOx, GPIO_Pin_TypeDef GPIO_Pin);
BitStatus GPIO_ReadOutputDataBit(GPIO_TypeDef* GPIOx, GPIO_Pin_TypeDef GPIO_Pin);
#endif /* __STM8L15x_GPIO_H */
/**
* @}
*/
/**
* @}
*/
/************************ (C) COPYRIGHT STMicroelectronics *****END OF FILE****/
|
import sys
from Functions import *
class CN_01_AdvSearch:
    # NOTE(review): these calls execute once at class-definition (import) time,
    # not per test instance — the class body is being used as a script.
    # Confirm this is intentional before converting them into methods.
    BrowserSetup('https://github.com/')
    SearchRepository('react')
    SearchRefinement('JavaScript', '>45', '>50', 'bsl-1.0')
    CheckResult('1 repository result', 'mvoloskov/decider')
    PrintReadMe(300)
    TearDown()
|
import itertools
import math
from operator import itemgetter
from random import random
import import_data
import numpy as np
import networkx
from numpy.ma import log2
import tree_bayesian_network
import import_data
def initialize_trees(train_dataset):
    """Build a random graph over the features and return its maximum spanning tree."""
    adjacency = initialize_tree_parameters(train_dataset)
    return tree_bayesian_network.find_max_spanning_tree(adjacency)
def initialize_tree_parameters(train_dataset):
    """Sample a random Erdos-Renyi graph (p=0.3) over the feature indices and
    return its dense 0/1 adjacency matrix as a numpy array."""
    num_of_features = np.shape(train_dataset)[1]
    graph = networkx.fast_gnp_random_graph(n=num_of_features, p=0.3)
    adjacency = np.zeros((num_of_features, num_of_features))
    for vertex in graph:
        for neighbour in graph[vertex]:
            adjacency[vertex][neighbour] = 1
    return adjacency
def initialize_k_trees(train_dataset, k):
    """Create k independently initialized random spanning trees, keyed 0..k-1."""
    return {tree_index: initialize_trees(train_dataset) for tree_index in range(k)}
def initialize_mixture_probabilities(k):
    """Return a uniform mixture-weight vector of length k (each entry 1/k)."""
    return np.full(k, 1 / k)
def initialize_joint_probability_distribution(k, train_dataset, k_trees):
joint_probability_distribution = dict()
for each_k in range(k):
joint_probability_distribution[each_k] = dict()
for each_attribute_1 in range(np.shape(train_dataset)[1]):
joint_probability_distribution[each_k][each_attribute_1] = dict()
for each_attribute_2 in range(np.shape(train_dataset)[1]):
if k_trees[each_k][each_attribute_1][each_attribute_2] != 0:
joint_probability_distribution[each_k][each_attribute_1][each_attribute_2] = np.random.random(
(2, 2))
# -1 is for self probability
joint_probability_distribution[each_k][each_attribute_1][-1] = random()
return joint_probability_distribution
def compute_mutual_information(train_dataset, complete_data, cluster_number):
    """Estimate, for one mixture component, the per-feature marginals, the
    pairwise mutual-information matrix and the pairwise 2x2 joint tables.

    Args:
        train_dataset: binary example matrix, shape (examples, features).
        complete_data: soft cluster assignments, shape (examples, clusters).
        cluster_number: index of the mixture component to weight examples by.

    Returns:
        (probability_distribution_each_parameter, mutual_information,
         probability_distribution): per-feature P(x=1) dict, (f, f) MI array,
        and a nested dict of 2x2 joint-count tables normalized by denominator.
    """
    probability_distribution = dict()
    probability_distribution_each_parameter = dict()
    num_of_examples = np.shape(train_dataset)[0]
    num_of_features = np.shape(train_dataset)[1]
    mutual_information = np.zeros((num_of_features, num_of_features))
    # The +2 here and count starting at 2 below implement additive smoothing.
    denominator = np.sum(complete_data[:, cluster_number]) + 2
    for each_parameter in range(num_of_features):
        count = 2
        for each_example in range(num_of_examples):
            if train_dataset[each_example][each_parameter] == 1:
                count = count + complete_data[each_example][cluster_number]
        probability_distribution_each_parameter[each_parameter] = count / (float(denominator))
    for count1 in range(num_of_features):
        probability_distribution[count1] = dict()
        for count2 in range(num_of_features):
            # Tally the four joint outcomes for this feature pair.
            each_case = np.zeros((2, 2))
            array_temp = train_dataset[:, (count1, count2)]
            for each_tuple in array_temp:
                if np.array_equal(each_tuple, [0, 0]):
                    each_case[0, 0] += 1
                if np.array_equal(each_tuple, [1, 0]):
                    each_case[1, 0] += 1
                if np.array_equal(each_tuple, [0, 1]):
                    each_case[0, 1] += 1
                if np.array_equal(each_tuple, [1, 1]):
                    each_case[1, 1] += 1
            for each_row in range(np.shape(each_case)[0]):
                for each_column in range(np.shape(each_case)[1]):
                    # Add-one smoothed joint probability for this cell.
                    p_uv = (each_case[each_row][each_column] + 1) / float(num_of_examples + 2)
                    if each_row == 0:
                        p_u = 1 - probability_distribution_each_parameter[count1]
                    else:
                        p_u = probability_distribution_each_parameter[count1]
                    if each_column == 0:
                        p_v = 1 - probability_distribution_each_parameter[count2]
                    else:
                        p_v = probability_distribution_each_parameter[count2]
                    # NOTE(review): the bare except silently drops invalid log
                    # terms (and would hide real errors) — confirm intent.
                    try:
                        mutual_information[count1][count2] += p_uv * np.ma.log2((p_uv / float(p_u * p_v)))
                    except:
                        i = 0
            probability_distribution[count1][count2] = np.zeros((2, 2))
            for first_case in range(np.shape(each_case)[0]):
                for second_case in range(np.shape(each_case)[0]):
                    probability_distribution[count1][count2][first_case][second_case] = each_case[first_case][second_case] / float(denominator)
    return probability_distribution_each_parameter, mutual_information, probability_distribution
def get_probability(probability_distribution, parameter_1, parameter_2, value_1, value_2):
    """Look up P(parameter_1=value_1, parameter_2=value_2) in the nested table,
    trying both key orders; returns 0 when the pair is absent.

    FIX: catches only lookup failures — the original used bare ``except``,
    which also swallowed programming errors and KeyboardInterrupt.
    """
    try:
        return probability_distribution[parameter_1][parameter_2][value_1][value_2]
    except (KeyError, IndexError, TypeError):
        pass
    try:
        # The tables store each undirected pair once; retry with swapped keys.
        return probability_distribution[parameter_2][parameter_1][value_2][value_1]
    except (KeyError, IndexError, TypeError):
        return 0
def update_joint_probability_distribution(joint_probability_distribution, probability_distribution_each_parameter,
                                          probability_distribution, train_dataset, k):
    """Rebuild component k's edge tables from freshly estimated probabilities.

    Only feature pairs that already exist in the component's current structure
    are rebuilt; each feature's marginal is stored under the sentinel key -1.
    Mutates and returns joint_probability_distribution.
    """
    joint_probability_distribution_new = dict()
    for each_feature in range(np.shape(train_dataset)[1]):
        joint_probability_distribution_new[each_feature] = dict()
        for each_feature_2 in range(np.shape(train_dataset)[1]):
            # NOTE(review): the bare excepts here skip any pair missing from
            # the previous structure — lookup failure is used as control flow.
            try:
                if joint_probability_distribution[k][each_feature][each_feature_2]:
                    joint_probability_distribution_new[each_feature][each_feature_2] = np.zeros((2, 2))
                    for each_parameter in range(2):
                        for each_parameter_2 in range(2):
                            joint_probability_distribution_new[each_feature][each_feature_2][each_parameter][each_parameter_2] = get_probability(
                                probability_distribution, each_feature, each_feature_2, each_parameter,
                                each_parameter_2)
            except:
                continue
        try:
            joint_probability_distribution_new[each_feature][-1] = probability_distribution_each_parameter[each_feature]
        except:
            i = 1
    joint_probability_distribution[k] = joint_probability_distribution_new
    return joint_probability_distribution
def e_step(train_dataset, k, joint_probability_distribution, mixture_probabilities, k_trees):
    """E-step: compute the responsibility of every mixture component for every
    training example.

    Each responsibility starts at the component's mixture weight and is
    multiplied by one factor per tree edge; each example's row is then
    normalized to sum to 1.

    Returns:
        completed_data: array of shape (num_examples, k) of responsibilities.
    """
    num_of_examples = np.shape(train_dataset)[0]
    num_of_features = np.shape(train_dataset)[1]
    completed_data = np.zeros((num_of_examples, k))
    for each_example in range(num_of_examples):
        for each_k in range(k):
            completed_data[each_example][each_k] = mixture_probabilities[each_k]
            for each_feature in range(num_of_features):
                # Only the upper triangle is visited; edges are assumed to be
                # reachable from the lower-indexed endpoint.
                for each_feature_2 in range(each_feature, num_of_features):
                    # NOTE(review): the bare except silently skips edges whose
                    # table or marginal is missing — confirm this is intended.
                    try:
                        if k_trees[each_k][each_feature][each_feature_2] != 0:
                            if train_dataset[each_example][each_feature] == 0 and train_dataset[each_example][each_feature_2] == 0:
                                completed_data[each_example][each_k] = (completed_data[each_example][each_k] * joint_probability_distribution[each_k][each_feature][each_feature_2][0][0]) / float(joint_probability_distribution[each_k][each_feature][-1])
                            if train_dataset[each_example][each_feature] == 0 and train_dataset[each_example][each_feature_2] == 1:
                                completed_data[each_example][each_k] = (completed_data[each_example][each_k] * joint_probability_distribution[each_k][each_feature][each_feature_2][0][1]) / float(joint_probability_distribution[each_k][each_feature][-1])
                            if train_dataset[each_example][each_feature] == 1 and train_dataset[each_example][each_feature_2] == 0:
                                completed_data[each_example][each_k] = (completed_data[each_example][each_k] * joint_probability_distribution[each_k][each_feature][each_feature_2][1][0]) / float(joint_probability_distribution[each_k][each_feature][-1])
                            if train_dataset[each_example][each_feature] == 1 and train_dataset[each_example][each_feature_2] == 1:
                                completed_data[each_example][each_k] = (completed_data[each_example][each_k] * joint_probability_distribution[each_k][each_feature][each_feature_2][1][1]) / float(joint_probability_distribution[each_k][each_feature][-1])
                    except:
                        i = 1
        # Normalize this example's responsibilities across the k components.
        denominator_for_normalization = np.sum(completed_data[each_example])
        for each_k in range(k):
            completed_data[each_example][each_k] = completed_data[each_example][each_k] / float(denominator_for_normalization)
    return completed_data
def m_step(train_dataset, k, completed_data, joint_probability_distribution):
    """M-step: re-estimate mixture weights, per-component spanning trees and
    joint tables from the responsibilities computed in the E-step.

    Returns:
        (updated_mixture_probabilities, updated_joint_probability_distribution,
         k_trees)
    """
    num_of_examples = np.shape(train_dataset)[0]
    updated_mixture_probabilities = np.zeros((k, 1))
    # New mixture weight = average responsibility of each component.
    for each_k in range(k):
        updated_mixture_probabilities[each_k] = np.sum(completed_data[:, each_k]) / float(num_of_examples)
    k_trees = dict()
    updated_joint_probability_distribution = dict()
    for each_k in range(k):
        # Re-fit structure: Chow-Liu style maximum spanning tree over the
        # responsibility-weighted mutual information.
        probability_distribution_each_parameter, mutual_information, probability_distribution = compute_mutual_information(
            train_dataset, completed_data, each_k)
        spanning_tree = tree_bayesian_network.find_max_spanning_tree(mutual_information)
        k_trees[each_k] = spanning_tree
        updated_joint_probability_distribution = update_joint_probability_distribution(joint_probability_distribution,
                                                                                       probability_distribution_each_parameter,
                                                                                       probability_distribution,
                                                                                       train_dataset, each_k)
    return updated_mixture_probabilities, updated_joint_probability_distribution, k_trees
def test_log_likelihood(test_dataset, mixture_probabilities, joint_probability_distribution, k_trees, k):
    """Sum, over all test examples, the (approximate) log-likelihood under the
    mixture of trees.

    For every component, each tree edge contributes
    log2 P(u, v) - log2 P(second endpoint), read from the component's tables.

    FIX/refactor: the original repeated the same try/except block four times,
    once per (u, v) value combination; the values now index the 2x2 table
    directly, and only lookup failures are swallowed (the bare ``except`` also
    hid real errors). Values other than 0/1 are skipped, exactly as before
    (no branch matched them).
    """
    log_likelihood = 0
    for each_example in test_dataset:
        log_probability_of_example = 0
        for each_k in range(k):
            log_probability_of_example_for_each_k = 0
            edges_dict, edges = tree_bayesian_network.get_edges(k_trees[each_k])
            for each_edge_set in edges:
                u = each_example[each_edge_set[0]]
                v = each_example[each_edge_set[1]]
                # Only binary values occur in the data; anything else is skipped,
                # matching the original branch structure.
                if u not in (0, 1) or v not in (0, 1):
                    continue
                try:
                    log_probability_of_example_for_each_k += log2(
                        joint_probability_distribution[each_k][each_edge_set[0]][each_edge_set[1]][int(u)][int(v)]) - log2(
                        joint_probability_distribution[each_k][each_edge_set[1]][-1])
                except (KeyError, IndexError, TypeError, ValueError):
                    pass
            log_probability_of_example += log2(mixture_probabilities[each_k]) + log_probability_of_example_for_each_k
        log_likelihood = log_likelihood + log_probability_of_example
    return log_likelihood
def run_model(train_dataset, test_dataset, valid_dataset, k, num_of_iterations, num_of_iterations_for_em):
    """Run EM for a mixture of k trees ``num_of_iterations`` times (fresh random
    init each time) and return the mean and standard deviation of the
    test-set log-likelihood across runs.
    """
    log_likelihood_for_each_iteration = np.zeros((num_of_iterations, 1))
    for each_iteration in range(num_of_iterations):
        # Fresh random initialization for every restart.
        k_trees = initialize_k_trees(train_dataset, k)
        mixture_probabilities = initialize_mixture_probabilities(k)
        joint_probability_distribution = initialize_joint_probability_distribution(k, train_dataset, k_trees)
        converged = False
        for each_iteration_for_em in range(num_of_iterations_for_em):
            if converged:
                break;
            else:
                completed_data = e_step(train_dataset, k, joint_probability_distribution, mixture_probabilities,
                                        k_trees)
                updated_mixture_probabilities, updated_joint_probability_distribution, k_trees = m_step(train_dataset,
                                                                                                        k,
                                                                                                        completed_data,
                                                                                                        joint_probability_distribution)
                if not converged:
                    # NOTE(review): each_k is unused in the convergence test, so
                    # the same np.allclose check (rtol=atol=0.1 on the whole
                    # weight vector) runs k times — confirm whether a
                    # per-component check was intended.
                    for each_k in range(k):
                        if np.allclose(updated_mixture_probabilities, mixture_probabilities, 1 / 10, 1 / 10):
                            converged = not converged
                    mixture_probabilities = updated_mixture_probabilities
                    joint_probability_distribution = updated_joint_probability_distribution
        log_likelihood_for_each_iteration[each_iteration] = test_log_likelihood(test_dataset, mixture_probabilities,
                                                                                joint_probability_distribution, k_trees,
                                                                                k)
    mean = np.mean(log_likelihood_for_each_iteration)
    standard_deviation = np.std(log_likelihood_for_each_iteration)
    return mean, standard_deviation
def validation_of_model(dataset_name, num_of_iterations):
    """Select the best number of components k on the validation set, then
    evaluate the chosen k on the test set.

    Returns:
        (mean, standard deviation) of the final test-set log-likelihood.
    """
    train_dataset, test_dataset, valid_dataset = import_data.import_data(dataset_name)
    k = range(5, 21, 5)
    best_k = 5
    best_log_likelihood = -math.inf
    for each in k:
        # Here I am testing on the validation dataset
        mean, standard_deviation = run_model(train_dataset, valid_dataset, test_dataset, each, 10, 100)
        if mean > best_log_likelihood:
            # FIX: best_log_likelihood was never updated, so every candidate k
            # beat -inf and best_k always ended up as the last k tried.
            best_log_likelihood = mean
            best_k = each
    log_likelihood_mean_final, log_likelihood_standard_deviation_final = run_model(train_dataset, test_dataset,
                                                                                   valid_dataset, best_k,
                                                                                   10, 100)
    return log_likelihood_mean_final, log_likelihood_standard_deviation_final
"""
import import_data
import sys
import warnings
arguments = list(sys.argv)
try:
data_set_name = str(arguments[1])
algorithm_name = str(arguments[2])
except:
print("You have given less arguments")
try:
number_of_iterations = int(str(arguments[3]))
except:
print(
"You want to run Part 1 or Part 2, else you have provided wrong commands, please check. Please check the readme")
train_dataset, test_dataset, valid_dataset = import_data.import_data(data_set_name)
mean, standard_deviation = validation_of_model(data_set_name, number_of_iterations)
print("The log likelihood mean and standard deviation are ", str(mean), str(standard_deviation))
If the above code does not run please use the following code
train_dataset, test_dataset, valid_dataset = import_data.import_data(data_set_name)
mean, standard_deviation = run_model(train_dataset, test_dataset, valid_dataset, 15, number_of_iterations, 100)
print(mean, standard_deviation)
"""
|
//@target: ES6
// Destructures each Map entry [key, value]: 'k' binds the key and the rest
// element '...[v]' gathers the remaining items into an array that is
// immediately destructured, so 'v' receives the value.
var k, v;
var map = new Map([
    [
        "",
        true
    ]
]);
for ([k, ...[v]] of map){
    k;
    v;
}
|
import matplotlib.pyplot as plt
from matplotlib import rc, rcParams
from matplotlib.ticker import FormatStrFormatter
import numpy as np
from functools import reduce
import logging
from fractions import Fraction
import json
import argparse
logger = logging.getLogger(__name__)
def create_line_graph(data, title, x_axis_label, y_axis_label, filename, yscale='linear'):
    """Plot each series in ``data`` as a line on a 2x2 grid of subplots and
    save the figure.

    Args:
        data: dict mapping series name -> {x value: y value}.
        title: figure-level title.
        x_axis_label, y_axis_label: axis labels applied to every subplot.
        filename: output image path.
        yscale: matplotlib y-axis scale for every subplot ('linear', 'log', ...).
    """
    fig, ax = plt.subplots(nrows=2, ncols=2, figsize=(10, 5))
    rcParams['mathtext.default'] = 'regular'
    ax_list = [ax[0][0], ax[0][1], ax[1][0], ax[1][1]]
    # Series are distributed round-robin across the four subplots.
    for idx, series in enumerate(data):
        x_vals = np.array(list(data[series].keys()))
        y_vals = np.array([data[series][x_val] for x_val in x_vals])
        ax_list[idx % len(ax_list)].plot(x_vals, y_vals, label=series, zorder=2, marker='o',
                                         linewidth=2, markersize=6)
    for subplot in ax_list:
        # FIX: these were plt.xlabel/plt.ylabel/plt.yscale/plt.xscale calls,
        # which only act on the *current* axes (effectively just the last
        # subplot), and the yscale parameter was ignored (hard-coded 'log').
        # Apply labels and scales to every subplot and honor the parameter.
        subplot.set_xlabel(x_axis_label)
        subplot.set_ylabel(y_axis_label)
        subplot.set_yscale(yscale)
        subplot.set_xscale('log')
        subplot.xaxis.set_major_formatter(FormatStrFormatter('%.3g'))
        subplot.xaxis.set_minor_formatter(FormatStrFormatter('%.3g'))
        subplot.yaxis.set_major_formatter(FormatStrFormatter('%.3g'))
        # horizontal gridlines
        subplot.grid(axis='y', linestyle=':', zorder=1)
        # Misc properties
        subplot.legend(bbox_to_anchor=(1, 1), loc="upper left")
    fig.suptitle(title)
    fig.savefig(filename, bbox_inches='tight', dpi=300)
def isfloat(value):
    """Return True when ``value`` can be converted with float(), else False."""
    try:
        float(value)
    except ValueError:
        return False
    return True
if __name__ == "__main__":
    logging.basicConfig(level=logging.INFO)
    parser = argparse.ArgumentParser(description='Process some integers.')
    parser.add_argument('mode', metavar='M', type=str,
                        choices=['join', 'analysis'],
                        help='Constrains which benchmarks are run')
    args = parser.parse_args()
    output_file = f"subsampling_index_{args.mode}_benchmark"
    # Merge the indexed and non-indexed benchmark result files into one dict;
    # numeric-looking JSON keys are converted back to floats so they plot on a
    # numeric x-axis.
    data_files = [f"subsampling_{args.mode}_benchmark.json",
                  f"subsampling_{args.mode}_benchmark_no_index.json"]
    benchmark_data = {}
    for data_file in data_files:
        with open(f"results/{data_file}", 'r') as file:
            benchmark_data.update(json.loads(file.read(),
                                             object_hook=lambda d: {float(k) if isfloat(k) else k: v for k, v in d.items()}))
    logger.info(benchmark_data)
    create_line_graph(benchmark_data, "Time to Run Query With Different Dataset Sizes", "Size of Dataset Relative to Original",
                      "Seconds", f"figures/{output_file}.png", yscale='linear')
|
"""
Set of functions for IO communication with DisPerSe
# Author: Antonio Martinez-Sanchez (Max Planck Institute for Biochemistry)
# Date: 02.04.14
"""
__author__ = 'martinez'
__version__ = "$Revision: 001 $"
from .disperse_io import *
from .handler import DisPerSe
|
from django import template
register = template.Library()
@register.filter(name = "songtime")
def songtime(value):
if value < 0:
return "0:00"
#return "{}{}:{}".format(("" if value / 60 > 10 else "0"), value / 60, value % 60)
return "{}:{}{}".format(value / 60, ("" if value % 60 >= 10 else "0"), value % 60)
|
# coding: utf-8
"""
CONS3RT Web API
A CONS3RT ReSTful API # noqa: E501
The version of the OpenAPI document: 1.0.0
Contact: Fred@gigagantic-server.com
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import unittest
import cons3rt
from cons3rt.models.input_disk_for_template import InputDiskForTemplate # noqa: E501
from cons3rt.rest import ApiException
class TestInputDiskForTemplate(unittest.TestCase):
    """InputDiskForTemplate unit test stubs"""

    def setUp(self):
        # No fixtures needed for these generated stubs.
        pass

    def tearDown(self):
        pass

    def testInputDiskForTemplate(self):
        """Test InputDiskForTemplate"""
        # FIXME: construct object with mandatory attributes with example values
        # model = cons3rt.models.input_disk_for_template.InputDiskForTemplate()  # noqa: E501
        pass
if __name__ == '__main__':
unittest.main()
|
//--------------------------------------------------------------------------------
// This file is a portion of the Hieroglyph 3 Rendering Engine. It is distributed
// under the MIT License, available in the root of this distribution and
// at the following URL:
//
// http://www.opensource.org/licenses/mit-license.php
//
// Copyright (c) Jason Zink
//--------------------------------------------------------------------------------
//--------------------------------------------------------------------------------
// RenderEffectDX11
//
// A render effect is a complete pipeline configuration, excluding only the input
// geometry and the output buffers for rendering commands. This allows for an
// application to specify a single render effect to completely configure the
// renderer based on it's current state.
//--------------------------------------------------------------------------------
#ifndef RenderEffectDX11_h
#define RenderEffectDX11_h
//--------------------------------------------------------------------------------
#include "ShaderDX11.h"
//--------------------------------------------------------------------------------
namespace Glyph3
{
class PipelineManagerDX11;
class IParameterManager;
// A complete pipeline configuration (shaders + fixed-function state),
// excluding only input geometry and output buffers.
class RenderEffectDX11
{
public:
	RenderEffectDX11();
	virtual ~RenderEffectDX11();

	// Select the shader (by library index) to bind at each programmable stage.
	void SetVertexShader( int index );
	void SetHullShader( int index );
	void SetDomainShader( int index );
	void SetGeometryShader( int index );
	void SetPixelShader( int index );
	void SetComputeShader( int index );

	// Retrieve the currently selected shader index for each stage.
	int GetVertexShader( );
	int GetHullShader( );
	int GetDomainShader( );
	int GetGeometryShader( );
	int GetPixelShader( );
	int GetComputeShader( );

	// Bind the configured shaders and state to the given pipeline.
	void ConfigurePipeline( PipelineManagerDX11* pPipeline, IParameterManager* pParamManager );

	// Fixed-function state object indices; presumably resolved against the
	// renderer's state caches -- TODO confirm the sentinel for "unset".
	int m_iBlendState;
	int m_iDepthStencilState;
	int m_iRasterizerState;
	UINT m_uStencilRef;

protected:
	// Rebuild m_uniqueConstBuffers from the shaders currently attached.
	void UpdateConstantBufferList( );

	// One slot per pipeline stage (VS/HS/DS/GS/PS/CS), set via the setters above.
	int m_aiIndices[6];
	ShaderDX11* m_apShaders[6];

	// Constant-buffer parameters referenced by the attached shaders,
	// de-duplicated across stages.
	std::vector< RenderParameterDX11* > m_uniqueConstBuffers;
};
};
//--------------------------------------------------------------------------------
#endif // RenderEffectDX11_h
//--------------------------------------------------------------------------------
|
/*-
* Copyright (c) 2011 NetApp, Inc.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY NETAPP, INC ``AS IS'' AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL NETAPP, INC OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
* OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
* OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
* SUCH DAMAGE.
*
* $FreeBSD$
*/
#include <sys/cdefs.h>
#include <sys/types.h>
#include <sys/select.h>
#include <err.h>
#include <stdio.h>
#include <stdlib.h>
#include <termios.h>
#include <unistd.h>
#include <stdbool.h>
#include <sysexits.h>
#include "inout.h"
#include "lpc.h"
/* I/O port through which the guest reaches the bvm console device. */
#define BVM_CONSOLE_PORT 0x220
/* Signature ('bv') returned to guests probing the port with 16-bit reads. */
#define BVM_CONS_SIG ('b' << 8 | 'v')

/* Saved and raw-mode terminal settings for the host's stdin. */
static struct termios tio_orig, tio_new;
/*
 * atexit(3) handler: restore the terminal settings saved by ttyopen().
 */
static void
ttyclose(void)
{
	tcsetattr(STDIN_FILENO, TCSANOW, &tio_orig);
}
/*
 * Save the current terminal settings, switch stdin to raw mode, and
 * arrange for the settings to be restored at process exit.
 */
static void
ttyopen(void)
{
	/*
	 * If stdin is not a terminal, tcgetattr() fails and tio_orig would
	 * contain garbage that ttyclose() later restores; bail out instead.
	 */
	if (tcgetattr(STDIN_FILENO, &tio_orig) == -1) {
		warn("bvmcons: tcgetattr");
		return;
	}
	/*
	 * Start the raw settings from the saved ones: cfmakeraw() only
	 * modifies a subset of the fields, so the original (zero-initialized)
	 * tio_new applied unrelated fields such as the line speed as zero.
	 */
	tio_new = tio_orig;
	cfmakeraw(&tio_new);
	tcsetattr(STDIN_FILENO, TCSANOW, &tio_new);
	atexit(ttyclose);
}
/*
 * Non-blocking poll: report whether a byte can be read from stdin
 * right now.
 */
static bool
tty_char_available(void)
{
	fd_set readfds;
	struct timeval timeout = { 0, 0 };

	FD_ZERO(&readfds);
	FD_SET(STDIN_FILENO, &readfds);
	return (select(STDIN_FILENO + 1, &readfds, NULL, NULL, &timeout) > 0);
}
/*
 * Read one byte from stdin if one is pending.
 *
 * Returns the byte value (0-255), or -1 when no data is available or the
 * read fails.
 */
static int
ttyread(void)
{
	char rb;

	if (!tty_char_available())
		return -1;
	/*
	 * The original ignored the read(2) result; on EOF (0) or error (-1)
	 * it returned the uninitialized 'rb'.  Treat both as "no data".
	 */
	if (read(STDIN_FILENO, &rb, 1) != 1)
		return -1;
	return (rb & 0xff);
}
/*
 * Write a single byte to stdout; short or failed writes are deliberately
 * ignored (best-effort console output).
 */
static void
ttywrite(unsigned char wb)
{
	(void) write(STDOUT_FILENO, &wb, 1);
}
/*
 * in/out handler for the bvm console port.
 *
 * Access protocol (as implemented below):
 *   - 16-bit reads return BVM_CONS_SIG so a guest can detect the device;
 *   - 8-bit reads return 0xff (looks like an unpopulated ISA port);
 *   - 32-bit accesses carry console data: reads fetch one byte from the
 *     host tty (-1 if none pending), writes emit one byte to it;
 *   - any other width is rejected.
 */
static int
console_handler(struct vmctx *ctx, int vcpu, int in, int port, int bytes,
		uint32_t *eax, void *arg)
{
	static int opened;
	if (bytes == 2 && in) {
		*eax = BVM_CONS_SIG;
		return 0;
	}
	/*
	 * Guests might probe this port to look for old ISA devices
	 * using single-byte reads.  Return 0xff for those.
	 */
	if (bytes == 1 && in) {
		*eax = 0xff;
		return 0;
	}
	if (bytes != 4)
		return -1;
	/* Put the host terminal into raw mode on the first real access. */
	if (!opened) {
		ttyopen();
		opened = 1;
	}
	if (in)
		*eax = ttyread();
	else
		ttywrite(*eax);
	return 0;
}
/* Reserve the 4-byte window at BVM_CONSOLE_PORT in the I/O resource map. */
SYSRES_IO(BVM_CONSOLE_PORT, 4);

/* Single bidirectional port backed by console_handler. */
static struct inout_port consport = {
	"bvmcons",
	BVM_CONSOLE_PORT,
	1,
	IOPORT_F_INOUT,
	console_handler
};
/*
 * Public entry point: register the bvm console port with the inout layer.
 */
void
init_bvmcons(void)
{
	register_inout(&consport);
}
|
"""
Codemonk link: https://www.hackerearth.com/practice/data-structures/trees/binary-and-nary-trees/practice-problems/algorithm/mirror-image-2/
You are given a binary tree rooted at 1. You have to find the mirror image of any node qi about node 1. If it doesn't
exist then print -1.
Input - Output:
First line of input is N and Q.
Next N-1 lines consists of two integers and one character first of whose is the parent node,
second is child node and character "L" representing Left child and "R" representing right child.
Next Q lines represents qi.
For each qi print it mirror node if it exists else print -1.
Sample input:
10 8
1 2 R
1 3 L
2 4 R
2 5 L
3 6 R
3 7 L
5 8 R
5 9 L
7 10 R
2
5
3
6
1
10
9
4
Sample Output:
3
6
2
5
1
-1
-1
7
"""
"""
We implement a simple binary tree. To do that we create all the nodes and place them in an array. Afterwards based on
the input we associate the nodes from the array with each other. Then, to find the mirror node we start from the root
going downwards the tree simultaneously, either from left and right children, or, if we don't find it there, we change
direction to right and left children. It sounds strange, but after thinking about it, it makes sense.
Final complexity: O(Q*N)
"""
class BinaryTree:
    """Minimal binary-tree node: a value plus left/right child links."""

    def __init__(self, value):
        self.left = None    # left child (BinaryTree or None)
        self.right = None   # right child (BinaryTree or None)
        self.value = value  # payload; here, the 1-based node label
def find_mirror(left, right, mirror):
    """Return the node that mirrors `mirror` across the root, or -1.

    `left` and `right` are positions that mirror one another.  The search
    walks both subtrees in lockstep: first the outer pair (left.left vs
    right.right), then the inner pair (left.right vs right.left).
    """
    if left is None or right is None:
        return -1
    if left == mirror:
        return right
    if right == mirror:
        return left
    outer = find_mirror(left.left, right.right, mirror)
    return outer if outer != -1 else find_mirror(left.right, right.left, mirror)
# Read the tree description from stdin, then answer each mirror query.
n, q = map(int, input().split())
# Node i lives at index i-1; all nodes exist up front so edges can be
# wired in any input order.
nodes_array = [BinaryTree(i) for i in range(1, n+1)]
for _ in range(n-1):
    node, child, which = input().split()
    node, child = int(node), int(child)
    temp_node = nodes_array[node-1]
    temp_child = nodes_array[child-1]
    if which == "R":
        temp_node.right = temp_child
    else:
        temp_node.left = temp_child
# Earlier construction variant kept for reference (creates nodes lazily):
# nodes_array = [0] * n
# nodes_array[0] = BinaryTree(1)
# for _ in range(n-1):
#     node, child, which = input().split()
#     node, child = int(node), int(child)
#     temp_node = BinaryTree(child)
#     nodes_array[child-1] = temp_node
#     if which == "R":
#         nodes_array[node-1].right = temp_node
#     else:
#         nodes_array[node-1].left = temp_node
for i in range(q):
    mirror_node = int(input())
    # The root is its own mirror image.
    if nodes_array[mirror_node-1] == nodes_array[0]:
        print(nodes_array[0].value)
    else:
        answer = find_mirror(nodes_array[0].left, nodes_array[0].right, nodes_array[mirror_node-1])
        if answer != -1:
            print(answer.value)
        else:
            print(-1)
|
from pygame.locals import K_UP, K_DOWN, K_LEFT, K_RIGHT, K_SPACE
class Controls:
    """Key-binding constants mapping game actions to pygame key codes."""
    DOWN: int = K_DOWN
    UP: int = K_UP
    LEFT: int = K_LEFT
    RIGHT: int = K_RIGHT
    SPACE: int = K_SPACE
|
from typing import Any, Dict, Tuple
from collections import OrderedDict
from django.views.generic import TemplateView
from django.conf import settings
from django.http import HttpRequest, HttpResponse, HttpResponseNotFound
from django.template import loader
import os
import random
import re
from zerver.lib.integrations import CATEGORIES, INTEGRATIONS, HubotIntegration, \
WebhookIntegration
from zerver.lib.request import has_request_variables, REQ
from zerver.lib.subdomains import get_subdomain
from zerver.models import Realm
from zerver.templatetags.app_filters import render_markdown_path
from zerver.context_processors import zulip_default_context
def add_api_uri_context(context: Dict[str, Any], request: HttpRequest) -> None:
    """Populate `context` (in place) with API/doc URL variables for `request`.

    Also merges in the Zulip default template context.
    """
    context.update(zulip_default_context(request))

    subdomain = get_subdomain(request)
    # On the root landing-page domain there is no real realm to point at,
    # so fall back to a placeholder host and plain-text link labels.
    use_placeholder = (subdomain == Realm.SUBDOMAIN_FOR_ROOT_DOMAIN
                       and settings.ROOT_DOMAIN_LANDING_PAGE)
    if use_placeholder:
        display_subdomain = 'yourZulipDomain'
        html_settings_links = False
    else:
        display_subdomain = subdomain
        html_settings_links = True

    display_host = Realm.host_for_subdomain(display_subdomain)
    api_url_scheme_relative = display_host + "/api"

    context['external_uri_scheme'] = settings.EXTERNAL_URI_SCHEME
    context['api_url'] = settings.EXTERNAL_URI_SCHEME + api_url_scheme_relative
    context['api_url_scheme_relative'] = api_url_scheme_relative
    context['zulip_url'] = settings.EXTERNAL_URI_SCHEME + display_host
    context["html_settings_links"] = html_settings_links

    if html_settings_links:
        settings_html = '<a href="/#settings">Zulip settings page</a>'
        subscriptions_html = '<a target="_blank" href="/#streams">streams page</a>'
    else:
        settings_html = 'Zulip settings page'
        subscriptions_html = 'streams page'
    context['settings_html'] = settings_html
    context['subscriptions_html'] = subscriptions_html
class ApiURLView(TemplateView):
    """TemplateView that always exposes the API URL context variables."""

    def get_context_data(self, **kwargs: Any) -> Dict[str, str]:
        context = super().get_context_data(**kwargs)
        add_api_uri_context(context, self.request)
        return context
class MarkdownDirectoryView(ApiURLView):
    """Serve a directory of markdown articles (/help or /api) as templates.

    Subclasses set `path_template`, a %-format string with one slot for the
    article slug.
    """
    path_template = ""

    def get_path(self, article: str) -> Tuple[str, int]:
        """Map an article slug to (template path, HTTP status).

        Over-long, path-traversing, or otherwise invalid slugs resolve to
        the "missing" article with status 404.
        """
        http_status = 200
        if article == "":
            article = "index"
        elif article == "include/sidebar_index":
            # Special-case: the sidebar include is the only slug with a "/".
            pass
        elif "/" in article:
            article = "missing"
            http_status = 404
        elif len(article) > 100 or not re.match('^[0-9a-zA-Z_-]+$', article):
            article = "missing"
            http_status = 404
        path = self.path_template % (article,)
        try:
            loader.get_template(path)
            return (path, http_status)
        except loader.TemplateDoesNotExist:
            return (self.path_template % ("missing",), 404)

    def get_context_data(self, **kwargs: Any) -> Dict[str, Any]:
        article = kwargs["article"]
        context = super().get_context_data()  # type: Dict[str, Any]
        (context["article"], http_status_ignored) = self.get_path(article)
        # For disabling the "Back to home" on the homepage
        context["not_index_page"] = not context["article"].endswith("/index.md")
        if self.path_template == '/zerver/help/%s.md':
            context["page_is_help_center"] = True
            context["doc_root"] = "/help/"
            (sidebar_index, http_status_ignored) = self.get_path("include/sidebar_index")
            # We want the sliding/collapsing behavior for /help pages only
            sidebar_class = "sidebar slide"
            title_base = "Zulip Help Center"
        else:
            context["page_is_api_center"] = True
            context["doc_root"] = "/api/"
            (sidebar_index, http_status_ignored) = self.get_path("sidebar_index")
            sidebar_class = "sidebar"
            title_base = "Zulip API Documentation"
        # The following is a somewhat hacky approach to extract titles from articles.
        # Hack: `context["article"] has a leading `/`, so we use + to add directories.
        article_path = os.path.join(settings.DEPLOY_ROOT, 'templates') + context["article"]
        if os.path.exists(article_path):
            with open(article_path) as article_file:
                first_line = article_file.readlines()[0]
            # Strip the header and then use the first line to get the article title
            article_title = first_line.strip().lstrip("# ")
            if context["not_index_page"]:
                context["OPEN_GRAPH_TITLE"] = "%s (%s)" % (article_title, title_base)
            else:
                context["OPEN_GRAPH_TITLE"] = title_base
        # NOTE(review): "REPLACMENT" is misspelled, but this placeholder must
        # match whatever later replaces it in the response -- confirm the
        # consumer before fixing the spelling.
        self.request.placeholder_open_graph_description = (
            "REPLACMENT_OPEN_GRAPH_DESCRIPTION_%s" % (int(2**24 * random.random()),))
        context["OPEN_GRAPH_DESCRIPTION"] = self.request.placeholder_open_graph_description
        context["sidebar_index"] = sidebar_index
        context["sidebar_class"] = sidebar_class
        # An "article" might require the api_uri_context to be rendered
        api_uri_context = {}  # type: Dict[str, Any]
        add_api_uri_context(api_uri_context, self.request)
        api_uri_context["run_content_validators"] = True
        context["api_uri_context"] = api_uri_context
        return context

    def get(self, request: HttpRequest, article: str="") -> HttpResponse:
        (path, http_status) = self.get_path(article)
        # NOTE(review): passes `self` where Django's TemplateView.get expects
        # `request`; this works because get() only reads self.request -- confirm
        # before changing.
        result = super().get(self, article=article)
        if http_status != 200:
            result.status_code = http_status
        return result
def add_integrations_context(context: Dict[str, Any]) -> None:
    """Add the alphabetized integrations catalog and a display count to `context`."""
    context['categories_dict'] = OrderedDict(sorted(CATEGORIES.items()))
    context['integrations_dict'] = OrderedDict(sorted(INTEGRATIONS.items()))
    enabled_count = len([v for v in INTEGRATIONS.values() if v.is_enabled()])
    # Subtract 1 so saying "Over X integrations" is correct. Then,
    # round down to the nearest multiple of 10.
    context['integrations_count_display'] = ((enabled_count - 1) // 10) * 10
def add_integrations_open_graph_context(context: Dict[str, Any], request: HttpRequest) -> None:
    """Set Open Graph title/description for /integrations pages that match."""
    description = ('Zulip comes with over a hundred native integrations out of the box, '
                   'and integrates with Zapier, IFTTT, and Hubot to provide hundreds more. '
                   'Connect the apps you use everyday to Zulip.')
    # Last non-empty path component identifies the integration or category.
    path_name = request.path.rstrip('/').split('/')[-1]

    title = None
    if path_name in INTEGRATIONS:
        integration = INTEGRATIONS[path_name]
        title = 'Connect {name} to Zulip'.format(name=integration.display_name)
    elif path_name in CATEGORIES:
        category = CATEGORIES[path_name]
        title = 'Connect your {category} tools to Zulip'.format(category=category)
    elif path_name == 'integrations':
        title = 'Connect the tools you use to Zulip'

    # Only annotate the context when the path matched something known.
    if title is not None:
        context['OPEN_GRAPH_TITLE'] = title
        context['OPEN_GRAPH_DESCRIPTION'] = description
class IntegrationView(ApiURLView):
    """Render the /integrations landing page with the full catalog."""
    template_name = 'zerver/integrations/index.html'

    def get_context_data(self, **kwargs: Any) -> Dict[str, Any]:
        context = super().get_context_data(**kwargs)  # type: Dict[str, Any]
        add_integrations_context(context)
        add_integrations_open_graph_context(context, self.request)
        return context
@has_request_variables
def integration_doc(request: HttpRequest, integration_name: str=REQ(default=None)) -> HttpResponse:
    """Return the rendered HTML documentation for a single integration.

    AJAX-only endpoint: non-AJAX requests and unknown integration names
    both yield a 404.
    """
    if not request.is_ajax():
        return HttpResponseNotFound()
    try:
        integration = INTEGRATIONS[integration_name]
    except KeyError:
        return HttpResponseNotFound()

    context = {}  # type: Dict[str, Any]
    add_api_uri_context(context, request)
    context['integration_name'] = integration.name
    context['integration_display_name'] = integration.display_name
    if hasattr(integration, 'stream_name'):
        context['recommended_stream_name'] = integration.stream_name
    if isinstance(integration, WebhookIntegration):
        # Drop the first three characters of the URL prefix for display --
        # NOTE(review): confirm which prefix this trims.
        context['integration_url'] = integration.url[3:]
    if isinstance(integration, HubotIntegration):
        context['hubot_docs_url'] = integration.hubot_docs_url

    doc_html_str = render_markdown_path(integration.doc, context)
    return HttpResponse(doc_html_str)
|
import os
import sys
from template import Template
from template.plugins import Plugins
from template.test import TestCase, main
class PluginsTest(TestCase):
    """Exercises plugin loading via PLUGIN_BASE, a PLUGINS map and LOAD_PYTHON."""

    def testPlugins(self):
        # Make the MyPlugs test package importable.
        sys.path.insert(0, os.path.abspath("test/plugin"))
        tt1 = Template({ "PLUGIN_BASE": "MyPlugs" })
        tt2 = Template({ "PLUGINS": { "bar": "MyPlugs.Bar",
                                      "baz": ("MyPlugs.Foo", "Foo"),
                                      "cgi": ("MyPlugs.Bar", "Bar") } })
        tt3 = Template({ "LOAD_PYTHON": 1 })
        # NOTE: these mutate module-level state, so every Template created
        # afterwards (tt4, tt5) sees no "date" plugin and an empty base.
        del Plugins.STD_PLUGINS["date"]
        Plugins.PLUGIN_BASE = ""
        tt4 = Template({ "PLUGIN_BASE": "MyPlugs" })
        tt5 = Template()
        tt = (("def", Template()),
              ("tt1", tt1),
              ("tt2", tt2),
              ("tt3", tt3),
              ("tt4", tt4),
              ("tt5", tt5))
        self.Expect(DATA, tt, self._callsign())
DATA = r"""
#------------------------------------------------------------------------
# basic plugin loads
#------------------------------------------------------------------------
-- test --
[% USE Table([2, 3, 5, 7, 11, 13], rows=2) -%]
[% Table.row(0).join(', ') %]
-- expect --
2, 5, 11
-- test --
[% USE table([17, 19, 23, 29, 31, 37], rows=2) -%]
[% table.row(0).join(', ') %]
-- expect --
17, 23, 31
-- test --
[% USE t = Table([41, 43, 47, 49, 53, 59], rows=2) -%]
[% t.row(0).join(', ') %]
-- expect --
41, 47, 53
-- test --
[% USE t = table([61, 67, 71, 73, 79, 83], rows=2) -%]
[% t.row(0).join(', ') %]
-- expect --
61, 71, 79
#------------------------------------------------------------------------
# load Foo plugin through custom PLUGIN_BASE
#------------------------------------------------------------------------
-- test --
-- use tt1 --
-- test --
[% USE t = table([89, 97, 101, 103, 107, 109], rows=2) -%]
[% t.row(0).join(', ') %]
-- expect --
89, 101, 107
-- test --
[% USE Foo(2) -%]
[% Foo.output %]
-- expect --
This is the Foo plugin, value is 2
-- test --
[% USE Bar(4) -%]
[% Bar.output %]
-- expect --
This is the Bar plugin, value is 4
#------------------------------------------------------------------------
# load Foo plugin through custom PLUGINS
#------------------------------------------------------------------------
-- test --
-- use tt2 --
[% USE t = table([113, 127, 131, 137, 139, 149], rows=2) -%]
[% t.row(0).join(', ') %]
-- expect --
113, 131, 139
-- test --
[% TRY -%]
[% USE Foo(8) -%]
[% Foo.output %]
[% CATCH -%]
ERROR: [% error.info %]
[% END %]
-- expect --
ERROR: Foo: plugin not found
-- test --
[% USE bar(16) -%]
[% bar.output %]
-- expect --
This is the Bar plugin, value is 16
-- test --
[% USE qux = baz(32) -%]
[% qux.output %]
-- expect --
This is the Foo plugin, value is 32
-- test --
[% USE wiz = cgi(64) -%]
[% wiz.output %]
-- expect --
This is the Bar plugin, value is 64
#------------------------------------------------------------------------
# LOAD_PERL
#------------------------------------------------------------------------
-- test --
-- use tt3 --
[% USE baz = MyPlugs.Baz(128) -%]
[% baz.output %]
-- expect --
This is the Baz module, value is 128
-- test --
[% USE boz = MyPlugs.Baz(256) -%]
[% boz.output %]
-- expect --
This is the Baz module, value is 256
#------------------------------------------------------------------------
# Test case insensitivity of plugin names. We first look for the plugin
# using the name specified in its original case. From v2.15 we also look
# for standard plugins using the lower case conversion of the plugin name
# specified.
#------------------------------------------------------------------------
-- test --
[% USE mycgi = url('/cgi-bin/bar.pl', debug=1); %][% mycgi %]
-- expect --
/cgi-bin/bar.pl?debug=1
-- test --
[% USE mycgi = URL('/cgi-bin/bar.pl', debug=1); %][% mycgi %]
-- expect --
/cgi-bin/bar.pl?debug=1
-- test --
[% USE mycgi = UrL('/cgi-bin/bar.pl', debug=1); %][% mycgi %]
-- expect --
/cgi-bin/bar.pl?debug=1
#------------------------------------------------------------------------
# ADD_DEFAULT_PLUGIN_BASE = 0.
# Template::Plugins::URL no longer works since Template::Plugins is not
# added to the default plugin base. Same with others. However, url will
# work since it is specified as a plugin in
# Template::Plugins::STD_PLUGINS.
#------------------------------------------------------------------------
# should find Foo as we've specified 'MyPlugs' in the PLUGIN_BASE
-- test --
-- use tt4 --
[% USE Foo(20) -%]
[% Foo.output %]
-- expect --
This is the Foo plugin, value is 20
-- test --
-- use tt4 --
[% TRY -%]
[% USE Date() -%]
[% CATCH -%]
ERROR: [% error.info %]
[% END %]
-- expect --
ERROR: Date: plugin not found
-- test --
[% USE mycgi = url('/cgi-bin/bar.pl', debug=1); %][% mycgi %]
-- expect --
/cgi-bin/bar.pl?debug=1
"""
|
# KRW price of one bitcoin as of 15 minutes ago (blockchain.info ticker).
from blockchain import exchangerates
tk = exchangerates.get_ticker()
print('1 bitcoin =', tk['KRW'].p15min, 'KRW')
|
# coding: utf-8
"""
Kubernetes
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: v1.9.4
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class ExtensionsV1beta1DeploymentStrategy(object):
    """
    NOTE: This class is auto generated by the swagger code generator program.
    Do not edit the class manually.
    """
    """
    Attributes:
      swagger_types (dict): The key is attribute name
                            and the value is attribute type.
      attribute_map (dict): The key is attribute name
                            and the value is json key in definition.
    """
    swagger_types = {
        'rolling_update': 'ExtensionsV1beta1RollingUpdateDeployment',
        'type': 'str'
    }

    attribute_map = {
        'rolling_update': 'rollingUpdate',
        'type': 'type'
    }

    def __init__(self, rolling_update=None, type=None):
        """
        ExtensionsV1beta1DeploymentStrategy - a model defined in Swagger
        """
        self._rolling_update = None
        self._type = None
        self.discriminator = None

        # Route provided values through the property setters; omitted
        # arguments remain None.
        if rolling_update is not None:
            self.rolling_update = rolling_update
        if type is not None:
            self.type = type

    @property
    def rolling_update(self):
        """
        Gets the rolling_update of this ExtensionsV1beta1DeploymentStrategy.
        Rolling update config params. Present only if DeploymentStrategyType = RollingUpdate.

        :return: The rolling_update of this ExtensionsV1beta1DeploymentStrategy.
        :rtype: ExtensionsV1beta1RollingUpdateDeployment
        """
        return self._rolling_update

    @rolling_update.setter
    def rolling_update(self, rolling_update):
        """
        Sets the rolling_update of this ExtensionsV1beta1DeploymentStrategy.
        Rolling update config params. Present only if DeploymentStrategyType = RollingUpdate.

        :param rolling_update: The rolling_update of this ExtensionsV1beta1DeploymentStrategy.
        :type: ExtensionsV1beta1RollingUpdateDeployment
        """
        self._rolling_update = rolling_update

    @property
    def type(self):
        """
        Gets the type of this ExtensionsV1beta1DeploymentStrategy.
        Type of deployment. Can be \"Recreate\" or \"RollingUpdate\". Default is RollingUpdate.

        :return: The type of this ExtensionsV1beta1DeploymentStrategy.
        :rtype: str
        """
        return self._type

    @type.setter
    def type(self, type):
        """
        Sets the type of this ExtensionsV1beta1DeploymentStrategy.
        Type of deployment. Can be \"Recreate\" or \"RollingUpdate\". Default is RollingUpdate.

        :param type: The type of this ExtensionsV1beta1DeploymentStrategy.
        :type: str
        """
        self._type = type

    def to_dict(self):
        """
        Returns the model properties as a dict
        """
        # Recursively serialize nested models, lists of models and dicts.
        result = {}
        for attr, _ in iteritems(self.swagger_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                result[attr] = value
        return result

    def to_str(self):
        """
        Returns the string representation of the model
        """
        return pformat(self.to_dict())

    def __repr__(self):
        """
        For `print` and `pprint`
        """
        return self.to_str()

    def __eq__(self, other):
        """
        Returns true if both objects are equal
        """
        if not isinstance(other, ExtensionsV1beta1DeploymentStrategy):
            return False
        # Attribute-wise comparison via the instance dicts.
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """
        Returns true if both objects are not equal
        """
        return not self == other
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
import time
import datetime
import socket
import traceback
import logging
import errno
import os
import re
import inspect
import copy
import threading
import string
from MDSplus import *
from threading import Thread
from threading import Lock
from MDSplus.mdsExceptions import DevBAD_PARAMETER
class EPFES(Device):
    # MDSplus tree structure for this device: global settings first ...
    parts = [
        {'path': ':COMMENT', 'type': 'text',
         'value': 'Electrostatic probes front end slave'},
        {'path': ':IP', 'type': 'text', 'value': '192.168.62.55'},
        {'path': ':PORT', 'type': 'numeric', 'value': 1893},
        {'path': ':EVT', 'type': 'text', 'value': 'EP_EVT'},
        {'path': ':T_ON', 'type': 'numeric', 'value': 1.0},
        {'path': ':T_OFF', 'type': 'numeric', 'value': 1.0},
        {'path': ':N_CYCL', 'type': 'numeric', 'value': 1},
        {'path': ':WD_TMT', 'type': 'numeric', 'value': 60.0}
    ]
    # ... then one structure node per channel with SOURCE/IRANGE settings.
    for i in range(8):
        parts.append({'path': '.CH_%02d' % (i + 1), 'type': 'structure'})
        parts.append({'path': '.CH_%02d:SOURCE' % (
            i + 1), 'type': 'text', 'value': 'INT'})
        parts.append({'path': '.CH_%02d:IRANGE' % (
            i + 1), 'type': 'text', 'value': 'LOW'})
    del i
    # Dispatcher actions wiring the init/store methods into the shot cycle.
    parts.append({'path': ':INIT_ACTION', 'type': 'action',
                  'valueExpr': "Action(Dispatch('ELBP_PROBE_SERVER', 'INIT', 50, None), Method(None, 'init', head))", 'options': ('no_write_shot',)})
    parts.append({'path': ':STORE_ACTION', 'type': 'action',
                  'valueExpr': "Action(Dispatch('ELBP_PROBE_SERVER', 'POST_PULSE_CHECK', 50, None), Method(None, 'store', head))", 'options': ('no_write_shot',)})
    # One worker Job per open device instance, keyed at class level.
    workers = {}
class MyEvent(Event):
class Job(threading.Thread):
def __init__(self, device):
threading.Thread.__init__(self, name=event)
self.doSequenceEvent = threading.Event()
self.stopSequenceEvent = threading.Event()
self.exitEvent = threading.Event()
self.programEvent = threading.Event()
self.newEvent = threading.Event()
self.offEvent = threading.Event()
self.onEvent = threading.Event()
self.device = device
self.nCycles = 0
self.tON = 0
self.tOFF = 0
self.local = 0
            def doAllOn(self):
                """Send SYS:BIAS:ON to the module, enabling bias on all channels.

                Raises DevBAD_PARAMETER when IP or PORT cannot be read
                from the tree.
                """
                try:
                    ip = self.device.getNode('IP').data()
                except:
                    Data.execute('DevLogErr($1, $2)',
                                 self.device.getNid(), 'Invalid IP')
                    raise DevBAD_PARAMETER
                try:
                    port = self.device.getNode('PORT').data()
                except:
                    Data.execute('DevLogErr($1, $2)',
                                 self.device.getNid(), 'Invalid PORT')
                    raise DevBAD_PARAMETER
                # One short-lived connection per command; 2 s socket timeout.
                sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
                sock.settimeout(2.0)
                sock.connect((ip, port))
                cmd = "SYS:BIAS:ON\r\n"
                now = datetime.datetime.now()
                print (now.strftime("%Y-%m-%d %H:%M:%S") + " - " +
                       self.device.getPath() + " - Job - doAllOn() - " + cmd.rstrip("\r\n"))
                sock.sendall(cmd)
                now = datetime.datetime.now()
                print (now.strftime("%Y-%m-%d %H:%M:%S") + " - " + self.device.getPath() +
                       " - Job - doAllOn() - " + sock.recv(50).rstrip("\r\n"))
                sock.close()
def doAllOff(self):
try:
ip = self.device.getNode('IP').data()
except:
Data.execute('DevLogErr($1, $2)',
self.device.getNid(), 'Invalid IP')
raise DevBAD_PARAMETER
try:
port = self.device.getNode('PORT').data()
except:
Data.execute('DevLogErr($1, $2)',
self.device.getNid(), 'Invalid PORT')
raise DevBAD_PARAMETER
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.settimeout(2.0)
sock.connect((ip, port))
cmd = "SYS:BIAS:OFF\r\n"
now = datetime.datetime.now()
print (now.strftime("%Y-%m-%d %H:%M:%S") + " - " +
self.device.getPath() + " - Job - doAllOff() - " + cmd.rstrip("\r\n"))
sock.sendall(cmd)
now = datetime.datetime.now()
print (now.strftime("%Y-%m-%d %H:%M:%S") + " - " + self.device.getPath() +
" - Job - doAllOff() - " + sock.recv(50).rstrip("\r\n"))
sock.close()
            def doProgram(self):
                """Push T_ON/T_OFF/N_CYCL, the watchdog timeout and per-channel
                IRANGE/SOURCE settings to the module over TCP.

                Returns 1 (and sets self.local) when the module reports it is
                under local control; raises DevBAD_PARAMETER on bad tree data
                and TclFAILED_ESSENTIAL when the watchdog read-back mismatches.
                """
                now = datetime.datetime.now()
                print (now.strftime("%Y-%m-%d %H:%M:%S") + " - " +
                       self.device.getPath() + " - Job - doProgram()")
                try:
                    ip = self.device.getNode('IP').data()
                except Exception as ex:
                    Data.execute(
                        'DevLogErr($1, $2)', self.device.getNid(), 'Invalid IP %s' % (str(ex)))
                    raise DevBAD_PARAMETER
                try:
                    port = self.device.getNode('PORT').data()
                except Exception as ex:
                    Data.execute('DevLogErr($1, $2)', self.device.getNid(
                    ), 'Invalid PORT %s' % (str(ex)))
                    raise DevBAD_PARAMETER
                try:
                    self.tOn = self.device.getNode('T_ON').data()
                except:
                    Data.execute('DevLogErr($1, $2)',
                                 self.device.getNid(), 'Invalid T_ON')
                    raise DevBAD_PARAMETER
                try:
                    self.tOff = self.device.getNode('T_OFF').data()
                except:
                    Data.execute('DevLogErr($1, $2)',
                                 self.device.getNid(), 'Invalid T_OFF')
                    raise DevBAD_PARAMETER
                try:
                    self.nCycles = self.device.getNode('N_CYCL').data()
                except:
                    Data.execute('DevLogErr($1, $2)',
                                 self.device.getNid(), 'Invalid N_CYCL')
                    raise DevBAD_PARAMETER
                try:
                    wdTimeout = self.device.getNode('WD_TMT').data()
                except:
                    Data.execute('DevLogErr($1, $2)',
                                 self.device.getNid(), 'Invalid WD_TMT')
                    raise DevBAD_PARAMETER
                # Flat list: [CH1 IRANGE, CH1 SOURCE, CH2 IRANGE, ...]
                ch = []
                i = 0
                for i in range(8):
                    ch.append(self.device.getNode(
                        'CH_%02d:IRANGE' % (i + 1)).data())
                    ch.append(self.device.getNode(
                        'CH_%02d:SOURCE' % (i + 1)).data())
                # Query the watchdog timeout first: a "Loc..." reply means the
                # module is under local (front-panel) control, so skip.
                sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
                sock.settimeout(2.0)
                sock.connect((ip, port))
                cmd = "SYS:WD_TIMEOUT:?\r\n"
                now = datetime.datetime.now()
                print (now.strftime("%Y-%m-%d %H:%M:%S") + " - " +
                       self.device.getPath() + " - Job - doProgram() - " + cmd.rstrip("\r\n"))
                sock.sendall(cmd)
                rData = sock.recv(50)
                sock.close()
                now = datetime.datetime.now()
                print (now.strftime("%Y-%m-%d %H:%M:%S") + " - " +
                       self.device.getPath() + " - Job - doProgram() - " + rData.rstrip("\r\n"))
                if "Loc" in rData:
                    print (now.strftime("%Y-%m-%d %H:%M:%S") + " - " + self.device.getPath() +
                           " - Job - doProgram() - Module in local: skipped")
                    self.local = 1
                    return 1
                # Program the watchdog timeout ...
                sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
                sock.settimeout(2.0)
                sock.connect((ip, port))
                cmd = "SYS:WD_TIMEOUT:%s\r\n" % (wdTimeout)
                now = datetime.datetime.now()
                print (now.strftime("%Y-%m-%d %H:%M:%S") + " - " +
                       self.device.getPath() + " - Job - doProgram() - " + cmd.rstrip("\r\n"))
                sock.sendall(cmd)
                now = datetime.datetime.now()
                print (now.strftime("%Y-%m-%d %H:%M:%S") + " - " + self.device.getPath() +
                       " - Job - doProgram() - " + sock.recv(50).rstrip("\r\n"))
                sock.close()
                # ... then read it back to verify the module accepted it.
                sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
                sock.settimeout(2.0)
                sock.connect((ip, port))
                cmd = "SYS:WD_TIMEOUT:?\r\n"
                now = datetime.datetime.now()
                print (now.strftime("%Y-%m-%d %H:%M:%S") + " - " +
                       self.device.getPath() + " - Job - doProgram() - " + cmd.rstrip("\r\n"))
                sock.sendall(cmd)
                rData = sock.recv(50)
                sock.close()
                now = datetime.datetime.now()
                print (now.strftime("%Y-%m-%d %H:%M:%S") + " - " +
                       self.device.getPath() + " - Job - doProgram() - " + rData.rstrip("\r\n"))
                # NOTE(review): rData is a float after this line, so the
                # '???' != rData test below is always True; only the string
                # comparison is effective -- confirm intent before changing.
                rData = float(rData.split(':')[-1].strip())
                if ('???' != rData) and (str(rData) == str(float(self.device.getNode('WD_TMT').data()))):
                    pass
                else:
                    Data.execute('DevLogErr($1, $2)',
                                 self.device.getNid(), 'Command failed ' + cmd)
                    raise mdsExceptions.TclFAILED_ESSENTIAL
                if self.exitEvent.isSet() or self.stopSequenceEvent.isSet():
                    return
                # Program IRANGE and SOURCE for each of the 8 channels,
                # aborting between commands if a stop/exit was requested.
                i = 0
                j = 0
                for i in range(8):
                    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
                    sock.settimeout(2.0)
                    sock.connect((ip, port))
                    # NOTE(review): string.upper() exists only on Python 2;
                    # this device class appears to target py2 -- confirm.
                    cmd = "CH%d:IRANge:%s\r\n" % (i + 1, string.upper(ch[j]))
                    now = datetime.datetime.now()
                    print (now.strftime("%Y-%m-%d %H:%M:%S") + " - " +
                           self.device.getPath() + " - Job - doProgram() - " + cmd.rstrip("\r\n"))
                    sock.sendall(cmd)
                    now = datetime.datetime.now()
                    print (now.strftime("%Y-%m-%d %H:%M:%S") + " - " + self.device.getPath() +
                           " - Job - doProgram() - " + sock.recv(50).rstrip("\r\n"))
                    sock.close()
                    if self.exitEvent.isSet() or self.stopSequenceEvent.isSet():
                        break
                    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
                    sock.connect((ip, port))
                    cmd = "CH%d:SOUrce:%s\r\n" % (
                        i + 1, string.upper(ch[j + 1]))
                    now = datetime.datetime.now()
                    print (now.strftime("%Y-%m-%d %H:%M:%S") + " - " +
                           self.device.getPath() + " - Job - doProgram() - " + cmd.rstrip("\r\n"))
                    sock.sendall(cmd)
                    now = datetime.datetime.now()
                    print (now.strftime("%Y-%m-%d %H:%M:%S") + " - " + self.device.getPath() +
                           " - Job - doProgram() - " + sock.recv(50).rstrip("\r\n"))
                    sock.close()
                    j += 2
                    if self.exitEvent.isSet() or self.stopSequenceEvent.isSet():
                        break
def program(self):
if not self.programEvent.isSet():
now = datetime.datetime.now()
print (now.strftime("%Y-%m-%d %H:%M:%S") + " - " +
self.device.getPath() + " - Job - program() - Executing")
self.programEvent.set()
self.newEvent.set()
else:
now = datetime.datetime.now()
print (now.strftime("%Y-%m-%d %H:%M:%S") + " - " +
self.device.getPath() + " - Job - program() - Already in execution")
def myStart(self):
    """Flag a pending on/off sequence and wake the worker loop.

    Skipped (with a log line) when the module is in local mode; ignored
    when a sequence request is already pending.
    """
    if self.local:
        # Bug fix: 'now' was referenced here without ever being assigned,
        # so entering local mode raised NameError instead of logging.
        now = datetime.datetime.now()
        print (now.strftime("%Y-%m-%d %H:%M:%S") + " - " +
               self.device.getPath() + " - Job - myStart() - Module in local: skipped")
        return
    if not self.doSequenceEvent.isSet():
        now = datetime.datetime.now()
        print (now.strftime("%Y-%m-%d %H:%M:%S") + " - " +
               self.device.getPath() + " - Job - myStart() - Executing")
        self.doSequenceEvent.set()
        self.newEvent.set()
    else:
        now = datetime.datetime.now()
        print (now.strftime("%Y-%m-%d %H:%M:%S") + " - " +
               self.device.getPath() + " - Job - myStart() - Already in execution")
def stop(self):
    """Abort a running sequence: raise the stop flag and release every wait."""
    timestamp = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
    print (timestamp + " - " + self.device.getPath() + " - Job - stop()")
    self.stopSequenceEvent.set()
    # Unblock the on/off waits and the main loop so the stop is seen at once.
    self.onEvent.set()
    self.offEvent.set()
    self.newEvent.set()
def myExit(self):
    """Request worker-thread termination and release every wait."""
    self.local = 0
    timestamp = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
    print (timestamp + " - " + self.device.getPath() + " - Job - myExit()")
    self.exitEvent.set()
    # Unblock any pending wait so the run() loop can observe exitEvent.
    self.onEvent.set()
    self.offEvent.set()
    self.newEvent.set()
def run(self):
    """Worker main loop: block on newEvent, then service the requested action.

    Requests arrive as flags (doSequenceEvent / programEvent /
    stopSequenceEvent / exitEvent) set by the other Job methods; all flags
    are cleared after each wake-up, and the loop exits only on exitEvent.
    """
    while True:
        now = datetime.datetime.now()
        print (now.strftime("%Y-%m-%d %H:%M:%S") + " - " +
               self.device.getPath() + " - Job - run() - Waiting for event...")
        self.newEvent.wait()
        now = datetime.datetime.now()
        n = int(self.nCycles)
        # Log which flag woke us (checked in priority order; flags may overlap).
        if self.doSequenceEvent.isSet():
            print (now.strftime("%Y-%m-%d %H:%M:%S") + " - " + self.device.getPath() +
                   " - Job - run() - doSequenceEvent - Cycles to do: " + str(n))
        elif self.exitEvent.isSet():
            print (now.strftime("%Y-%m-%d %H:%M:%S") + " - " +
                   self.device.getPath() + " - Job - run() - exitEvent")
        elif self.stopSequenceEvent.isSet():
            print (now.strftime("%Y-%m-%d %H:%M:%S") + " - " +
                   self.device.getPath() + " - Job - run() - stopSequenceEvent")
        elif self.programEvent.isSet():
            print (now.strftime("%Y-%m-%d %H:%M:%S") + " - " +
                   self.device.getPath() + " - Job - run() - programEvent")
        else:
            print (now.strftime("%Y-%m-%d %H:%M:%S") + " - " +
                   self.device.getPath() + " - Job - run() - Warning: unknown event")
        nToDo = n
        if nToDo == 0:
            print (now.strftime("%Y-%m-%d %H:%M:%S") + " - " +
                   self.device.getPath() + " - Job - run() - Warning: cycles to do is 0")
        i = 1
        # On/off sequence; aborted early by a stop or exit request.
        while n > 0 and self.doSequenceEvent.isSet() and not self.exitEvent.isSet() and not self.stopSequenceEvent.isSet():
            now = datetime.datetime.now()
            print (now.strftime("%Y-%m-%d %H:%M:%S") + " - " + self.device.getPath() +
                   " - Job - run() - Cycle " + str(i) + " of " + str(nToDo))
            i += 1
            self.doAllOn()
            # offEvent/onEvent double as abort signals: stop()/myExit() set
            # them so these timed waits return immediately.
            self.offEvent.wait(self.tOn)
            self.doAllOff()
            n -= 1
            self.onEvent.wait(self.tOff)
        print (now.strftime("%Y-%m-%d %H:%M:%S") + " - " +
               self.device.getPath() + " - Job - run() - Done")
        if self.programEvent.isSet() and not self.exitEvent.isSet():
            self.doProgram()
        # Re-arm every request flag for the next wake-up.
        self.newEvent.clear()
        self.onEvent.clear()
        self.offEvent.clear()
        self.doSequenceEvent.clear()
        self.stopSequenceEvent.clear()
        self.programEvent.clear()
        if self.exitEvent.isSet():
            self.exitEvent.clear()
            break
def __init__(self, event, device):
    """Register for MDSplus event `event` and start the background Job thread."""
    Event.__init__(self, event)
    self.device = device
    self.nid = device.nid
    # The Job thread performs the actual sequencing/programming work.
    self.job = self.Job(device)
    self.job.start()
def run(self):
    """Dispatch an incoming MDSplus event to the worker Job.

    The first whitespace-separated token of the event data is the command;
    both the device-specific form (e.g. "start_<nid>") and the broadcast
    form (e.g. "start") are accepted, case-insensitively:
      exit  -> terminate and join the worker, cancel this listener
      start -> begin the on/off sequence
      stop  -> abort the sequence
      init  -> program the hardware
    Unknown commands are ignored.

    Refactor: the original eight branches (one per command form) were
    pairwise identical; they are collapsed to four, and the unused local
    `evtName = self.getName()` was removed.
    """
    evtData = string.lower(Data.getString(self.getData()))
    data = evtData.split(" ")
    cmd = string.lower(data[0])
    nid = str(self.nid)
    if cmd == "exit_%s" % nid or cmd == "exit":
        now = datetime.datetime.now()
        print (now.strftime("%Y-%m-%d %H:%M:%S") + " - " + self.device.getPath() +
               " - MyEvent - run() - Arrived event " + cmd)
        self.job.myExit()
        self.job.join()
        self.cancel()
    elif cmd == "start_%s" % nid or cmd == "start":
        now = datetime.datetime.now()
        print (now.strftime("%Y-%m-%d %H:%M:%S") + " - " + self.device.getPath() +
               " - MyEvent - run() - Arrived event " + cmd)
        self.job.myStart()
    elif cmd == "stop_%s" % nid or cmd == "stop":
        now = datetime.datetime.now()
        print (now.strftime("%Y-%m-%d %H:%M:%S") + " - " + self.device.getPath() +
               " - MyEvent - run() - Arrived event " + cmd)
        self.job.stop()
    elif cmd == "init_%s" % nid or cmd == "init":
        now = datetime.datetime.now()
        print (now.strftime("%Y-%m-%d %H:%M:%S") + " - " + self.device.getPath() +
               " - MyEvent - run() - Arrived event " + cmd)
        self.job.program()
def init(self):
    """Create (or re-create) the per-device event listener and trigger programming.

    Any worker already registered for this nid is shut down first (exit
    command, join, deregister); then a new MyEvent listener is registered
    under the EVT event name and an init_<nid> command is broadcast so the
    new worker programs the hardware.  Returns 1 on success.
    """
    try:
        evtName = self.getNode('EVT').data()
    except:
        Data.execute('DevLogErr($1, $2)', self.getNid(), 'Invalid EVT')
        raise DevBAD_PARAMETER
    # Tear down a previous worker for this device, if any.
    if self.nid in EPFES.workers.keys():
        param = "exit_" + str(self.nid)
        Event.setevent(EPFES.workers[self.nid].getName(), param)
        EPFES.workers[self.nid].join()
        del EPFES.workers[self.nid]
    evt = self.MyEvent(evtName, self.copy())
    EPFES.workers[self.nid] = evt
    param = "init_" + str(self.nid)
    Event.setevent(evtName, param)
    return 1
def start(self):
    """Send a start_<nid> command to this device's worker, if one is registered."""
    if self.nid not in EPFES.workers.keys():
        # init() was never run for this device, so there is no worker to notify.
        now = datetime.datetime.now()
        print (now.strftime("%Y-%m-%d %H:%M:%S") + " - " +
               self.getPath() + " - EPFES - start() - Init not done")
        return 1
    Event.setevent(EPFES.workers[self.nid].getName(), "start_" + str(self.nid))
    return 1
def stop(self):
    """Send a stop_<nid> command, via the worker if present, else via the EVT event."""
    param = "stop_" + str(self.nid)
    if self.nid in EPFES.workers.keys():
        Event.setevent(EPFES.workers[self.nid].getName(), param)
        return 1
    # No registered worker: broadcast on the configured event name instead.
    try:
        evtName = self.getNode('EVT').data()
    except:
        Data.execute('DevLogErr($1, $2)', self.getNid(), 'Invalid EVT')
        raise DevBAD_PARAMETER
    Event.setevent(evtName, param)
    return 1
def store(self):
    """Shut the worker down (exit, join, deregister), or broadcast exit_<nid> if absent."""
    param = "exit_" + str(self.nid)
    if self.nid in EPFES.workers.keys():
        worker = EPFES.workers[self.nid]
        Event.setevent(worker.getName(), param)
        worker.join()
        del EPFES.workers[self.nid]
        return 1
    # No registered worker: broadcast on the configured event name instead.
    try:
        evtName = self.getNode('EVT').data()
    except:
        Data.execute('DevLogErr($1, $2)', self.getNid(), 'Invalid EVT')
        raise DevBAD_PARAMETER
    Event.setevent(evtName, param)
    return 1
|
from unittest import TestCase
from unittest.mock import ANY, MagicMock, Mock, call, patch
from samcli.commands.deploy.command import do_cli
from samcli.commands.deploy.exceptions import GuidedDeployFailedError
from samcli.commands.deploy.guided_config import GuidedConfig
from samcli.commands.deploy.exceptions import DeployResolveS3AndS3SetError
from tests.unit.cli.test_cli_config_file import MockContext
def get_mock_sam_config():
    """Build a MagicMock standing in for a SamConfig whose exists() is always True."""
    config = MagicMock()
    config.exists = MagicMock(return_value=True)
    return config
# Shared module-level mock; tests reset it in setUp() and assert on its .put calls.
MOCK_SAM_CONFIG = get_mock_sam_config()
class TestDeployCliCommand(TestCase):
    """Unit tests for the `sam deploy` do_cli entry point.

    Each test patches DeployContext/PackageContext (and, for guided flows,
    the guided-context prompt/confirm helpers) and asserts that do_cli
    forwards the expected arguments and persists the expected samconfig
    values.
    """

    def setUp(self):
        """Common fixture values for every test; resets the shared module mock."""
        self.template_file = "input-template-file"
        self.stack_name = "stack-name"
        self.s3_bucket = "s3-bucket"
        self.s3_prefix = "s3-prefix"
        self.kms_key_id = "kms-key-id"
        self.no_execute_changeset = False
        self.notification_arns = []
        self.parameter_overrides = {"a": "b"}
        self.capabilities = ("CAPABILITY_IAM",)
        self.tags = {"c": "d"}
        # NOTE: attribute name is misspelled ("changset") but used consistently below.
        self.fail_on_empty_changset = True
        self.role_arn = "role_arn"
        self.force_upload = False
        self.no_progressbar = False
        self.metadata = {"abc": "def"}
        self.region = None
        self.profile = None
        self.use_json = True
        # NOTE(review): this overwrites the {"abc": "def"} metadata assigned above,
        # making that earlier assignment dead — confirm which value was intended.
        self.metadata = {}
        self.guided = False
        self.confirm_changeset = False
        self.resolve_s3 = False
        self.config_env = "mock-default-env"
        self.config_file = "mock-default-filename"
        MOCK_SAM_CONFIG.reset_mock()

    @patch("samcli.commands.package.command.click")
    @patch("samcli.commands.package.package_context.PackageContext")
    @patch("samcli.commands.deploy.command.click")
    @patch("samcli.commands.deploy.deploy_context.DeployContext")
    def test_all_args(self, mock_deploy_context, mock_deploy_click, mock_package_context, mock_package_click):
        """Non-guided deploy forwards all explicit arguments to DeployContext unchanged."""
        context_mock = Mock()
        mock_deploy_context.return_value.__enter__.return_value = context_mock
        do_cli(
            template_file=self.template_file,
            stack_name=self.stack_name,
            s3_bucket=self.s3_bucket,
            force_upload=self.force_upload,
            no_progressbar=self.no_progressbar,
            s3_prefix=self.s3_prefix,
            kms_key_id=self.kms_key_id,
            parameter_overrides=self.parameter_overrides,
            capabilities=self.capabilities,
            no_execute_changeset=self.no_execute_changeset,
            role_arn=self.role_arn,
            notification_arns=self.notification_arns,
            fail_on_empty_changeset=self.fail_on_empty_changset,
            tags=self.tags,
            region=self.region,
            profile=self.profile,
            use_json=self.use_json,
            metadata=self.metadata,
            guided=self.guided,
            confirm_changeset=self.confirm_changeset,
            resolve_s3=self.resolve_s3,
            config_env=self.config_env,
            config_file=self.config_file,
        )
        mock_deploy_context.assert_called_with(
            template_file=ANY,
            stack_name=self.stack_name,
            s3_bucket=self.s3_bucket,
            force_upload=self.force_upload,
            no_progressbar=self.no_progressbar,
            s3_prefix=self.s3_prefix,
            kms_key_id=self.kms_key_id,
            parameter_overrides=self.parameter_overrides,
            capabilities=self.capabilities,
            no_execute_changeset=self.no_execute_changeset,
            role_arn=self.role_arn,
            notification_arns=self.notification_arns,
            fail_on_empty_changeset=self.fail_on_empty_changset,
            tags=self.tags,
            region=self.region,
            profile=self.profile,
            confirm_changeset=self.confirm_changeset,
        )
        context_mock.run.assert_called_with()
        self.assertEqual(context_mock.run.call_count, 1)

    @patch("samcli.commands.package.command.click")
    @patch("samcli.commands.package.package_context.PackageContext")
    @patch("samcli.commands.deploy.command.click")
    @patch("samcli.commands.deploy.deploy_context.DeployContext")
    @patch("samcli.commands.deploy.guided_context.manage_stack")
    @patch("samcli.commands.deploy.guided_context.auth_per_resource")
    @patch("samcli.commands.deploy.guided_context.get_template_parameters")
    @patch("samcli.commands.deploy.guided_context.get_template_data")
    @patch.object(GuidedConfig, "get_config_ctx", MagicMock(return_value=(None, get_mock_sam_config())))
    @patch("samcli.commands.deploy.guided_context.prompt")
    @patch("samcli.commands.deploy.guided_context.confirm")
    def test_all_args_guided_no_to_authorization_confirmation_prompt(
        self,
        mock_confirm,
        mock_prompt,
        mock_get_template_data,
        mock_get_template_parameters,
        mockauth_per_resource,
        mock_managed_stack,
        mock_deploy_context,
        mock_deploy_click,
        mock_package_context,
        mock_package_click,
    ):
        """Guided deploy raises GuidedDeployFailedError when the user declines
        the prompt about resources with no authorization defined (final False
        in mock_confirm.side_effect)."""
        context_mock = Mock()
        mockauth_per_resource.return_value = [("HelloWorldResource1", False), ("HelloWorldResource2", False)]
        mock_deploy_context.return_value.__enter__.return_value = context_mock
        mock_confirm.side_effect = [True, True, True, False]
        mock_prompt.side_effect = [
            "sam-app",
            "us-east-1",
            "guidedParameter",
            "secure",
            ("CAPABILITY_IAM",),
            "testconfig.toml",
            "test-env",
        ]
        mock_get_template_parameters.return_value = {
            "Myparameter": {"Type": "String"},
            "MyNoEchoParameter": {"Type": "String", "NoEcho": True},
        }
        mock_managed_stack.return_value = "managed-s3-bucket"
        with patch.object(GuidedConfig, "save_config", MagicMock(return_value=True)) as mock_save_config:
            with self.assertRaises(GuidedDeployFailedError):
                do_cli(
                    template_file=self.template_file,
                    stack_name=self.stack_name,
                    s3_bucket=None,
                    force_upload=self.force_upload,
                    no_progressbar=self.no_progressbar,
                    s3_prefix=self.s3_prefix,
                    kms_key_id=self.kms_key_id,
                    parameter_overrides=self.parameter_overrides,
                    capabilities=self.capabilities,
                    no_execute_changeset=self.no_execute_changeset,
                    role_arn=self.role_arn,
                    notification_arns=self.notification_arns,
                    fail_on_empty_changeset=self.fail_on_empty_changset,
                    tags=self.tags,
                    region=self.region,
                    profile=self.profile,
                    use_json=self.use_json,
                    metadata=self.metadata,
                    guided=True,
                    confirm_changeset=True,
                    resolve_s3=self.resolve_s3,
                    config_env=self.config_env,
                    config_file=self.config_file,
                )

    @patch("samcli.commands.package.command.click")
    @patch("samcli.commands.package.package_context.PackageContext")
    @patch("samcli.commands.deploy.command.click")
    @patch("samcli.commands.deploy.deploy_context.DeployContext")
    @patch("samcli.commands.deploy.guided_context.manage_stack")
    @patch("samcli.commands.deploy.guided_context.auth_per_resource")
    @patch("samcli.commands.deploy.guided_context.get_template_parameters")
    @patch("samcli.commands.deploy.guided_context.get_template_data")
    @patch.object(GuidedConfig, "get_config_ctx", MagicMock(return_value=(None, get_mock_sam_config())))
    @patch("samcli.commands.deploy.guided_context.prompt")
    @patch("samcli.commands.deploy.guided_context.confirm")
    def test_all_args_guided(
        self,
        mock_confirm,
        mock_prompt,
        mock_get_template_data,
        mock_get_template_parameters,
        mockauth_per_resource,
        mock_managed_stack,
        mock_deploy_context,
        mock_deploy_click,
        mock_package_context,
        mock_package_click,
    ):
        """Happy-path guided deploy: prompted values override CLI args and the
        guided configuration is saved (NoEcho parameter marked Hidden)."""
        context_mock = Mock()
        mockauth_per_resource.return_value = [("HelloWorldResource", False)]
        mock_deploy_context.return_value.__enter__.return_value = context_mock
        mock_confirm.side_effect = [True, False, True, True]
        mock_prompt.side_effect = [
            "sam-app",
            "us-east-1",
            "guidedParameter",
            "secure",
            ("CAPABILITY_IAM",),
            "testconfig.toml",
            "test-env",
        ]
        mock_get_template_parameters.return_value = {
            "Myparameter": {"Type": "String"},
            "MyNoEchoParameter": {"Type": "String", "NoEcho": True},
        }
        mock_managed_stack.return_value = "managed-s3-bucket"
        with patch.object(GuidedConfig, "save_config", MagicMock(return_value=True)) as mock_save_config:
            do_cli(
                template_file=self.template_file,
                stack_name=self.stack_name,
                s3_bucket=None,
                force_upload=self.force_upload,
                no_progressbar=self.no_progressbar,
                s3_prefix=self.s3_prefix,
                kms_key_id=self.kms_key_id,
                parameter_overrides=self.parameter_overrides,
                capabilities=self.capabilities,
                no_execute_changeset=self.no_execute_changeset,
                role_arn=self.role_arn,
                notification_arns=self.notification_arns,
                fail_on_empty_changeset=self.fail_on_empty_changset,
                tags=self.tags,
                region=self.region,
                profile=self.profile,
                use_json=self.use_json,
                metadata=self.metadata,
                guided=True,
                confirm_changeset=True,
                resolve_s3=self.resolve_s3,
                config_env=self.config_env,
                config_file=self.config_file,
            )
            mock_deploy_context.assert_called_with(
                template_file=ANY,
                stack_name="sam-app",
                s3_bucket="managed-s3-bucket",
                force_upload=self.force_upload,
                no_progressbar=self.no_progressbar,
                s3_prefix="sam-app",
                kms_key_id=self.kms_key_id,
                parameter_overrides={"Myparameter": "guidedParameter", "MyNoEchoParameter": "secure"},
                capabilities=self.capabilities,
                no_execute_changeset=self.no_execute_changeset,
                role_arn=self.role_arn,
                notification_arns=self.notification_arns,
                fail_on_empty_changeset=self.fail_on_empty_changset,
                tags=self.tags,
                region="us-east-1",
                profile=self.profile,
                confirm_changeset=True,
            )
            context_mock.run.assert_called_with()
            mock_save_config.assert_called_with(
                {
                    "Myparameter": {"Value": "guidedParameter", "Hidden": False},
                    "MyNoEchoParameter": {"Value": "secure", "Hidden": True},
                },
                "test-env",
                "testconfig.toml",
                capabilities=("CAPABILITY_IAM",),
                confirm_changeset=True,
                profile=self.profile,
                region="us-east-1",
                s3_bucket="managed-s3-bucket",
                stack_name="sam-app",
                s3_prefix="sam-app",
            )
            mock_managed_stack.assert_called_with(profile=self.profile, region="us-east-1")
            self.assertEqual(context_mock.run.call_count, 1)

    @patch("samcli.commands.package.command.click")
    @patch("samcli.commands.package.package_context.PackageContext")
    @patch("samcli.commands.deploy.command.click")
    @patch("samcli.commands.deploy.deploy_context.DeployContext")
    @patch("samcli.commands.deploy.guided_context.manage_stack")
    @patch("samcli.commands.deploy.guided_context.auth_per_resource")
    @patch("samcli.commands.deploy.guided_context.get_template_data")
    @patch("samcli.commands.deploy.guided_context.get_template_parameters")
    @patch.object(
        GuidedConfig,
        "get_config_ctx",
        MagicMock(return_value=(MockContext(info_name="deploy", parent=None), MOCK_SAM_CONFIG)),
    )
    @patch("samcli.commands.deploy.guided_context.prompt")
    @patch("samcli.commands.deploy.guided_context.confirm")
    def test_all_args_guided_no_save_echo_param_to_config(
        self,
        mock_confirm,
        mock_prompt,
        mock_get_template_parameters,
        mock_get_template_data,
        mockauth_per_resource,
        mock_managed_stack,
        mock_deploy_context,
        mock_deploy_click,
        mock_package_context,
        mock_package_click,
    ):
        """NoEcho parameters are passed to the deployment but are excluded from
        the parameter_overrides persisted to samconfig (see call_args_list)."""
        context_mock = Mock()
        mockauth_per_resource.return_value = [("HelloWorldResource", False)]
        mock_get_template_parameters.return_value = {
            "Myparameter": {"Type": "String"},
            "MyParameterSpaces": {"Type": "String"},
            "MyNoEchoParameter": {"Type": "String", "NoEcho": True},
        }
        mock_deploy_context.return_value.__enter__.return_value = context_mock
        mock_prompt.side_effect = [
            "sam-app",
            "us-east-1",
            "guidedParameter",
            "guided parameter with spaces",
            "secure",
            ("CAPABILITY_IAM",),
            "testconfig.toml",
            "test-env",
        ]
        mock_confirm.side_effect = [True, False, True, True]
        mock_managed_stack.return_value = "managed-s3-bucket"
        do_cli(
            template_file=self.template_file,
            stack_name=self.stack_name,
            s3_bucket=None,
            force_upload=self.force_upload,
            no_progressbar=self.no_progressbar,
            s3_prefix=self.s3_prefix,
            kms_key_id=self.kms_key_id,
            parameter_overrides=self.parameter_overrides,
            capabilities=self.capabilities,
            no_execute_changeset=self.no_execute_changeset,
            role_arn=self.role_arn,
            notification_arns=self.notification_arns,
            fail_on_empty_changeset=self.fail_on_empty_changset,
            tags=self.tags,
            region=self.region,
            profile=self.profile,
            use_json=self.use_json,
            metadata=self.metadata,
            guided=True,
            confirm_changeset=True,
            resolve_s3=self.resolve_s3,
            config_env=self.config_env,
            config_file=self.config_file,
        )
        mock_deploy_context.assert_called_with(
            template_file=ANY,
            stack_name="sam-app",
            s3_bucket="managed-s3-bucket",
            force_upload=self.force_upload,
            no_progressbar=self.no_progressbar,
            s3_prefix="sam-app",
            kms_key_id=self.kms_key_id,
            parameter_overrides={
                "Myparameter": "guidedParameter",
                "MyParameterSpaces": "guided parameter with spaces",
                "MyNoEchoParameter": "secure",
            },
            capabilities=self.capabilities,
            no_execute_changeset=self.no_execute_changeset,
            role_arn=self.role_arn,
            notification_arns=self.notification_arns,
            fail_on_empty_changeset=self.fail_on_empty_changset,
            tags=self.tags,
            region="us-east-1",
            profile=self.profile,
            confirm_changeset=True,
        )
        context_mock.run.assert_called_with()
        mock_managed_stack.assert_called_with(profile=self.profile, region="us-east-1")
        self.assertEqual(context_mock.run.call_count, 1)
        self.assertEqual(MOCK_SAM_CONFIG.put.call_count, 7)
        self.assertEqual(
            MOCK_SAM_CONFIG.put.call_args_list,
            [
                call(["deploy"], "parameters", "stack_name", "sam-app", env="test-env"),
                call(["deploy"], "parameters", "s3_bucket", "managed-s3-bucket", env="test-env"),
                call(["deploy"], "parameters", "s3_prefix", "sam-app", env="test-env"),
                call(["deploy"], "parameters", "region", "us-east-1", env="test-env"),
                call(["deploy"], "parameters", "confirm_changeset", True, env="test-env"),
                call(["deploy"], "parameters", "capabilities", "CAPABILITY_IAM", env="test-env"),
                call(
                    ["deploy"],
                    "parameters",
                    "parameter_overrides",
                    'Myparameter="guidedParameter" MyParameterSpaces="guided parameter with spaces"',
                    env="test-env",
                ),
            ],
        )

    @patch("samcli.commands.package.command.click")
    @patch("samcli.commands.package.package_context.PackageContext")
    @patch("samcli.commands.deploy.command.click")
    @patch("samcli.commands.deploy.deploy_context.DeployContext")
    @patch("samcli.commands.deploy.guided_context.auth_per_resource")
    @patch("samcli.commands.deploy.guided_context.get_template_data")
    @patch("samcli.commands.deploy.guided_context.manage_stack")
    @patch("samcli.commands.deploy.guided_context.get_template_parameters")
    @patch.object(
        GuidedConfig,
        "get_config_ctx",
        MagicMock(return_value=(MockContext(info_name="deploy", parent=None), MOCK_SAM_CONFIG)),
    )
    @patch("samcli.commands.deploy.guided_context.prompt")
    @patch("samcli.commands.deploy.guided_context.confirm")
    @patch("samcli.commands.deploy.guided_config.SamConfig")
    @patch("samcli.commands.deploy.guided_config.get_cmd_names")
    def test_all_args_guided_no_params_save_config(
        self,
        mock_get_cmd_names,
        mock_sam_config,
        mock_confirm,
        mock_prompt,
        mock_get_template_parameters,
        mock_managed_stack,
        mock_get_template_data,
        mockauth_per_resource,
        mock_deploy_context,
        mock_deploy_click,
        mock_package_context,
        mock_package_click,
    ):
        """Guided deploy with a template that has no parameters still persists
        the CLI-supplied parameter_overrides and guided values to samconfig."""
        context_mock = Mock()
        mockauth_per_resource.return_value = [("HelloWorldResource", False)]
        mock_get_template_parameters.return_value = {}
        mock_deploy_context.return_value.__enter__.return_value = context_mock
        mock_prompt.side_effect = ["sam-app", "us-east-1", ("CAPABILITY_IAM",), "testconfig.toml", "test-env"]
        mock_confirm.side_effect = [True, False, True, True]
        mock_get_cmd_names.return_value = ["deploy"]
        mock_managed_stack.return_value = "managed-s3-bucket"
        do_cli(
            template_file=self.template_file,
            stack_name=self.stack_name,
            s3_bucket=None,
            force_upload=self.force_upload,
            no_progressbar=self.no_progressbar,
            s3_prefix=self.s3_prefix,
            kms_key_id=self.kms_key_id,
            parameter_overrides=self.parameter_overrides,
            capabilities=self.capabilities,
            no_execute_changeset=self.no_execute_changeset,
            role_arn=self.role_arn,
            notification_arns=self.notification_arns,
            fail_on_empty_changeset=self.fail_on_empty_changset,
            tags=self.tags,
            region=self.region,
            profile=self.profile,
            use_json=self.use_json,
            metadata=self.metadata,
            guided=True,
            confirm_changeset=True,
            resolve_s3=self.resolve_s3,
            config_env=self.config_env,
            config_file=self.config_file,
        )
        mock_deploy_context.assert_called_with(
            template_file=ANY,
            stack_name="sam-app",
            s3_bucket="managed-s3-bucket",
            force_upload=self.force_upload,
            no_progressbar=self.no_progressbar,
            s3_prefix="sam-app",
            kms_key_id=self.kms_key_id,
            parameter_overrides=self.parameter_overrides,
            capabilities=self.capabilities,
            no_execute_changeset=self.no_execute_changeset,
            role_arn=self.role_arn,
            notification_arns=self.notification_arns,
            fail_on_empty_changeset=self.fail_on_empty_changset,
            tags=self.tags,
            region="us-east-1",
            profile=self.profile,
            confirm_changeset=True,
        )
        context_mock.run.assert_called_with()
        mock_managed_stack.assert_called_with(profile=self.profile, region="us-east-1")
        self.assertEqual(context_mock.run.call_count, 1)
        self.assertEqual(MOCK_SAM_CONFIG.put.call_count, 7)
        self.assertEqual(
            MOCK_SAM_CONFIG.put.call_args_list,
            [
                call(["deploy"], "parameters", "stack_name", "sam-app", env="test-env"),
                call(["deploy"], "parameters", "s3_bucket", "managed-s3-bucket", env="test-env"),
                call(["deploy"], "parameters", "s3_prefix", "sam-app", env="test-env"),
                call(["deploy"], "parameters", "region", "us-east-1", env="test-env"),
                call(["deploy"], "parameters", "confirm_changeset", True, env="test-env"),
                call(["deploy"], "parameters", "capabilities", "CAPABILITY_IAM", env="test-env"),
                call(["deploy"], "parameters", "parameter_overrides", 'a="b"', env="test-env"),
            ],
        )

    @patch("samcli.commands.package.command.click")
    @patch("samcli.commands.package.package_context.PackageContext")
    @patch("samcli.commands.deploy.command.click")
    @patch("samcli.commands.deploy.deploy_context.DeployContext")
    @patch("samcli.commands.deploy.guided_context.manage_stack")
    @patch("samcli.commands.deploy.guided_context.auth_per_resource")
    @patch("samcli.commands.deploy.guided_context.get_template_data")
    @patch("samcli.commands.deploy.guided_context.get_template_parameters")
    @patch.object(GuidedConfig, "get_config_ctx", MagicMock(return_value=(None, get_mock_sam_config())))
    @patch("samcli.commands.deploy.guided_context.prompt")
    @patch("samcli.commands.deploy.guided_context.confirm")
    def test_all_args_guided_no_params_no_save_config(
        self,
        mock_confirm,
        mock_prompt,
        mock_get_template_parameters,
        mock_get_template_data,
        mockauth_per_resource,
        mock_managed_stack,
        mock_deploy_context,
        mock_deploy_click,
        mock_package_context,
        mock_package_click,
    ):
        """Declining the save-arguments prompt (final False) skips save_config
        entirely while the deployment itself still runs."""
        context_mock = Mock()
        mockauth_per_resource.return_value = [("HelloWorldResource", False)]
        mock_get_template_parameters.return_value = {}
        mock_deploy_context.return_value.__enter__.return_value = context_mock
        mock_prompt.side_effect = ["sam-app", "us-east-1", ("CAPABILITY_IAM",)]
        mock_confirm.side_effect = [True, False, True, False]
        mock_managed_stack.return_value = "managed-s3-bucket"
        with patch.object(GuidedConfig, "save_config", MagicMock(return_value=False)) as mock_save_config:
            do_cli(
                template_file=self.template_file,
                stack_name=self.stack_name,
                s3_bucket=None,
                force_upload=self.force_upload,
                no_progressbar=self.no_progressbar,
                s3_prefix=self.s3_prefix,
                kms_key_id=self.kms_key_id,
                parameter_overrides=self.parameter_overrides,
                capabilities=self.capabilities,
                no_execute_changeset=self.no_execute_changeset,
                role_arn=self.role_arn,
                notification_arns=self.notification_arns,
                fail_on_empty_changeset=self.fail_on_empty_changset,
                tags=self.tags,
                region=self.region,
                profile=self.profile,
                use_json=self.use_json,
                metadata=self.metadata,
                guided=True,
                confirm_changeset=True,
                resolve_s3=self.resolve_s3,
                config_file=self.config_file,
                config_env=self.config_env,
            )
            mock_deploy_context.assert_called_with(
                template_file=ANY,
                stack_name="sam-app",
                s3_bucket="managed-s3-bucket",
                force_upload=self.force_upload,
                no_progressbar=self.no_progressbar,
                s3_prefix="sam-app",
                kms_key_id=self.kms_key_id,
                parameter_overrides=self.parameter_overrides,
                capabilities=self.capabilities,
                no_execute_changeset=self.no_execute_changeset,
                role_arn=self.role_arn,
                notification_arns=self.notification_arns,
                fail_on_empty_changeset=self.fail_on_empty_changset,
                tags=self.tags,
                region="us-east-1",
                profile=self.profile,
                confirm_changeset=True,
            )
            context_mock.run.assert_called_with()
            self.assertEqual(mock_save_config.call_count, 0)
            mock_managed_stack.assert_called_with(profile=self.profile, region="us-east-1")
            self.assertEqual(context_mock.run.call_count, 1)

    @patch("samcli.commands.package.command.click")
    @patch("samcli.commands.package.package_context.PackageContext")
    @patch("samcli.commands.deploy.command.click")
    @patch("samcli.commands.deploy.deploy_context.DeployContext")
    @patch("samcli.commands.deploy.command.manage_stack")
    def test_all_args_resolve_s3(
        self, mock_manage_stack, mock_deploy_context, mock_deploy_click, mock_package_context, mock_package_click
    ):
        """--resolve-s3 (non-guided) provisions the managed bucket via manage_stack
        and passes it to DeployContext as s3_bucket."""
        context_mock = Mock()
        mock_deploy_context.return_value.__enter__.return_value = context_mock
        mock_manage_stack.return_value = "managed-s3-bucket"
        do_cli(
            template_file=self.template_file,
            stack_name=self.stack_name,
            s3_bucket=None,
            force_upload=self.force_upload,
            no_progressbar=self.no_progressbar,
            s3_prefix=self.s3_prefix,
            kms_key_id=self.kms_key_id,
            parameter_overrides=self.parameter_overrides,
            capabilities=self.capabilities,
            no_execute_changeset=self.no_execute_changeset,
            role_arn=self.role_arn,
            notification_arns=self.notification_arns,
            fail_on_empty_changeset=self.fail_on_empty_changset,
            tags=self.tags,
            region=self.region,
            profile=self.profile,
            use_json=self.use_json,
            metadata=self.metadata,
            guided=self.guided,
            confirm_changeset=self.confirm_changeset,
            resolve_s3=True,
            config_file=self.config_file,
            config_env=self.config_env,
        )
        mock_deploy_context.assert_called_with(
            template_file=ANY,
            stack_name=self.stack_name,
            s3_bucket="managed-s3-bucket",
            force_upload=self.force_upload,
            no_progressbar=self.no_progressbar,
            s3_prefix=self.s3_prefix,
            kms_key_id=self.kms_key_id,
            parameter_overrides=self.parameter_overrides,
            capabilities=self.capabilities,
            no_execute_changeset=self.no_execute_changeset,
            role_arn=self.role_arn,
            notification_arns=self.notification_arns,
            fail_on_empty_changeset=self.fail_on_empty_changset,
            tags=self.tags,
            region=self.region,
            profile=self.profile,
            confirm_changeset=self.confirm_changeset,
        )
        context_mock.run.assert_called_with()
        self.assertEqual(context_mock.run.call_count, 1)

    def test_resolve_s3_and_s3_bucket_both_set(self):
        """Setting both --resolve-s3 and an explicit s3_bucket is a user error
        and must raise DeployResolveS3AndS3SetError."""
        with self.assertRaises(DeployResolveS3AndS3SetError):
            do_cli(
                template_file=self.template_file,
                stack_name=self.stack_name,
                s3_bucket="managed-s3-bucket",
                force_upload=self.force_upload,
                no_progressbar=self.no_progressbar,
                s3_prefix=self.s3_prefix,
                kms_key_id=self.kms_key_id,
                parameter_overrides=self.parameter_overrides,
                capabilities=self.capabilities,
                no_execute_changeset=self.no_execute_changeset,
                role_arn=self.role_arn,
                notification_arns=self.notification_arns,
                fail_on_empty_changeset=self.fail_on_empty_changset,
                tags=self.tags,
                region=self.region,
                profile=self.profile,
                use_json=self.use_json,
                metadata=self.metadata,
                guided=False,
                confirm_changeset=True,
                resolve_s3=True,
                config_file=self.config_file,
                config_env=self.config_env,
            )
|
/*
*
* Copyright 2015, Google Inc.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following disclaimer
* in the documentation and/or other materials provided with the
* distribution.
* * Neither the name of Google Inc. nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*/
#include <ruby/ruby.h>
#include "rb_grpc_imports.generated.h"
#include "rb_byte_buffer.h"
#include <grpc/grpc.h>
#include <grpc/byte_buffer_reader.h>
#include <grpc/slice.h>
#include "rb_grpc.h"
/* Copies `length` bytes of `string` into a freshly allocated grpc_byte_buffer.
 * The intermediate slice is unref'd here, so the returned buffer is the sole
 * owner of the copied data; the caller is responsible for releasing it. */
grpc_byte_buffer* grpc_rb_s_to_byte_buffer(char *string, size_t length) {
  grpc_slice slice = grpc_slice_from_copied_buffer(string, length);
  grpc_byte_buffer *buffer = grpc_raw_byte_buffer_create(&slice, 1);
  grpc_slice_unref(slice);
  return buffer;
}
/* Converts a grpc_byte_buffer to a Ruby String (VALUE).
 * Returns Qnil for a NULL buffer; raises a Ruby RuntimeError if the reader
 * cannot be initialized.  Each slice read from the buffer is appended to the
 * Ruby string and unref'd before moving on, so no slice refs are leaked. */
VALUE grpc_rb_byte_buffer_to_s(grpc_byte_buffer *buffer) {
  VALUE rb_string;
  grpc_byte_buffer_reader reader;
  grpc_slice next;
  if (buffer == NULL) {
    return Qnil;
  }
  /* Pre-size the Ruby string to the full payload length to avoid reallocs. */
  rb_string = rb_str_buf_new(grpc_byte_buffer_length(buffer));
  if (!grpc_byte_buffer_reader_init(&reader, buffer)) {
    rb_raise(rb_eRuntimeError, "Error initializing byte buffer reader.");
    return Qnil;
  }
  /* grpc_byte_buffer_reader_next returns 0 once all slices are consumed. */
  while (grpc_byte_buffer_reader_next(&reader, &next) != 0) {
    rb_str_cat(rb_string, (const char *) GRPC_SLICE_START_PTR(next),
               GRPC_SLICE_LENGTH(next));
    grpc_slice_unref(next);
  }
  grpc_byte_buffer_reader_destroy(&reader);
  return rb_string;
}
/* Copies the contents of `slice` into a new Ruby String.
 * A NULL start pointer means the slice was never initialized, which is a
 * programming error here, so raise rather than return an empty string. */
VALUE grpc_rb_slice_to_ruby_string(grpc_slice slice) {
  if (GRPC_SLICE_START_PTR(slice) == NULL) {
    rb_raise(rb_eRuntimeError, "attempt to convert uninitialized grpc_slice to ruby string");
  }
  return rb_str_new((char*)GRPC_SLICE_START_PTR(slice), GRPC_SLICE_LENGTH(slice));
}
|
#pragma bank 1
#include "SRAM.h"
#include <string.h>
#define MAGIC "ZGB-SAVE"
#define MAGIC_LENGTH 9
/* Validates the battery-backed SRAM save area at `ptr` (`size` bytes total).
 * Layout (as read/written below): [MAGIC_LENGTH bytes magic, NUL included]
 * [2-byte stored size][payload up to `size`].
 * - If the magic string is absent, the save is uninitialized or corrupt:
 *   write the magic and zero the whole payload.
 * - If the magic matches but the stored size is smaller than `size` (save
 *   written by an older build with a smaller layout), zero only the newly
 *   added tail so existing save data survives.
 * SRAM access is enabled for the duration of the check and disabled again
 * on exit. */
void CheckSRAMIntegrity(UINT8* ptr, UINT16 size) BANKED {
  ENABLE_RAM;
  UINT16 bytes_to_clear = 0;
  /* Stored-size word lives immediately after the magic string. */
  UINT16* bytes_stored = (UINT16*)(ptr + MAGIC_LENGTH);
  if(strcmp((char*)ptr, MAGIC) != 0) {
    strcpy(ptr, MAGIC);
    //Clear everything
    /* -2 accounts for the stored-size word itself. */
    bytes_to_clear = size - MAGIC_LENGTH - 2;
  } else {
    if(*bytes_stored < size) {
      /* Save area grew since last run: only the new tail needs clearing. */
      bytes_to_clear = size - *bytes_stored;
    }
  }
  if(*bytes_stored != size) {
    *bytes_stored = size;
  }
  if(bytes_to_clear > 0) {
    /* Clear from the end backwards: covers both the fresh-init payload and
       the grown tail, since bytes_to_clear was computed accordingly. */
    memset(ptr + size - bytes_to_clear, 0, bytes_to_clear);
  }
  DISABLE_RAM;
}
|
"""
Name : wk2d.py
Author: Ajay Lotekar
e-mail: ablotekar@gmail.com
Date : 2021-07-15
DESC :
"""
import numpy as np
import math as mt
def wk2d(z, dx, dt):
    """
    2-D FFT power spectrum in (wave number, frequency) space, in dB.

    :param z: Matrix which 2D fft need to calculate row = time, column = space
    :param dx: Spatial grid size
    :param dt: Temporal grid size
    :return: z 2D fft , k wave number array and w frequency array
    """
    mm = z.shape[0]  # Size of matrix in y direction (time)
    nx = z.shape[1]  # Size of the matrix in x direction (space)
    nx1 = nx - 1
    kmin = 2 * (mt.pi) / (nx1 * dx)  # Minimum wavelength can resolve
    # NOTE(review): wmin is derived from nx1 (the *spatial* length) but is
    # used below to scale the *temporal* frequency axis; presumably
    # 2*pi/((mm - 1) * dt) was intended — confirm before changing.
    wmin = 2 * (mt.pi) / (nx1 * dt)  # Mininum frequency can resolve
    # Wave number array
    k = np.arange(start=-nx1 / 2, stop=nx1 / 2, step=1) * kmin
    # Frequency array
    w = np.arange(start=-mm / 2, stop=mm / 2, step=1) * wmin
    w = np.delete(w, int(np.fix(mm / 2)))  # Removing zero
    z = np.fft.fft2(z)  # 2D fft
    z = np.fft.fftshift(z)  # shift zero frequency/wavenumber to the center
    # Power: squared magnitude, one-sided factor 2, normalized by grid size.
    z = 2 * np.square(np.abs(z)) / (nx * mm)
    z = 10 * np.log10(z)  # convert to dB
    return z, k, w
|
/**
* @license Apache-2.0
*
* Copyright (c) 2018 The Stdlib Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
'use strict';
// Example: construct a Gumbel distribution with location mu = 2 and
// scale beta = 4, then print its summary statistics and CDF at x = 0.8.
var Gumbel = require( './../lib' );
var gumbel = new Gumbel( 2.0, 4.0 );
var mean = gumbel.mean;
console.log( 'Mean = %d', mean );
// => 'Mean = 4.308862659606131'
var median = gumbel.median;
console.log( 'Median = %d', median );
// => 'Median = 3.4660516823266576'
var s2 = gumbel.variance;
console.log( 'Variance = %d', s2 );
// => 'Variance = 26.318945069571622'
var y = gumbel.cdf( 0.8 );
console.log( 'F(0.8) = %d', y );
// => 'F(0.8) = 0.25927686599082755'
|
# Copyright 2019-present NAVER Corp.
# CC BY-NC-SA 3.0
# Available only for non-commercial use
import pdb
import torch
import torch.nn as nn
import torch.nn.functional as F
from nets.sampler import *
from nets.repeatability_loss import *
from nets.reliability_loss import *
class MultiLoss (nn.Module):
    """ Combines several loss functions for convenience.
    *args: [loss weight (float), loss creator, ... ]

    Example:
        loss = MultiLoss( 1, MyFirstLoss(), 0.5, MySecondLoss() )
    """
    def __init__(self, *args, dbg=()):
        nn.Module.__init__(self)
        assert len(args) % 2 == 0, 'args must be a list of (float, loss)'
        self.weights = []
        self.losses = nn.ModuleList()
        for i in range(len(args) // 2):
            weight = float(args[2 * i + 0])
            loss = args[2 * i + 1]
            assert isinstance(loss, nn.Module), "%s is not a loss!" % loss
            self.weights.append(weight)
            self.losses.append(loss)

    def forward(self, select=None, **variables):
        """Evaluate the (selected) losses on the shared `variables`.

        select: optional iterable of 1-based loss indices to evaluate;
                None evaluates all of them.
        Returns (cum_loss, details) where details maps 'loss_<name>' to
        each sub-loss value and 'loss' to the weighted total.
        """
        assert not select or all(1 <= n <= len(self.losses) for n in select)
        d = dict()
        cum_loss = 0
        for num, (weight, loss_func) in enumerate(zip(self.weights, self.losses), 1):
            if select is not None and num not in select:
                continue
            # Pass the shared variables straight through; the previous code
            # built a pointless identity-copy dict comprehension here.
            l = loss_func(**variables)
            if isinstance(l, tuple):
                # Loss returned (value, {name: value, ...}) itself.
                assert len(l) == 2 and isinstance(l[1], dict)
            else:
                l = l, {loss_func.name: l}
            cum_loss = cum_loss + weight * l[0]
            for key, val in l[1].items():
                d['loss_' + key] = float(val)
        d['loss'] = float(cum_loss)
        return cum_loss, d
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """South schema migration: add ElementModel.class_name (default 'element')."""
    def forwards(self, orm):
        # Adding field 'ElementModel.class_name'
        db.add_column(u'improcflow_elementmodel', 'class_name',
                      self.gf('django.db.models.fields.CharField')(default='element', max_length=120),
                      keep_default=False)
    def backwards(self, orm):
        # Deleting field 'ElementModel.class_name'
        db.delete_column(u'improcflow_elementmodel', 'class_name')
    # Frozen ORM snapshot South uses while executing this migration.
    models = {
        u'improcflow.elementmodel': {
            'Meta': {'object_name': 'ElementModel'},
            'class_name': ('django.db.models.fields.CharField', [], {'default': "'element'", 'max_length': '120'}),
            'flow': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['improcflow.FlowModel']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'title': ('django.db.models.fields.CharField', [], {'default': "'untitled'", 'max_length': '120'})
        },
        u'improcflow.flowmodel': {
            'Meta': {'object_name': 'FlowModel'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'title': ('django.db.models.fields.CharField', [], {'default': "'untitled'", 'max_length': '120'})
        }
    }
    complete_apps = ['improcflow']
|
from flask import Flask, render_template, request, make_response, g
from redis import Redis
import os
import socket
import random
import json
import sys
import time
import logging
import random
from jaeger_client import Config
from flask_opentracing import FlaskTracing
# Ballot options, overridable via environment variables.
option_a = os.getenv('OPTION_A', "Cats")
option_b = os.getenv('OPTION_B', "Dogs")
hostname = socket.gethostname()
app = Flask(__name__)
# Reset root-logger handlers and log bare messages to stdout at DEBUG.
logging.getLogger('').handlers = []
logging.basicConfig(format='%(message)s', level=logging.DEBUG)
# Jaeger tracing: const/1 sampler traces every request; spans are reported
# to the local jaeger-agent sidecar.
config = Config(
    config={
        'sampler': {
            'type': 'const',
            'param': 1,
        },
        'local_agent': {
            'reporting_host': "jaeger-agent",
            'reporting_port': 5775,
        },
        'logging': True,
    },
    service_name='voting',
)
jaeger_tracer = config.initialize_tracer()
# Instrument all Flask routes automatically (trace_all_requests=True).
tracing = FlaskTracing(jaeger_tracer, True, app)
def get_redis():
    """Return the per-app-context Redis client, creating it on first use."""
    client = getattr(g, 'redis', None)
    if client is None:
        client = Redis(host="redis", db=0, socket_timeout=5)
        g.redis = client
    return client
@app.route("/", methods=['POST','GET'])
def hello():
    """Serve the voting page; on POST, record the vote in the Redis queue."""
    with jaeger_tracer.start_active_span('booking') as scope:
        voter_id = request.cookies.get('voter_id')
        if not voter_id:
            # BUG FIX: on Python 3, hex() has no trailing 'L', so the old
            # Python 2-era slice [2:-1] silently dropped the last hex digit
            # of the 64-bit voter id. Strip only the '0x' prefix.
            voter_id = hex(random.getrandbits(64))[2:]
        scope.span.set_tag('movie', voter_id)
        span = jaeger_tracer.active_span
        logging.info(span)
        vote = None
        if request.method == 'POST':
            redis = get_redis()
            vote = request.form['vote']
            data = json.dumps({'voter_id': voter_id, 'vote': vote})
            redis.rpush('votes', data)
        resp = make_response(render_template(
            'index.html',
            option_a=option_a,
            option_b=option_b,
            hostname=hostname,
            vote=vote,
        ))
        resp.set_cookie('voter_id', voter_id)
        time.sleep(2)
        return resp
if __name__ == "__main__":
    # Development entrypoint only; use a production WSGI server otherwise.
    app.run(host='0.0.0.0', port=80, debug=True, threaded=True)
    # yield to IOLoop to flush the spans
|
import { $ } from './utils/dom.js';
import {menuApi} from './utils/api.js';
function App() {
//initial status
this.init = async () => {
  // Seed each category with an empty item list, fetch the default
  // category's items from the API, then render and wire up the event
  // listeners. Called once per App instance.
  this.menu = {
    espresso: [],
    frappuccino: [],
    blended: [],
    teavana: [],
    desert: [],
  };
  // Default category for a freshly created instance.
  this.currentCategory = "espresso";
  this.menu[this.currentCategory] = await menuApi.getAllMenuByCategory(this.currentCategory);
  render();
  initEventListners();
}
//rendering
// Re-fetch the current category from the API and rebuild the menu list
// markup, then refresh the item counter.
const render = async () => {
  console.log(`${this.currentCategory} 에 있는 아이템들을 렌더합니다.`);
  this.menu[this.currentCategory] = await menuApi.getAllMenuByCategory(this.currentCategory);
  // One <li> per menu item; sold-out items get the "sold-out" class.
  const template = this.menu[this.currentCategory].map(menuItem => {
    return `
    <li data-menu-id= "${menuItem.id}" class=" menu-list-item d-flex items-center py-2">
    <span class= "${menuItem.isSoldOut ? "sold-out" : ""} w-100 pl-2 menu-name">${menuItem.name}</span>
    <button
    type="button"
    class="bg-gray-50 text-gray-500 text-sm mr-1 menu-sold-out-button"
    >
    품절
    </button>
    <button
    type="button"
    class="bg-gray-50 text-gray-500 text-sm mr-1 menu-edit-button"
    >
    수정
    </button>
    <button
    type="button"
    class="bg-gray-50 text-gray-500 text-sm menu-remove-button"
    >
    삭제
    </button>
    </li>`;
  }).join("")
  // join collapses the array of <li> strings into one HTML string.
  $("#menu-list").innerHTML = template;
  updateMenuCount();
}
//Event binding
// Wire up all UI event listeners. Called once from init().
const initEventListners = () => {
  // Stop the form's default submit-and-reload behavior.
  $("#menu-form").addEventListener("submit", (e) => {
    e.preventDefault();
  });
  // Add a menu item on button click.
  $("#menu-submit-button").addEventListener("click", addMenuName);
  // Add a menu item on Enter.
  $("#menu-name").addEventListener("keypress", (e) => {
    if (e.key !== "Enter") {
      return;
    }
    addMenuName(e);
  });
  // Edit / remove / sold-out actions, delegated from the list container.
  // Early returns avoid needless class checks once a match is handled.
  $("#menu-list").addEventListener("click", (e) => {
    if (e.target.classList.contains("menu-edit-button")) {
      updateMenuName(e);
      return;
    }
    if (e.target.classList.contains("menu-remove-button")) {
      removeMenuName(e);
      return;
    }
    if (e.target.classList.contains("menu-sold-out-button")) {
      soldOutMenu(e);
      return;
    }
  });
  // BUG FIX: the category-navigation listener was registered *inside* the
  // list click handler, so it was re-registered on every list click
  // (duplicating navRender calls). Register it exactly once here.
  $("nav").addEventListener("click", (e) => {
    navRender(e);
  });
};
//Menu features: create, edit, delete, sold-out management, etc.
//Create
// Create a new menu item from the text input, then re-render.
const addMenuName = async (e) => {
  // Reject empty input with a user-facing alert.
  const name = $("#menu-name").value;
  if (name === "") {
    alert("Please enter a value.");
    return;
  }
  // Persist through the API for the active category, then clear the box.
  await menuApi.createMenu(this.currentCategory, name);
  render();
  $("#menu-name").value = "";
};
//Edit menu
// Rename the clicked menu item via a prompt, then re-render.
const updateMenuName = async (e) => {
  // Find the clicked row's name element and its API id (closest <li>).
  const $menuName = e.target.closest("li").querySelector(".menu-name");
  const $menuId = e.target.closest("li").dataset.menuId;
  const updatedMenuName = prompt("Change the menu", $menuName.innerText);
  // BUG FIX: prompt() returns null when the user cancels (and "" when
  // cleared); previously that value was saved as the new menu name.
  if (!updatedMenuName) {
    return;
  }
  await menuApi.updateMenu(this.currentCategory, updatedMenuName, $menuId);
  render();
}
//Sold-out management
// Toggle the clicked item's sold-out flag and persist it.
const soldOutMenu = async (e) => {
  const $menuId = e.target.closest("li").dataset.menuId;
  const $menuName = e.target.closest("li").querySelector(".menu-name").innerText;
  // Look the item up once instead of three separate O(n) scans.
  // NOTE(review): matching by name breaks with duplicate names; matching
  // by id would be safer — confirm item.id/dataset types before changing.
  const item = this.menu[this.currentCategory].find((x) => x.name == $menuName);
  item.isSoldOut = !item.isSoldOut;
  await menuApi.soldOutMenu(this.currentCategory, $menuId, item.isSoldOut);
  render();
}
//Remove menu
// Delete the clicked menu item after user confirmation, then re-render.
const removeMenuName = async (e) => {
  // Guard clause: bail out unless the user confirms the deletion.
  if (!confirm("Do you really want to delete the menu?")) {
    return;
  }
  const $menuId = e.target.closest("li").dataset.menuId;
  await menuApi.removeMenu(this.currentCategory, $menuId);
  render();
};
//Menu item count
// Refresh the "total N items" label for the active category.
const updateMenuCount = () => {
  const count = this.menu[this.currentCategory].length;
  $(".menu-count").innerText = `총 ${count}개`;
};
// Navigation
// Category navigation: switch the active category, update the page
// title, re-fetch that category's items and re-render.
const navRender = async (e) => {
  // Only react to clicks on actual category buttons.
  const isCategoryButton =
    e.target.classList.contains("cafe-category-name");
  if (isCategoryButton) {
    this.currentCategory = e.target.dataset.categoryName;
    $("#category-title").innerText = `${e.target.innerText} 메뉴 관리`;
    this.menu[this.currentCategory] = await menuApi.getAllMenuByCategory(this.currentCategory);
    render();
  }
}
};
// new => one function can serve as the model for many instances; each
// instance created from it keeps its own independent state.
// e.g. a site with several chat windows open — each window is its own instance.
const app = new App();
//An `app` object is created, and its init method is called so the logic above runs.
app.init();
|
""" FFplay proess handler """
import os
import sys
from shutil import which
from signal import SIGTERM
from subprocess import Popen
from time import sleep
import psutil
from zenlog import log
class Player:
    """FFplay process handler: spawns ffplay for a stream URL and manages it.

    FFmpeg must be installed separately (ffplay ships with it).
    Exits the whole program if ffplay is missing or the station is dead.
    """
    def __init__(self, URL):
        # URL: the stream address handed straight to ffplay.
        self.url = URL
        self.is_playing = False
        self.process = None
        self.exe_path = None
        self.program_name = "ffplay"  # constant value
        log.debug("player: url => {}".format(self.url))
        # check if FFplay is installed
        self.exe_path = which(self.program_name)
        log.debug("FFplay: {}".format(self.exe_path))
        if self.exe_path is None:
            log.critical("FFplay not found, install it first please")
            sys.exit(1)
        # -nodisp/-nostats/-loglevel 0: run silently with no video window.
        self.process = Popen(
            [self.exe_path, "-nodisp", "-nostats", "-loglevel", "0", self.url],
            shell=False,
        )
        log.debug("player: ffplay => PID {} initiated".format(self.process.pid))
        #sleep(3) # sleeping for 3 seconds wainting for ffplay to start properly
        if self.is_active():
            self.is_playing = True
            log.info("Radio started successfully")
        else:
            log.error("Radio could not be stared, may be a dead station")
            sys.exit(0)
    def is_active(self):
        """checks for if the ffplay is still active or not,
        will be used to terminate FFPLAY when the radioactive terminates"""
        # NOTE(review): psutil.Process() raises NoSuchProcess if ffplay has
        # already exited; confirm whether that case needs handling here.
        proc = psutil.Process(self.process.pid)
        if proc.status() == psutil.STATUS_ZOMBIE:
            return False
        return True
    def play(self):
        """Nothing"""
        if not self.is_playing:
            pass  # call the init function again ?
    def stop(self):
        """sends a SIGTERM to the process id of the current FFPLAY"""
        if self.is_playing:
            log.debug("Killing ffplay PID: {}".format(self.process.pid))
            os.kill(self.process.pid, SIGTERM)
        else:
            # NOTE(review): warn() is a deprecated alias of warning() in
            # stdlib logging; zenlog may differ — confirm before renaming.
            log.warn("Player: radio is not playing")
|
/*
* The routines in this file
* deal with the region, that magic space
* between "." and mark. Some functions are
* commands. Some functions are just for
* internal use.
*/
#include <stdio.h>
#include "estruct.h"
#include "edef.h"
/*
* Kill the region. Ask "getregion"
* to figure out the bounds of the region.
* Move "." to the start, and kill the characters.
* Bound to "C-W".
*/
/* Pre-ANSI K&R definition with implicit int return; f/n are the standard
 * command flag/argument, unused here. */
killregion(f, n)
{
        register int    s;
        REGION          region;
        if (curbp->b_mode&MDVIEW)       /* don't allow this command if  */
                return(rdonly());       /* we are in read only mode     */
        if ((s=getregion(&region)) != TRUE)
                return (s);
        if ((lastflag&CFKILL) == 0)     /* This is a kill type          */
                kdelete();              /* command, so do magic         */
        thisflag |= CFKILL;             /* kill buffer stuff.           */
        /* Move dot to the start of the region before deleting. */
        curwp->w_dotp = region.r_linep;
        curwp->w_doto = region.r_offset;
        return (ldelete(region.r_size, TRUE));
}
/*
* Copy all of the characters in the
* region to the kill buffer. Don't move dot
* at all. This is a bit like a kill region followed
* by a yank. Bound to "M-W".
*/
/* Pre-ANSI K&R definition with implicit int return; f/n unused. */
copyregion(f, n)
{
        register LINE   *linep;
        register int    loffs;
        register int    s;
        REGION          region;
        if ((s=getregion(&region)) != TRUE)
                return (s);
        if ((lastflag&CFKILL) == 0)     /* Kill type command.           */
                kdelete();
        thisflag |= CFKILL;
        linep = region.r_linep;         /* Current line.                */
        loffs = region.r_offset;        /* Current offset.              */
        /* Walk the region character by character, inserting a newline
         * into the kill buffer at each end-of-line. */
        while (region.r_size--) {
                if (loffs == llength(linep)) {  /* End of line.         */
                        if ((s=kinsert('\n')) != TRUE)
                                return (s);
                        linep = lforw(linep);
                        loffs = 0;
                } else {                        /* Middle of line.      */
                        if ((s=kinsert(lgetc(linep, loffs))) != TRUE)
                                return (s);
                        ++loffs;
                }
        }
        return (TRUE);
}
/*
* Lower case region. Zap all of the upper
* case characters in the region to lower case. Use
* the region code to set the limits. Scan the buffer,
* doing the changes. Call "lchange" to ensure that
* redisplay is done in all buffers. Bound to
* "C-X C-L".
*/
/* Pre-ANSI K&R definition with implicit int return; f/n unused.
 * Case conversion is ASCII-only ('A'..'Z' range test). */
lowerregion(f, n)
{
        register LINE   *linep;
        register int    loffs;
        register int    c;
        register int    s;
        REGION          region;
        if (curbp->b_mode&MDVIEW)       /* don't allow this command if  */
                return(rdonly());       /* we are in read only mode     */
        if ((s=getregion(&region)) != TRUE)
                return (s);
        lchange(WFHARD);                /* force full redisplay         */
        linep = region.r_linep;
        loffs = region.r_offset;
        while (region.r_size--) {
                if (loffs == llength(linep)) {
                        linep = lforw(linep);
                        loffs = 0;
                } else {
                        c = lgetc(linep, loffs);
                        if (c>='A' && c<='Z')
                                lputc(linep, loffs, c+'a'-'A');
                        ++loffs;
                }
        }
        return (TRUE);
}
/*
 * Upper case region. Zap all of the lower
 * case characters in the region to upper case. Use
 * the region code to set the limits. Scan the buffer,
 * doing the changes. Call "lchange" to ensure that
 * redisplay is done in all buffers. Bound to
 * "C-X C-U".
 */
/* Pre-ANSI K&R definition with implicit int return; f/n unused.
 * Case conversion is ASCII-only ('a'..'z' range test). */
upperregion(f, n)
{
        register LINE   *linep;
        register int    loffs;
        register int    c;
        register int    s;
        REGION          region;
        if (curbp->b_mode&MDVIEW)       /* don't allow this command if  */
                return(rdonly());       /* we are in read only mode     */
        if ((s=getregion(&region)) != TRUE)
                return (s);
        lchange(WFHARD);                /* force full redisplay         */
        linep = region.r_linep;
        loffs = region.r_offset;
        while (region.r_size--) {
                if (loffs == llength(linep)) {
                        linep = lforw(linep);
                        loffs = 0;
                } else {
                        c = lgetc(linep, loffs);
                        if (c>='a' && c<='z')
                                lputc(linep, loffs, c-'a'+'A');
                        ++loffs;
                }
        }
        return (TRUE);
}
/*
* This routine figures out the
* bounds of the region in the current window, and
* fills in the fields of the "REGION" structure pointed
* to by "rp". Because the dot and mark are usually very
* close together, we scan outward from dot looking for
* mark. This should save time. Return a standard code.
* Callers of this routine should be prepared to get
* an "ABORT" status; we might make this have the
* conform thing later.
*/
/* Pre-ANSI K&R definition with implicit int return.
 * Fills *rp with the region between dot and mark. Scans outward from
 * dot in both directions simultaneously, so the common nearby-mark case
 * terminates quickly. */
getregion(rp)
register REGION *rp;
{
        register LINE   *flp;
        register LINE   *blp;
        int             fsize;
        register int    bsize;
        if (curwp->w_markp == NULL) {
                mlwrite("No mark set in this window");
                return (FALSE);
        }
        /* Dot and mark on the same line: region is the span between the
         * two offsets, whichever order they occur in. */
        if (curwp->w_dotp == curwp->w_markp) {
                rp->r_linep = curwp->w_dotp;
                if (curwp->w_doto < curwp->w_marko) {
                        rp->r_offset = curwp->w_doto;
                        rp->r_size = curwp->w_marko-curwp->w_doto;
                } else {
                        rp->r_offset = curwp->w_marko;
                        rp->r_size = curwp->w_doto-curwp->w_marko;
                }
                return (TRUE);
        }
        /* Otherwise scan forward (flp/fsize) and backward (blp/bsize)
         * from dot until one direction hits the mark line; sizes count
         * characters plus one per newline. */
        blp = curwp->w_dotp;
        bsize = curwp->w_doto;
        flp = curwp->w_dotp;
        fsize = llength(flp)-curwp->w_doto+1;
        while (flp!=curbp->b_linep || lback(blp)!=curbp->b_linep) {
                if (flp != curbp->b_linep) {
                        flp = lforw(flp);
                        if (flp == curwp->w_markp) {
                                rp->r_linep = curwp->w_dotp;
                                rp->r_offset = curwp->w_doto;
                                rp->r_size = fsize+curwp->w_marko;
                                return (TRUE);
                        }
                        fsize += llength(flp)+1;
                }
                if (lback(blp) != curbp->b_linep) {
                        blp = lback(blp);
                        bsize += llength(blp)+1;
                        if (blp == curwp->w_markp) {
                                rp->r_linep = blp;
                                rp->r_offset = curwp->w_marko;
                                rp->r_size = bsize - curwp->w_marko;
                                return (TRUE);
                        }
                }
        }
        /* Mark exists but was never reached: internal inconsistency. */
        mlwrite("Bug: lost mark");
        return (FALSE);
}
|
// @flow
import * as React from 'react'
import { connect } from 'react-redux'
import { Splash } from '@opentrons/components'
import { START_TERMINAL_ITEM_ID, type TerminalItemId } from '../steplist'
import { Portal as MainPageModalPortal } from '../components/portals/MainPageModalPortal'
import { DeckSetup } from '../components/DeckSetup'
import { ConnectedFilePage } from '../containers/ConnectedFilePage'
import { SettingsPage } from '../components/SettingsPage'
import { LiquidsPage } from '../components/LiquidsPage'
import { Hints } from '../components/Hints'
import { LiquidPlacementModal } from '../components/LiquidPlacementModal.js'
import { LabwareSelectionModal } from '../components/LabwareSelectionModal'
import { StepEditForm } from '../components/StepEditForm'
import { StepSelectionBanner } from '../components/StepSelectionBanner'
import { TimelineAlerts } from '../components/alerts/TimelineAlerts'
import { getSelectedTerminalItemId } from '../ui/steps'
import { selectors as labwareIngredSelectors } from '../labware-ingred/selectors'
import { selectors, type Page } from '../navigation'
import type { BaseState } from '../types'
type Props = {
page: Page,
selectedTerminalItemId: ?TerminalItemId,
ingredSelectionMode: boolean,
}
// Top-level page router: renders the main-panel content matching the
// current navigation page.
function MainPanelComponent(props: Props) {
  const { page, selectedTerminalItemId, ingredSelectionMode } = props
  switch (page) {
    case 'file-splash':
      return <Splash />
    case 'file-detail':
      return <ConnectedFilePage />
    case 'liquids':
      return <LiquidsPage />
    case 'settings-app':
      return <SettingsPage />
    default: {
      // Default: deck setup plus modals that only appear while the
      // "starting deck state" terminal item is selected.
      const startTerminalItemSelected =
        selectedTerminalItemId === START_TERMINAL_ITEM_ID
      return (
        <>
          <MainPageModalPortal>
            <TimelineAlerts />
            <Hints />
            {startTerminalItemSelected && <LabwareSelectionModal />}
            <StepSelectionBanner />
            <StepEditForm />
            {startTerminalItemSelected && ingredSelectionMode && (
              <LiquidPlacementModal />
            )}
          </MainPageModalPortal>
          <DeckSetup />
        </>
      )
    }
  }
}
function mapStateToProps(state: BaseState): $Exact<Props> {
return {
page: selectors.getCurrentPage(state),
selectedTerminalItemId: getSelectedTerminalItemId(state),
ingredSelectionMode:
labwareIngredSelectors.getSelectedLabwareId(state) != null,
}
}
// Store-connected wrapper around MainPanelComponent (no own props).
export const ConnectedMainPanel: React.AbstractComponent<{||}> = connect<
  Props,
  {||},
  _,
  _,
  _,
  _
>(mapStateToProps)(MainPanelComponent)
|
#pragma once
#if NET_4_0
#if IL2CPP_THREADS_PTHREAD && !IL2CPP_DOTS_WITHOUT_DEBUGGER
#include <pthread.h>
#include "utils/NonCopyable.h"
class FastMutexImpl;
namespace il2cpp
{
namespace os
{
// Thin wrapper over a pthread condition variable for the IL2CPP OS layer.
// Non-copyable; pairs with FastMutexImpl. NOTE(review): presumably the
// caller must hold `lock` when calling Wait/TimedWait (standard
// pthread_cond_wait contract) — confirm in the implementation file.
class ConditionVariableImpl : public il2cpp::utils::NonCopyable
{
public:
    ConditionVariableImpl();
    ~ConditionVariableImpl();
    // Block until signalled; returns an implementation-defined status code.
    int Wait(FastMutexImpl* lock);
    // Like Wait, but gives up after timeout_ms milliseconds.
    int TimedWait(FastMutexImpl* lock, uint32_t timeout_ms);
    // Wake all current waiters.
    void Broadcast();
    // Wake one waiter.
    void Signal();
private:
    pthread_cond_t m_ConditionVariable;
};
}
}
#endif
#endif
|
//Importar depedencia
const express = require('express');
const path = require('path'); /**esta linha criou a barra invertida para o windows[\] */
const pages = require('./pages.js');/**esta linha seria o equivalente link do "href" do html? */
// iniciando o express
const server = express()/* este express é uma biblioteca que esta sendo chamada aqui*/
server
//utilizando os ar arquivos estaticos
.use(express.static('public')) /**com este codigo estou dizendo pro servidor o endrreço dos meus arquivos estaticos da aplicação. com isto ele vai criar sozinho todas as rotas para o public */
//configurar template engine
.set('views', path.join(__dirname, "views"))
.set('view engine', 'hbs')
//criar uma rota
.get('/', pages.index)
.get('/orphanage', pages.orphanage)
.get('/orphanages', pages.orphanages)
.get('/create-orphanage', pages.createOrphanage)
//ligar o servidor
server.listen(5500)
|
#!/usr/bin/env python3
#
# This file is part of the MicroPython project, http://micropython.org/
#
# The MIT License (MIT)
#
# Copyright (c) 2017 Scott Shawcroft for Adafruit Industries
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import re
import sys
# Handle size constants with K or M suffixes (allowed in .ld but not in Python).
# Handle size constants with K or M suffixes (allowed in .ld but not in Python).
K_PATTERN = re.compile(r'([0-9]+)[Kk]')
K_REPLACE = r'(\1*1024)'
M_PATTERN = re.compile(r'([0-9]+)[Mm]')
M_REPLACE = r'(\1*1024*1024)'
print()
text = 0
data = 0
bss = 0
# stdin is the linker output.
for line in sys.stdin:
    line = line.strip()
    # Skip the `size` header row; every data row is parsed, last one wins.
    if not line.startswith("text"):
        text, data, bss = map(int, line.split()[:3])
regions = {}
# This file is the linker script.
with open(sys.argv[1], "r") as f:
    for line in f:
        line = line.strip()
        # Capture region length expressions, e.g. "FLASH (...) : ... = 1M".
        if line.startswith(("FLASH", "RAM")):
            regions[line.split()[0]] = line.split("=")[-1]
for region in regions:
    space = regions[region]
    if "/*" in space:
        space = space.split("/*")[0]
    space = K_PATTERN.sub(K_REPLACE, space)
    space = M_PATTERN.sub(M_REPLACE, space)
    # NOTE(review): eval() of linker-script text is acceptable only because
    # the input is the project's own build file; never reuse on untrusted input.
    regions[region] = eval(space)
free_flash = regions["FLASH"] - text - data
free_ram = regions["RAM"] - data - bss
print(free_flash, "bytes free in flash out of", regions["FLASH"], "bytes (", regions["FLASH"] / 1024, "kb ).")
print(free_ram, "bytes free in ram for stack out of", regions["RAM"], "bytes (", regions["RAM"] / 1024, "kb ).")
print()
# Check that we have free flash space. GCC doesn't fail when the text + data
# sections don't fit in FLASH. It only counts data in RAM.
if free_flash < 0:
    print("Too little flash!!!")
    print()
    sys.exit(-1)
|
// TODO:
// * convert listenerCount() usage to emit() return value checking?
// * emit error when connection severed early (e.g. before handshake)
// * add '.connected' or similar property to connection objects to allow
// immediate connection status checking
'use strict';
const { Server: netServer } = require('net');
const EventEmitter = require('events');
const { listenerCount } = EventEmitter;
const {
CHANNEL_OPEN_FAILURE,
DEFAULT_CIPHER,
DEFAULT_COMPRESSION,
DEFAULT_KEX,
DEFAULT_MAC,
DEFAULT_SERVER_HOST_KEY,
DISCONNECT_REASON,
DISCONNECT_REASON_BY_VALUE,
SUPPORTED_CIPHER,
SUPPORTED_COMPRESSION,
SUPPORTED_KEX,
SUPPORTED_MAC,
SUPPORTED_SERVER_HOST_KEY,
} = require('./protocol/constants.js');
const { init: cryptoInit } = require('./protocol/crypto.js');
const { KexInit } = require('./protocol/kex.js');
const { parseKey } = require('./protocol/keyParser.js');
const Protocol = require('./protocol/Protocol.js');
const { SFTP } = require('./protocol/SFTP.js');
const { writeUInt32BE } = require('./protocol/utils.js');
const {
Channel,
MAX_WINDOW,
PACKET_SIZE,
windowAdjust,
WINDOW_THRESHOLD,
} = require('./Channel.js');
const {
ChannelManager,
generateAlgorithmList,
isWritable,
onChannelOpenFailure,
onCHANNEL_CLOSE,
} = require('./utils.js');
const MAX_PENDING_AUTHS = 10;
// Base class for all server-side authentication attempts. Wraps the
// server's callback so the final allow/deny answer is delivered at most
// once, no matter how many times accept()/reject() are called.
class AuthContext extends EventEmitter {
  constructor(protocol, username, service, method, cb) {
    super();
    this.username = this.user = username;
    this.service = service;
    this.method = method;
    this._initialResponse = false;
    this._finalResponse = false;
    this._multistep = false;
    // One-shot wrapper around the server's decision callback.
    this._cbfinal = (allowed, methodsLeft, isPartial) => {
      if (this._finalResponse)
        return;
      this._finalResponse = true;
      cb(this, allowed, methodsLeft, isPartial);
    };
    this._protocol = protocol;
  }
  // Grant the authentication attempt.
  accept() {
    if (this._cleanup)
      this._cleanup();
    this._initialResponse = true;
    this._cbfinal(true);
  }
  // Deny the attempt, optionally advertising remaining methods and
  // whether this was a partial success.
  reject(methodsLeft, isPartial) {
    if (this._cleanup)
      this._cleanup();
    this._initialResponse = true;
    this._cbfinal(false, methodsLeft, isPartial);
  }
}
// Multi-step keyboard-interactive authentication context. prompt() sends
// an info request to the client; the supplied callback later receives the
// client's responses (or an Error if the attempt is aborted).
class KeyboardAuthContext extends AuthContext {
  constructor(protocol, username, service, method, submethods, cb) {
    super(protocol, username, service, method, cb);
    this._multistep = true;
    // Pending prompt callback; cleared before invocation so a late
    // duplicate response cannot fire it twice.
    this._cb = undefined;
    this._onInfoResponse = (responses) => {
      const callback = this._cb;
      if (callback) {
        this._cb = undefined;
        callback(responses);
      }
    };
    this.submethods = submethods;
    this.on('abort', () => {
      this._cb && this._cb(new Error('Authentication request aborted'));
    });
  }
  // prompts: a single string/object or an array of them; strings are
  // normalized to { prompt, echo: true }. title/instructions optional;
  // the trailing function argument (wherever it lands) is the callback.
  prompt(prompts, title, instructions, cb) {
    if (!Array.isArray(prompts))
      prompts = [ prompts ];
    if (typeof title === 'function') {
      cb = title;
      title = instructions = undefined;
    } else if (typeof instructions === 'function') {
      cb = instructions;
      instructions = undefined;
    } else if (typeof cb !== 'function') {
      cb = undefined;
    }
    for (let i = 0; i < prompts.length; ++i) {
      if (typeof prompts[i] === 'string') {
        prompts[i] = {
          prompt: prompts[i],
          echo: true
        };
      }
    }
    this._cb = cb;
    this._initialResponse = true;
    this._protocol.authInfoReq(title, instructions, prompts);
  }
}
// Public-key authentication context. When no signature is present the
// client is merely probing whether the key would be acceptable, so
// accept() answers with PK_OK instead of completing authentication.
class PKAuthContext extends AuthContext {
  constructor(protocol, username, service, method, pkInfo, cb) {
    super(protocol, username, service, method, cb);
    this.key = { algo: pkInfo.keyAlgo, data: pkInfo.key };
    this.signature = pkInfo.signature;
    this.blob = pkInfo.blob;
  }
  accept() {
    if (!this.signature) {
      this._initialResponse = true;
      this._protocol.authPKOK(this.key.algo, this.key.data);
    } else {
      AuthContext.prototype.accept.call(this);
    }
  }
}
// Host-based authentication context: carries the client host's public
// key, the signature/blob to verify, and the claimed local host/user.
class HostbasedAuthContext extends AuthContext {
  constructor(protocol, username, service, method, pkInfo, cb) {
    super(protocol, username, service, method, cb);
    const { keyAlgo, key, signature, blob, localHostname, localUsername } =
      pkInfo;
    this.key = { algo: keyAlgo, data: key };
    this.signature = signature;
    this.blob = blob;
    this.localHostname = localHostname;
    this.localUsername = localUsername;
  }
}
// Password authentication context. requestChange() asks the client to
// choose a new password; at most one change request may be in flight.
class PwdAuthContext extends AuthContext {
  constructor(protocol, username, service, method, password, cb) {
    super(protocol, username, service, method, cb);
    this.password = password;
    // Pending password-change callback, if any.
    this._changeCb = undefined;
  }
  requestChange(prompt, cb) {
    if (this._changeCb)
      throw new Error('Change request already in progress');
    if (typeof prompt !== 'string')
      throw new Error('prompt argument must be a string');
    if (typeof cb !== 'function')
      throw new Error('Callback argument must be a function');
    this._changeCb = cb;
    this._protocol.authPasswdChg(prompt);
  }
}
// Server-side representation of an SSH "session" channel request.
// Holds the channel bookkeeping (ids, windows, packet sizes) until the
// actual Channel object is attached.
class Session extends EventEmitter {
  constructor(client, info, localChan) {
    super();
    this.type = 'session';
    // Set later to e.g. 'shell'/'exec'/'subsystem' once requested.
    this.subtype = undefined;
    this._ending = false;
    this._channel = undefined;
    // incoming = our side (local id, our receive window);
    // outgoing = the client's side, taken from its CHANNEL_OPEN info.
    this._chanInfo = {
      type: 'session',
      incoming: {
        id: localChan,
        window: MAX_WINDOW,
        packetSize: PACKET_SIZE,
        state: 'open'
      },
      outgoing: {
        id: info.sender,
        window: info.window,
        packetSize: info.packetSize,
        state: 'open'
      }
    };
  }
}
// SSH server: wraps a net.Server, parses/validates the configured host
// keys, builds the algorithm offer once, and spawns a Client per
// incoming TCP connection.
class Server extends EventEmitter {
  constructor(cfg, listener) {
    super();
    if (typeof cfg !== 'object' || cfg === null)
      throw new Error('Missing configuration object');
    // Map of host-key algorithm name -> parsed private key.
    // Object.create(null) avoids prototype-chain lookups on attacker-
    // influenced algorithm names.
    const hostKeys = Object.create(null);
    const hostKeyAlgoOrder = [];
    const hostKeys_ = cfg.hostKeys;
    if (!Array.isArray(hostKeys_))
      throw new Error('hostKeys must be an array');
    const cfgAlgos = (
      typeof cfg.algorithms === 'object' && cfg.algorithms !== null
        ? cfg.algorithms
        : {}
    );
    const hostKeyAlgos = generateAlgorithmList(
      cfgAlgos.serverHostKey,
      DEFAULT_SERVER_HOST_KEY,
      SUPPORTED_SERVER_HOST_KEY
    );
    for (let i = 0; i < hostKeys_.length; ++i) {
      let privateKey;
      // Entries are either raw key material or { key, passphrase }.
      if (Buffer.isBuffer(hostKeys_[i]) || typeof hostKeys_[i] === 'string')
        privateKey = parseKey(hostKeys_[i]);
      else
        privateKey = parseKey(hostKeys_[i].key, hostKeys_[i].passphrase);
      if (privateKey instanceof Error)
        throw new Error(`Cannot parse privateKey: ${privateKey.message}`);
      if (Array.isArray(privateKey)) {
        // OpenSSH's newer format only stores 1 key for now
        privateKey = privateKey[0];
      }
      if (privateKey.getPrivatePEM() === null)
        throw new Error('privateKey value contains an invalid private key');
      // Discard key if we already found a key of the same type
      if (hostKeyAlgoOrder.includes(privateKey.type))
        continue;
      if (privateKey.type === 'ssh-rsa') {
        // SSH supports multiple signature hashing algorithms for RSA, so we add
        // the algorithms in the desired order
        let sha1Pos = hostKeyAlgos.indexOf('ssh-rsa');
        const sha256Pos = hostKeyAlgos.indexOf('rsa-sha2-256');
        const sha512Pos = hostKeyAlgos.indexOf('rsa-sha2-512');
        if (sha1Pos === -1) {
          // Fall back to giving SHA1 the lowest priority
          sha1Pos = Infinity;
        }
        [sha1Pos, sha256Pos, sha512Pos].sort(compareNumbers).forEach((pos) => {
          if (pos === -1)
            return;
          let type;
          switch (pos) {
            case sha1Pos: type = 'ssh-rsa'; break;
            case sha256Pos: type = 'rsa-sha2-256'; break;
            case sha512Pos: type = 'rsa-sha2-512'; break;
            default: return;
          }
          // Store same RSA key under each hash algorithm name for convenience
          hostKeys[type] = privateKey;
          hostKeyAlgoOrder.push(type);
        });
      } else {
        hostKeys[privateKey.type] = privateKey;
        hostKeyAlgoOrder.push(privateKey.type);
      }
    }
    const algorithms = {
      kex: generateAlgorithmList(cfgAlgos.kex, DEFAULT_KEX, SUPPORTED_KEX),
      serverHostKey: hostKeyAlgoOrder,
      cs: {
        cipher: generateAlgorithmList(
          cfgAlgos.cipher,
          DEFAULT_CIPHER,
          SUPPORTED_CIPHER
        ),
        mac: generateAlgorithmList(cfgAlgos.hmac, DEFAULT_MAC, SUPPORTED_MAC),
        compress: generateAlgorithmList(
          cfgAlgos.compress,
          DEFAULT_COMPRESSION,
          SUPPORTED_COMPRESSION
        ),
        lang: [],
      },
      sc: undefined,
    };
    // Client->server and server->client algorithm lists are identical.
    algorithms.sc = algorithms.cs;
    if (typeof listener === 'function')
      this.on('connection', listener);
    const origDebug = (typeof cfg.debug === 'function' ? cfg.debug : undefined);
    const ident = (cfg.ident ? Buffer.from(cfg.ident) : undefined);
    // The KexInit offer is built once and shared by all connections.
    const offer = new KexInit(algorithms);
    this._srv = new netServer((socket) => {
      // Enforce the connection cap before doing any work.
      if (this._connections >= this.maxConnections) {
        socket.destroy();
        return;
      }
      ++this._connections;
      socket.once('close', () => {
        --this._connections;
      });
      let debug;
      if (origDebug) {
        // Prepend debug output with a unique identifier in case there are
        // multiple clients connected at the same time
        const debugPrefix = `[${process.hrtime().join('.')}] `;
        debug = (msg) => {
          origDebug(`${debugPrefix}${msg}`);
        };
      }
      // eslint-disable-next-line no-use-before-define
      new Client(socket, hostKeys, ident, offer, debug, this, cfg);
    }).on('error', (err) => {
      this.emit('error', err);
    }).on('listening', () => {
      this.emit('listening');
    }).on('close', () => {
      this.emit('close');
    });
    this._connections = 0;
    this.maxConnections = Infinity;
  }
  // Treat an already-connected socket as a new incoming connection.
  injectSocket(socket) {
    this._srv.emit('connection', socket);
  }
  // The methods below delegate to the underlying net.Server; the
  // chainable ones return `this`.
  listen(...args) {
    this._srv.listen(...args);
    return this;
  }
  address() {
    return this._srv.address();
  }
  getConnections(cb) {
    this._srv.getConnections(cb);
    return this;
  }
  close(cb) {
    this._srv.close(cb);
    return this;
  }
  ref() {
    this._srv.ref();
    return this;
  }
  unref() {
    this._srv.unref();
    return this;
  }
}
// Defaults used when srvCfg.keepaliveInterval / keepaliveCountMax are not
// supplied: ping each client every 15s, and treat it as timed out after 3
// consecutive unanswered pings (see the keepalive setup in Client's ctor).
Server.KEEPALIVE_CLIENT_INTERVAL = 15000;
Server.KEEPALIVE_CLIENT_COUNT_MAX = 3;
class Client extends EventEmitter {
constructor(socket, hostKeys, ident, offer, debug, server, srvCfg) {
super();
let exchanges = 0;
let acceptedAuthSvc = false;
let pendingAuths = [];
let authCtx;
let kaTimer;
let onPacket;
const unsentGlobalRequestsReplies = [];
this._sock = socket;
this._chanMgr = new ChannelManager(this);
this._debug = debug;
this.noMoreSessions = false;
this.authenticated = false;
// Silence pre-header errors
function onClientPreHeaderError(err) {}
this.on('error', onClientPreHeaderError);
const DEBUG_HANDLER = (!debug ? undefined : (p, display, msg) => {
debug(`Debug output from client: ${JSON.stringify(msg)}`);
});
const kaIntvl = (
typeof srvCfg.keepaliveInterval === 'number'
&& isFinite(srvCfg.keepaliveInterval)
&& srvCfg.keepaliveInterval > 0
? srvCfg.keepaliveInterval
: (
typeof Server.KEEPALIVE_CLIENT_INTERVAL === 'number'
&& isFinite(Server.KEEPALIVE_CLIENT_INTERVAL)
&& Server.KEEPALIVE_CLIENT_INTERVAL > 0
? Server.KEEPALIVE_CLIENT_INTERVAL
: -1
)
);
const kaCountMax = (
typeof srvCfg.keepaliveCountMax === 'number'
&& isFinite(srvCfg.keepaliveCountMax)
&& srvCfg.keepaliveCountMax >= 0
? srvCfg.keepaliveCountMax
: (
typeof Server.KEEPALIVE_CLIENT_COUNT_MAX === 'number'
&& isFinite(Server.KEEPALIVE_CLIENT_COUNT_MAX)
&& Server.KEEPALIVE_CLIENT_COUNT_MAX >= 0
? Server.KEEPALIVE_CLIENT_COUNT_MAX
: -1
)
);
let kaCurCount = 0;
if (kaIntvl !== -1 && kaCountMax !== -1) {
this.once('ready', () => {
const onClose = () => {
clearInterval(kaTimer);
};
this.on('close', onClose).on('end', onClose);
kaTimer = setInterval(() => {
if (++kaCurCount > kaCountMax) {
clearInterval(kaTimer);
const err = new Error('Keepalive timeout');
err.level = 'client-timeout';
this.emit('error', err);
this.end();
} else {
// XXX: if the server ever starts sending real global requests to
// the client, we will need to add a dummy callback here to
// keep the correct reply order
proto.ping();
}
}, kaIntvl);
});
// TODO: re-verify keepalive behavior with OpenSSH
onPacket = () => {
kaTimer && kaTimer.refresh();
kaCurCount = 0;
};
}
const proto = this._protocol = new Protocol({
server: true,
hostKeys,
ident,
offer,
onPacket,
greeting: srvCfg.greeting,
banner: srvCfg.banner,
onWrite: (data) => {
if (isWritable(socket))
socket.write(data);
},
onError: (err) => {
if (!proto._destruct)
socket.removeAllListeners('data');
this.emit('error', err);
try {
socket.end();
} catch {}
},
onHeader: (header) => {
this.removeListener('error', onClientPreHeaderError);
const info = {
ip: socket.remoteAddress,
family: socket.remoteFamily,
port: socket.remotePort,
header,
};
if (!server.emit('connection', this, info)) {
// auto reject
proto.disconnect(DISCONNECT_REASON.BY_APPLICATION);
socket.end();
return;
}
if (header.greeting)
this.emit('greeting', header.greeting);
},
onHandshakeComplete: (negotiated) => {
if (++exchanges > 1)
this.emit('rekey');
this.emit('handshake', negotiated);
},
debug,
messageHandlers: {
DEBUG: DEBUG_HANDLER,
DISCONNECT: (p, reason, desc) => {
if (reason !== DISCONNECT_REASON.BY_APPLICATION) {
if (!desc) {
desc = DISCONNECT_REASON_BY_VALUE[reason];
if (desc === undefined)
desc = `Unexpected disconnection reason: ${reason}`;
}
const err = new Error(desc);
err.code = reason;
this.emit('error', err);
}
socket.end();
},
CHANNEL_OPEN: (p, info) => {
// Handle incoming requests from client
// Do early reject in some cases to prevent wasteful channel
// allocation
if ((info.type === 'session' && this.noMoreSessions)
|| !this.authenticated) {
const reasonCode = CHANNEL_OPEN_FAILURE.ADMINISTRATIVELY_PROHIBITED;
return proto.channelOpenFail(info.sender, reasonCode);
}
let localChan = -1;
let reason;
let replied = false;
let accept;
const reject = () => {
if (replied)
return;
replied = true;
if (reason === undefined) {
if (localChan === -1)
reason = CHANNEL_OPEN_FAILURE.RESOURCE_SHORTAGE;
else
reason = CHANNEL_OPEN_FAILURE.CONNECT_FAILED;
}
proto.channelOpenFail(info.sender, reason, '');
};
const reserveChannel = () => {
localChan = this._chanMgr.add();
if (localChan === -1) {
reason = CHANNEL_OPEN_FAILURE.RESOURCE_SHORTAGE;
if (debug) {
debug('Automatic rejection of incoming channel open: '
+ 'no channels available');
}
}
return (localChan !== -1);
};
const data = info.data;
switch (info.type) {
case 'session':
if (listenerCount(this, 'session') && reserveChannel()) {
accept = () => {
if (replied)
return;
replied = true;
const instance = new Session(this, info, localChan);
this._chanMgr.update(localChan, instance);
proto.channelOpenConfirm(info.sender,
localChan,
MAX_WINDOW,
PACKET_SIZE);
return instance;
};
this.emit('session', accept, reject);
return;
}
break;
case 'direct-tcpip':
if (listenerCount(this, 'tcpip') && reserveChannel()) {
accept = () => {
if (replied)
return;
replied = true;
const chanInfo = {
type: undefined,
incoming: {
id: localChan,
window: MAX_WINDOW,
packetSize: PACKET_SIZE,
state: 'open'
},
outgoing: {
id: info.sender,
window: info.window,
packetSize: info.packetSize,
state: 'open'
}
};
const stream = new Channel(this, chanInfo, { server: true });
this._chanMgr.update(localChan, stream);
proto.channelOpenConfirm(info.sender,
localChan,
MAX_WINDOW,
PACKET_SIZE);
return stream;
};
this.emit('tcpip', accept, reject, data);
return;
}
break;
case 'direct-streamlocal@openssh.com':
if (listenerCount(this, 'openssh.streamlocal')
&& reserveChannel()) {
accept = () => {
if (replied)
return;
replied = true;
const chanInfo = {
type: undefined,
incoming: {
id: localChan,
window: MAX_WINDOW,
packetSize: PACKET_SIZE,
state: 'open'
},
outgoing: {
id: info.sender,
window: info.window,
packetSize: info.packetSize,
state: 'open'
}
};
const stream = new Channel(this, chanInfo, { server: true });
this._chanMgr.update(localChan, stream);
proto.channelOpenConfirm(info.sender,
localChan,
MAX_WINDOW,
PACKET_SIZE);
return stream;
};
this.emit('openssh.streamlocal', accept, reject, data);
return;
}
break;
default:
// Automatically reject any unsupported channel open requests
reason = CHANNEL_OPEN_FAILURE.UNKNOWN_CHANNEL_TYPE;
if (debug) {
debug('Automatic rejection of unsupported incoming channel open'
+ ` type: ${info.type}`);
}
}
if (reason === undefined) {
reason = CHANNEL_OPEN_FAILURE.ADMINISTRATIVELY_PROHIBITED;
if (debug) {
debug('Automatic rejection of unexpected incoming channel open'
+ ` for: ${info.type}`);
}
}
reject();
},
CHANNEL_OPEN_CONFIRMATION: (p, info) => {
const channel = this._chanMgr.get(info.recipient);
if (typeof channel !== 'function')
return;
const chanInfo = {
type: channel.type,
incoming: {
id: info.recipient,
window: MAX_WINDOW,
packetSize: PACKET_SIZE,
state: 'open'
},
outgoing: {
id: info.sender,
window: info.window,
packetSize: info.packetSize,
state: 'open'
}
};
const instance = new Channel(this, chanInfo, { server: true });
this._chanMgr.update(info.recipient, instance);
channel(undefined, instance);
},
CHANNEL_OPEN_FAILURE: (p, recipient, reason, description) => {
const channel = this._chanMgr.get(recipient);
if (typeof channel !== 'function')
return;
const info = { reason, description };
onChannelOpenFailure(this, recipient, info, channel);
},
CHANNEL_DATA: (p, recipient, data) => {
let channel = this._chanMgr.get(recipient);
if (typeof channel !== 'object' || channel === null)
return;
if (channel.constructor === Session) {
channel = channel._channel;
if (!channel)
return;
}
// The remote party should not be sending us data if there is no
// window space available ...
// TODO: raise error on data with not enough window?
if (channel.incoming.window === 0)
return;
channel.incoming.window -= data.length;
if (channel.push(data) === false) {
channel._waitChanDrain = true;
return;
}
if (channel.incoming.window <= WINDOW_THRESHOLD)
windowAdjust(channel);
},
CHANNEL_EXTENDED_DATA: (p, recipient, data, type) => {
// NOOP -- should not be sent by client
},
CHANNEL_WINDOW_ADJUST: (p, recipient, amount) => {
let channel = this._chanMgr.get(recipient);
if (typeof channel !== 'object' || channel === null)
return;
if (channel.constructor === Session) {
channel = channel._channel;
if (!channel)
return;
}
// The other side is allowing us to send `amount` more bytes of data
channel.outgoing.window += amount;
if (channel._waitWindow) {
channel._waitWindow = false;
if (channel._chunk) {
channel._write(channel._chunk, null, channel._chunkcb);
} else if (channel._chunkcb) {
channel._chunkcb();
} else if (channel._chunkErr) {
channel.stderr._write(channel._chunkErr,
null,
channel._chunkcbErr);
} else if (channel._chunkcbErr) {
channel._chunkcbErr();
}
}
},
CHANNEL_SUCCESS: (p, recipient) => {
let channel = this._chanMgr.get(recipient);
if (typeof channel !== 'object' || channel === null)
return;
if (channel.constructor === Session) {
channel = channel._channel;
if (!channel)
return;
}
if (channel._callbacks.length)
channel._callbacks.shift()(false);
},
CHANNEL_FAILURE: (p, recipient) => {
let channel = this._chanMgr.get(recipient);
if (typeof channel !== 'object' || channel === null)
return;
if (channel.constructor === Session) {
channel = channel._channel;
if (!channel)
return;
}
if (channel._callbacks.length)
channel._callbacks.shift()(true);
},
CHANNEL_REQUEST: (p, recipient, type, wantReply, data) => {
const session = this._chanMgr.get(recipient);
if (typeof session !== 'object' || session === null)
return;
let replied = false;
let accept;
let reject;
if (session.constructor !== Session) {
// normal Channel instance
if (wantReply)
proto.channelFailure(session.outgoing.id);
return;
}
if (wantReply) {
// "real session" requests will have custom accept behaviors
if (type !== 'shell'
&& type !== 'exec'
&& type !== 'subsystem') {
accept = () => {
if (replied || session._ending || session._channel)
return;
replied = true;
proto.channelSuccess(session._chanInfo.outgoing.id);
};
}
reject = () => {
if (replied || session._ending || session._channel)
return;
replied = true;
proto.channelFailure(session._chanInfo.outgoing.id);
};
}
if (session._ending) {
reject && reject();
return;
}
switch (type) {
// "pre-real session start" requests
case 'env':
if (listenerCount(session, 'env')) {
session.emit('env', accept, reject, {
key: data.name,
val: data.value
});
return;
}
break;
case 'pty-req':
if (listenerCount(session, 'pty')) {
session.emit('pty', accept, reject, data);
return;
}
break;
case 'window-change':
if (listenerCount(session, 'window-change'))
session.emit('window-change', accept, reject, data);
else
reject && reject();
break;
case 'x11-req':
if (listenerCount(session, 'x11')) {
session.emit('x11', accept, reject, data);
return;
}
break;
// "post-real session start" requests
case 'signal':
if (listenerCount(session, 'signal')) {
session.emit('signal', accept, reject, {
name: data
});
return;
}
break;
// XXX: is `auth-agent-req@openssh.com` really "post-real session
// start"?
case 'auth-agent-req@openssh.com':
if (listenerCount(session, 'auth-agent')) {
session.emit('auth-agent', accept, reject);
return;
}
break;
// "real session start" requests
case 'shell':
if (listenerCount(session, 'shell')) {
accept = () => {
if (replied || session._ending || session._channel)
return;
replied = true;
if (wantReply)
proto.channelSuccess(session._chanInfo.outgoing.id);
const channel = new Channel(
this, session._chanInfo, { server: true }
);
channel.subtype = session.subtype = type;
session._channel = channel;
return channel;
};
session.emit('shell', accept, reject);
return;
}
break;
case 'exec':
if (listenerCount(session, 'exec')) {
accept = () => {
if (replied || session._ending || session._channel)
return;
replied = true;
if (wantReply)
proto.channelSuccess(session._chanInfo.outgoing.id);
const channel = new Channel(
this, session._chanInfo, { server: true }
);
channel.subtype = session.subtype = type;
session._channel = channel;
return channel;
};
session.emit('exec', accept, reject, {
command: data
});
return;
}
break;
case 'subsystem': {
let useSFTP = (data === 'sftp');
accept = () => {
if (replied || session._ending || session._channel)
return;
replied = true;
if (wantReply)
proto.channelSuccess(session._chanInfo.outgoing.id);
let instance;
if (useSFTP) {
instance = new SFTP(this, session._chanInfo, {
server: true,
debug,
});
} else {
instance = new Channel(
this, session._chanInfo, { server: true }
);
instance.subtype =
session.subtype = `${type}:${data}`;
}
session._channel = instance;
return instance;
};
if (data === 'sftp') {
if (listenerCount(session, 'sftp')) {
session.emit('sftp', accept, reject);
return;
}
useSFTP = false;
}
if (listenerCount(session, 'subsystem')) {
session.emit('subsystem', accept, reject, {
name: data
});
return;
}
break;
}
}
debug && debug(
`Automatic rejection of incoming channel request: ${type}`
);
reject && reject();
},
CHANNEL_EOF: (p, recipient) => {
let channel = this._chanMgr.get(recipient);
if (typeof channel !== 'object' || channel === null)
return;
if (channel.constructor === Session) {
if (!channel._ending) {
channel._ending = true;
channel.emit('eof');
channel.emit('end');
}
channel = channel._channel;
if (!channel)
return;
}
if (channel.incoming.state !== 'open')
return;
channel.incoming.state = 'eof';
if (channel.readable)
channel.push(null);
},
CHANNEL_CLOSE: (p, recipient) => {
let channel = this._chanMgr.get(recipient);
if (typeof channel !== 'object' || channel === null)
return;
if (channel.constructor === Session) {
channel._ending = true;
channel.emit('close');
channel = channel._channel;
if (!channel)
return;
}
onCHANNEL_CLOSE(this, recipient, channel);
},
// Begin service/auth-related ==========================================
SERVICE_REQUEST: (p, service) => {
if (exchanges === 0
|| acceptedAuthSvc
|| this.authenticated
|| service !== 'ssh-userauth') {
proto.disconnect(DISCONNECT_REASON.SERVICE_NOT_AVAILABLE);
socket.end();
return;
}
acceptedAuthSvc = true;
proto.serviceAccept(service);
},
USERAUTH_REQUEST: (p, username, service, method, methodData) => {
if (exchanges === 0
|| this.authenticated
|| (authCtx
&& (authCtx.username !== username
|| authCtx.service !== service))
// TODO: support hostbased auth
|| (method !== 'password'
&& method !== 'publickey'
&& method !== 'hostbased'
&& method !== 'keyboard-interactive'
&& method !== 'none')
|| pendingAuths.length === MAX_PENDING_AUTHS) {
proto.disconnect(DISCONNECT_REASON.PROTOCOL_ERROR);
socket.end();
return;
} else if (service !== 'ssh-connection') {
proto.disconnect(DISCONNECT_REASON.SERVICE_NOT_AVAILABLE);
socket.end();
return;
}
let ctx;
switch (method) {
case 'keyboard-interactive':
ctx = new KeyboardAuthContext(proto, username, service, method,
methodData, onAuthDecide);
break;
case 'publickey':
ctx = new PKAuthContext(proto, username, service, method,
methodData, onAuthDecide);
break;
case 'hostbased':
ctx = new HostbasedAuthContext(proto, username, service, method,
methodData, onAuthDecide);
break;
case 'password':
if (authCtx
&& authCtx instanceof PwdAuthContext
&& authCtx._changeCb) {
const cb = authCtx._changeCb;
authCtx._changeCb = undefined;
cb(methodData.newPassword);
return;
}
ctx = new PwdAuthContext(proto, username, service, method,
methodData, onAuthDecide);
break;
case 'none':
ctx = new AuthContext(proto, username, service, method,
onAuthDecide);
break;
}
if (authCtx) {
if (!authCtx._initialResponse) {
return pendingAuths.push(ctx);
} else if (authCtx._multistep && !authCtx._finalResponse) {
// RFC 4252 says to silently abort the current auth request if a
// new auth request comes in before the final response from an
// auth method that requires additional request/response exchanges
// -- this means keyboard-interactive for now ...
authCtx._cleanup && authCtx._cleanup();
authCtx.emit('abort');
}
}
authCtx = ctx;
if (listenerCount(this, 'authentication'))
this.emit('authentication', authCtx);
else
authCtx.reject();
},
USERAUTH_INFO_RESPONSE: (p, responses) => {
if (authCtx && authCtx instanceof KeyboardAuthContext)
authCtx._onInfoResponse(responses);
},
// End service/auth-related ============================================
GLOBAL_REQUEST: (p, name, wantReply, data) => {
const reply = {
type: null,
buf: null
};
function setReply(type, buf) {
reply.type = type;
reply.buf = buf;
sendReplies();
}
if (wantReply)
unsentGlobalRequestsReplies.push(reply);
if ((name === 'tcpip-forward'
|| name === 'cancel-tcpip-forward'
|| name === 'no-more-sessions@openssh.com'
|| name === 'streamlocal-forward@openssh.com'
|| name === 'cancel-streamlocal-forward@openssh.com')
&& listenerCount(this, 'request')
&& this.authenticated) {
let accept;
let reject;
if (wantReply) {
let replied = false;
accept = (chosenPort) => {
if (replied)
return;
replied = true;
let bufPort;
if (name === 'tcpip-forward'
&& data.bindPort === 0
&& typeof chosenPort === 'number') {
bufPort = Buffer.allocUnsafe(4);
writeUInt32BE(bufPort, chosenPort, 0);
}
setReply('SUCCESS', bufPort);
};
reject = () => {
if (replied)
return;
replied = true;
setReply('FAILURE');
};
}
if (name === 'no-more-sessions@openssh.com') {
this.noMoreSessions = true;
accept && accept();
return;
}
this.emit('request', accept, reject, name, data);
} else if (wantReply) {
setReply('FAILURE');
}
},
},
});
socket.pause();
cryptoInit.then(() => {
socket.on('data', (data) => {
try {
proto.parse(data, 0, data.length);
} catch (ex) {
this.emit('error', ex);
try {
if (isWritable(socket))
socket.end();
} catch {}
}
});
socket.resume();
}).catch((err) => {
this.emit('error', err);
try {
if (isWritable(socket))
socket.end();
} catch {}
});
socket.on('error', (err) => {
err.level = 'socket';
this.emit('error', err);
}).once('end', () => {
debug && debug('Socket ended');
proto.cleanup();
this.emit('end');
}).once('close', () => {
debug && debug('Socket closed');
proto.cleanup();
this.emit('close');
const err = new Error('No response from server');
// Simulate error for pending channels and close any open channels
this._chanMgr.cleanup(err);
});
const onAuthDecide = (ctx, allowed, methodsLeft, isPartial) => {
if (authCtx === ctx && !this.authenticated) {
if (allowed) {
authCtx = undefined;
this.authenticated = true;
proto.authSuccess();
pendingAuths = [];
this.emit('ready');
} else {
proto.authFailure(methodsLeft, isPartial);
if (pendingAuths.length) {
authCtx = pendingAuths.pop();
if (listenerCount(this, 'authentication'))
this.emit('authentication', authCtx);
else
authCtx.reject();
}
}
}
};
function sendReplies() {
while (unsentGlobalRequestsReplies.length > 0
&& unsentGlobalRequestsReplies[0].type) {
const reply = unsentGlobalRequestsReplies.shift();
if (reply.type === 'SUCCESS')
proto.requestSuccess(reply.buf);
if (reply.type === 'FAILURE')
proto.requestFailure();
}
}
}
end() {
if (this._sock && isWritable(this._sock)) {
this._protocol.disconnect(DISCONNECT_REASON.BY_APPLICATION);
this._sock.end();
}
return this;
}
x11(originAddr, originPort, cb) {
const opts = { originAddr, originPort };
openChannel(this, 'x11', opts, cb);
return this;
}
forwardOut(boundAddr, boundPort, remoteAddr, remotePort, cb) {
const opts = { boundAddr, boundPort, remoteAddr, remotePort };
openChannel(this, 'forwarded-tcpip', opts, cb);
return this;
}
openssh_forwardOutStreamLocal(socketPath, cb) {
const opts = { socketPath };
openChannel(this, 'forwarded-streamlocal@openssh.com', opts, cb);
return this;
}
rekey(cb) {
let error;
try {
this._protocol.rekey();
} catch (ex) {
error = ex;
}
// TODO: re-throw error if no callback?
if (typeof cb === 'function') {
if (error)
process.nextTick(cb, error);
else
this.once('rekey', cb);
}
}
}
// Ask the client to open a channel for some purpose (e.g. a forwarded TCP
// connection). `opts` may be omitted, in which case the third argument is
// treated as the callback. Throws synchronously on an unsupported `type`.
function openChannel(self, type, opts, cb) {
  const initWindow = MAX_WINDOW;
  const maxPacket = PACKET_SIZE;
  if (typeof opts === 'function') {
    cb = opts;
    opts = {};
  }
  // Validate the type up front: the original threw *after* reserving a
  // channel slot, which permanently leaked that slot in the ChannelManager.
  if (type !== 'forwarded-tcpip'
      && type !== 'x11'
      && type !== 'forwarded-streamlocal@openssh.com') {
    throw new Error(`Unsupported channel type: ${type}`);
  }
  // Wrap the callback so we can tag it with the channel type without
  // mutating the caller's function object.
  const wrapper = (err, stream) => {
    cb(err, stream);
  };
  wrapper.type = type;
  const localChan = self._chanMgr.add(wrapper);
  if (localChan === -1) {
    cb(new Error('No free channels available'));
    return;
  }
  switch (type) {
    case 'forwarded-tcpip':
      self._protocol.forwardedTcpip(localChan, initWindow, maxPacket, opts);
      break;
    case 'x11':
      self._protocol.x11(localChan, initWindow, maxPacket, opts);
      break;
    case 'forwarded-streamlocal@openssh.com':
      self._protocol.openssh_forwardedStreamLocal(
        localChan, initWindow, maxPacket, opts
      );
      break;
  }
}
/**
 * Ascending numeric comparator for Array.prototype.sort().
 * Returns a negative number when a < b, positive when a > b, 0 when equal.
 */
function compareNumbers(a, b) {
  const diff = a - b;
  return diff;
}
// Primary export is the Server class; the per-connection Client wrapper is
// also exposed (e.g. for instanceof checks by consumers).
module.exports = Server;
module.exports.IncomingClient = Client;
|
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { monitorChartsQueryString } from '../../../../../legacy/plugins/uptime/public/queries';
import { expectFixtureEql } from './helpers/expect_fixture_eql';
export default function ({ getService }) {
  describe('monitorCharts query', () => {
    before('load heartbeat data', () => getService('esArchiver').load('uptime/full_heartbeat'));
    after('unload heartbeat index', () =>
      getService('esArchiver').unload('uptime/full_heartbeat')
    );
    const supertest = getService('supertest');
    // Posts a MonitorCharts GraphQL query for monitor '0002-up' over the
    // given date range and returns the response's `data` payload.
    const fetchChartData = async (dateRangeStart, dateRangeEnd) => {
      const {
        body: { data },
      } = await supertest
        .post('/api/uptime/graphql')
        .set('kbn-xsrf', 'foo')
        .send({
          operationName: 'MonitorCharts',
          query: monitorChartsQueryString,
          variables: { dateRangeStart, dateRangeEnd, monitorId: '0002-up' },
        });
      return data;
    };
    it('will fetch a series of data points for monitor duration and status', async () => {
      const data = await fetchChartData(
        '2019-09-11T03:31:04.380Z',
        '2019-09-11T03:40:34.410Z'
      );
      expectFixtureEql(data, 'monitor_charts');
    });
    it('will fetch empty sets for a date range with no data', async () => {
      // 1999 predates all archived heartbeat documents.
      const data = await fetchChartData(
        '1999-09-11T03:31:04.380Z',
        '1999-09-11T03:40:34.410Z'
      );
      expectFixtureEql(data, 'monitor_charts_empty_sets');
    });
  });
}
|
// Copyright 2020, Collabora, Ltd.
// SPDX-License-Identifier: BSL-1.0
// Author: Ryan Pavlik <ryan.pavlik@collabora.com>
#pragma once
#include "android.app.h"
namespace wrap {
namespace android::content {
class ComponentName;
class Context;
} // namespace android::content
} // namespace wrap
namespace wrap {
namespace android::service::vr {
/*!
* Wrapper for android.service.vr.VrListenerService objects.
*/
class VrListenerService : public app::Service {
  public:
    using Service::Service;

    //! Fully-qualified JNI type name for this wrapper.
    static constexpr const char *getTypeName() noexcept {
        return "android/service/vr/VrListenerService";
    }

    /*!
     * Wrapper for the isVrModePackageEnabled static method
     *
     * Java prototype:
     * `public static final boolean
     * isVrModePackageEnabled(android.content.Context,
     * android.content.ComponentName);`
     *
     * JNI signature:
     * (Landroid/content/Context;Landroid/content/ComponentName;)Z
     *
     */
    static bool
    isVrModePackageEnabled(content::Context const &context,
                           content::ComponentName const &componentName);

    /*!
     * Class metadata
     */
    struct Meta : public MetaBase {
        //! Cached method handle for the isVrModePackageEnabled static method.
        jni::method_t isVrModePackageEnabled;

        /*!
         * Singleton accessor
         */
        static Meta &data() {
            static Meta instance;
            return instance;
        }

      private:
        // Private: instances are only created via data().
        Meta();
    };
};
} // namespace android::service::vr
} // namespace wrap
#include "android.service.vr.impl.h"
|
import { AbstractTransitionComponent } from 'vue-transition-component';
import VueTypes from 'vue-types';
import ArmChairTransitionController from './ArmChairTransitionController';
import { match } from '../../store/utils';
// [100 - 66, 66-33, 0]
// @vue/component
export default {
  name: 'ArmChair',
  extends: AbstractTransitionComponent,
  props: {
    // Progress value the chair animation reacts to.
    points: VueTypes.number.isRequired,
  },
  computed: {
    // Maps `points` onto an animation step:
    //   (60, 100] -> 1, (0, 60] -> 2, exactly 0 -> 3.
    // NOTE(review): the "[100 - 66, 66-33, 0]" comment above this component
    // uses 66/33 thresholds while the predicates below use 60 — confirm
    // which ranges are intended.
    // NOTE(review): assumes `match(...).on(pred, fn)` invokes fn when pred
    // matches; values outside [0, 100] leave `step` undefined.
    activeStep() {
      let step;
      match(this.points)
        .on(
          p => p <= 100 && p > 60,
          () => {
            step = 1;
          },
        )
        .on(
          p => p <= 60 && p > 0,
          () => {
            step = 2;
          },
        )
        .on(
          p => p === 0,
          () => {
            step = 3;
          },
        );
      return step;
    },
  },
  methods: {
    // Invoked by AbstractTransitionComponent once all child components are
    // ready; wires up the transition controller and signals readiness.
    handleAllComponentsReady() {
      this.transitionController = new ArmChairTransitionController(this);
      this.isReady();
    },
  },
};
|
import setuptools
# Use the README as the PyPI long description.
with open("README.md", "r", encoding="utf-8") as fh:
    long_description = fh.read()

PROJECT_NAME = "ANN---Implementation"
USER_NAME = "kkkumar2"

setuptools.setup(
    name=f"{PROJECT_NAME}-{USER_NAME}",
    version="0.0.2",
    author=USER_NAME,
    author_email="kmohankumar123456@gmail.com",
    description="This is a ANN using Tensorflow package",
    long_description=long_description,
    long_description_content_type="text/markdown",
    url=f"https://github.com/{USER_NAME}/{PROJECT_NAME}",
    project_urls={
        "Bug Tracker": f"https://github.com/{USER_NAME}/{PROJECT_NAME}",
    },
    classifiers=[
        "Programming Language :: Python :: 3",
        "License :: OSI Approved :: MIT License",
        "Operating System :: OS Independent",
    ],
    # src-layout: importable packages live under src/.
    package_dir={"": "src"},
    # packages=["src"],
    packages=setuptools.find_packages(where="src"),
    python_requires=">=3.7",
    # with open("requirements.txt", "r", encoding="utf-8") as fh:
    # install_requires = fh.read()
    # install_requires_content_type="text/plain"
    install_requires = ["numpy","tqdm","pandas","matplotlib","tensorflow","seaborn"]
)
|
from visualiser.visualiser import Visualiser as vs
# A state is (missionaries_on_left, cannibals_on_left, boat_side), where
# boat_side == 1 means the boat is at the left bank.
start_state = (3, 3, 1)
goal_state = (0, 0, 0)
# Possible (missionaries, cannibals) boat loads for a single crossing.
options = [(2, 0), (1, 1), (0, 2), (1, 0), (0, 1)]
# States already explored, to avoid revisiting during the DFS.
visited = dict()
def is_valid(m, c):
    """Return True when both bank counts lie in the inclusive range [0, 3]."""
    return 0 <= m <= 3 and 0 <= c <= 3
@vs(ignore_args=["node_num", "level"])
def dfs(m, c, s, level):
    # Depth-first search over missionaries-and-cannibals states.
    #   m, c  -- missionaries/cannibals currently on the LEFT bank
    #   s     -- boat side: 1 = left bank, 0 = right bank
    #   level -- recursion depth (used only by the visualiser decorator)
    # Returns True iff the goal state (0, 0, 0) is reachable from here.
    if (m, c, s) == goal_state:
        return True
    # Reject states where cannibals outnumber missionaries on the left bank.
    if m > 0 and c > m:
        return False
    # ...and likewise on the right bank (3 - m and 3 - c people are there).
    right_side_m = 3 - m
    right_side_c = 3 - c
    if right_side_m > 0 and right_side_c > right_side_m:
        return False
    # Mark this state visited before recursing so cycles are cut off.
    visited[(m, c, s)] = True
    # Boat on the left moves people off the left bank (negative delta).
    if s == 1:
        op = -1
    else:
        op = 1
    solved = False
    for i in range(5):
        # Apply one of the five possible boat loads and flip the boat side.
        next_m, next_c, next_side = m + op * options[i][0], c + op * options[i][1], int(not s)
        if is_valid(next_m, next_c):
            if (next_m, next_c, next_side) not in visited:
                solved = (solved or dfs(m=next_m, c=next_c, s=next_side, level=level + 1))
                if solved:
                    return True
    return solved
# Run the search from the initial state and report the result.
if dfs(m=3, c=3, s=1, level=0):
    # Fixed typo in the user-facing message ("SOlution" -> "Solution").
    print("Solution Found")
# Save recursion tree to a file
vs.make_animation("missionaries.gif", delay=2)
|
#!/usr/bin/env python
"""Average words-per-second stats from timing lines read on stdin.

Lines ending in 'sec' are assumed to contain a '<src>/<tgt>' token count as
the fourth-from-last whitespace field -- TODO confirm against the producer's
log format. Prints the mean source and target rates.
"""
import sys

src_tok, tgt_tok = [], []
for line in sys.stdin:
    line = line.strip()
    if line.endswith('sec'):
        s, t = line.split()[-4].split('/')
        src_tok.append(float(s))
        tgt_tok.append(float(t))
if src_tok:
    print('src wps: ', sum(src_tok) / len(src_tok))
    print('tgt wps: ', sum(tgt_tok) / len(tgt_tok))
else:
    # Guard against ZeroDivisionError when no timing lines were found.
    sys.stderr.write('no timing lines found on stdin\n')
|
// Truffle test suite: `artifacts`, `web3` and `contract` are globals injected
// by the Truffle test runner.
const Token = artifacts.require("MyToken");
var chai = require("chai");
// Loads INITIAL_TOKENS from the project-root .env file.
require('dotenv').config({path: '../.env'});
const BN = web3.utils.BN;
// chai-bn adds BigNumber-aware assertions for web3 BN values.
const chaiBN = require('chai-bn')(BN);
chai.use(chaiBN);
var chaiAsPromised = require("chai-as-promised");
chai.use(chaiAsPromised);
const expect = chai.expect;

contract("Token Test", function(accounts) {
  const [ initialHolder, recipient, anotherAccount ] = accounts;

  // Deploy a fresh token before every test so balances are deterministic.
  beforeEach(async () => {
    this.myToken = await Token.new(process.env.INITIAL_TOKENS);
  });

  it("All tokens should be in my account", async () => {
    //let instance = await Token.deployed();
    let instance = this.myToken;
    let totalSupply = await instance.totalSupply();
    //… more content
  });

  it("I can send tokens from Account 1 to Account 2", async () => {
    const sendTokens = 1;
    let instance = this.myToken;
    let totalSupply = await instance.totalSupply();
    //… more content
  });

  it("It's not possible to send more tokens than account 1 has", async () => {
    let instance = this.myToken;
    //… more content
  });
});
|
# coding: utf-8
"""
Account API
The <b>Account API</b> gives sellers the ability to configure their eBay seller accounts, including the seller's policies (the Fulfillment Policy, Payment Policy, and Return Policy), opt in and out of eBay seller programs, configure sales tax tables, and get account information. <br><br>For details on the availability of the methods in this API, see <a href=\"/api-docs/sell/account/overview.html#requirements\">Account API requirements and restrictions</a>. # noqa: E501
OpenAPI spec version: v1.6.3
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
class TimeDuration(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'unit': 'str',
'value': 'int'
}
attribute_map = {
'unit': 'unit',
'value': 'value'
}
def __init__(self, unit=None, value=None): # noqa: E501
"""TimeDuration - a model defined in Swagger""" # noqa: E501
self._unit = None
self._value = None
self.discriminator = None
if unit is not None:
self.unit = unit
if value is not None:
self.value = value
@property
def unit(self):
"""Gets the unit of this TimeDuration. # noqa: E501
A time-measurement unit that specifies a singular period of time. A span of time is defined when you apply the value specified in the value field to the value specified for unit. Time-measurement units can be YEAR, MONTH, DAY, and so on. See TimeDurationUnitEnum for a complete list of possible time-measurement units. For implementation help, refer to <a href='https://developer.ebay.com/api-docs/sell/account/types/ba:TimeDurationUnitEnum'>eBay API documentation</a> # noqa: E501
:return: The unit of this TimeDuration. # noqa: E501
:rtype: str
"""
return self._unit
@unit.setter
def unit(self, unit):
"""Sets the unit of this TimeDuration.
A time-measurement unit that specifies a singular period of time. A span of time is defined when you apply the value specified in the value field to the value specified for unit. Time-measurement units can be YEAR, MONTH, DAY, and so on. See TimeDurationUnitEnum for a complete list of possible time-measurement units. For implementation help, refer to <a href='https://developer.ebay.com/api-docs/sell/account/types/ba:TimeDurationUnitEnum'>eBay API documentation</a> # noqa: E501
:param unit: The unit of this TimeDuration. # noqa: E501
:type: str
"""
self._unit = unit
@property
def value(self):
"""Gets the value of this TimeDuration. # noqa: E501
An integer that represents an amount of time, as measured by the time-measurement unit specified in the unit field. # noqa: E501
:return: The value of this TimeDuration. # noqa: E501
:rtype: int
"""
return self._value
@value.setter
def value(self, value):
"""Sets the value of this TimeDuration.
An integer that represents an amount of time, as measured by the time-measurement unit specified in the unit field. # noqa: E501
:param value: The value of this TimeDuration. # noqa: E501
:type: int
"""
self._value = value
def to_dict(self):
    """Returns the model properties as a dict.

    Nested models (anything exposing ``to_dict``) are serialized
    recursively, inside both lists and dict values.
    """
    result = {}
    for attr in self.swagger_types:
        value = getattr(self, attr)
        if isinstance(value, list):
            result[attr] = [
                item.to_dict() if hasattr(item, "to_dict") else item
                for item in value
            ]
        elif hasattr(value, "to_dict"):
            result[attr] = value.to_dict()
        elif isinstance(value, dict):
            result[attr] = {
                key: val.to_dict() if hasattr(val, "to_dict") else val
                for key, val in value.items()
            }
        else:
            result[attr] = value
    # If this model subclasses dict, include its raw items as well.
    if issubclass(TimeDuration, dict):
        for key, value in self.items():
            result[key] = value
    return result
def to_str(self):
    """Returns the string representation of the model."""
    model_dict = self.to_dict()
    return pprint.pformat(model_dict)
def __repr__(self):
    """For `print` and `pprint`."""
    return self.to_str()
def __eq__(self, other):
    """Returns true if both objects are equal."""
    if isinstance(other, TimeDuration):
        return self.__dict__ == other.__dict__
    return False
def __ne__(self, other):
    """Returns true if both objects are not equal."""
    return not (self == other)
|
# ------------------------------------------------------
# Utilities to handle HapMap data
# ------------------------------------------------------
import numpy
import re
# genotypes:
# rs# alleles chrom pos strand assembly# center protLSID assayLSID panelLSID QCcode NA06984 NA06985
# phased haplotypes:
# rs# phys_position NAXXXXXX_A NAXXXXXX_B NAXXXXXX_A NAXXXXXX_B
# loads the SNP text format
# into an array of positions
def loadPositions(f, verbose=False, delimiter=' ', pos_idx=0, sample_idx=1):
    """Load the HapMap SNP text format into an array of positions.

    The first line of the file is treated as a header and skipped.

    :param f: path to the SNP text file
    :param verbose: if True, print the number of SNPs read
    :param delimiter: regular expression used to split each line
    :param pos_idx: column index holding the chromosomal position
    :param sample_idx: unused; kept for interface compatibility with
        loadSNPmatrix
    :return: numpy uint32 array with one position per data row
    """
    # Use a context manager so the file handle is closed deterministically
    # (the original relied on the garbage collector to close it).
    with open(f) as handle:
        data = [re.split(delimiter, line.strip())
                for line in handle.read().strip().split('\n')]
    # Header row holds the sample names; only skipped over here.
    header = data[0]
    if verbose:
        # Parenthesized print works under both Python 2 and Python 3.
        print("%i SNPs" % (len(data) - 1))
    positions = numpy.array([int(row[pos_idx]) for row in data[1:]],
                            dtype="u4")
    return positions
# loads the SNP text format
# into a list of strings consisting only of {0,1,2}, one for each sample
def loadSNPmatrix(f, n_cutoff=None, verbose=False, delimiter=' ', pos_idx=0, sample_idx=1, filter_positions=None):
    # Loads the HapMap SNP text format; for each retained SNP row produces
    # per-sample codes: '0' majority allele, '1' minority allele,
    # '2' heterozygous, 'N' unknown.
    # NOTE(review): each appended row is a *list* of one-char strings, not a
    # joined string, despite the "list of strings" description above the
    # function -- confirm which form callers expect.
    data = [re.split(delimiter, line.strip()) for line in open(f).read().strip().split('\n')]
    # look at header, sample names
    header = data[0]
    if type(sample_idx) == type([]):
        # sample_idx given as a list of sample names: map names to columns
        sample_idx = [header.index(s) for s in sample_idx]
    else:
        # sample_idx given as an int: take every column from there onward
        sample_idx = range(sample_idx, len(header))
    if verbose:
        print "Loading %i samples..." % (len(sample_idx))
        print [header[s] for s in sample_idx]
        print "%i SNPs" % (len(data)-1)
    # pivot data table, convert alleles to {0,1,2,N}
    snps = []
    if filter_positions is not None:
        # NOTE(review): rebinds the filename parameter 'f' as a cursor into
        # filter_positions; harmless here but confusing.
        f = 0
    for i in xrange(1, len(data)):
        # filter by positions, if necessary
        if filter_positions is not None:
            # advance the cursor past positions smaller than this SNP's
            while f < len(filter_positions) and filter_positions[f] < int(data[i][pos_idx]):
                f += 1
            # NOTE(review): once the cursor runs off the end of
            # filter_positions, remaining SNPs are NOT filtered out -- confirm
            # this is intended.
            if f < len(filter_positions) and filter_positions[f] != int(data[i][pos_idx]):
                continue
        # count A/C/G/T/N occurrences across all samples for this SNP
        alleles = ''.join([data[i][s] for s in sample_idx])
        allele_counts = sorted([(a, alleles.count(a)) for a in "ACGTN"], key = lambda a: -1*a[1])
        # any character outside ACGTN (e.g. indel codes D/I) -> skip the row
        if sum([a[1] for a in allele_counts]) < len(alleles):
            if verbose:
                print "Unknown allele (probably indel {D,I}) in '%s'" % alleles
            continue
        majority = allele_counts[0]
        # skip rows with three or more distinct alleles
        if allele_counts[3][1] > 0 or (allele_counts[2][1] > 0 and (allele_counts[3][0] == 'N' or allele_counts[4][0] == 'N')):
            if verbose:
                print "Three or more alleles (probably known on chrX):", allele_counts
            continue
        # re-encode each sample's genotype relative to the majority allele
        alleles = []
        for s in sample_idx:
            allele = data[i][s]
            if 'N' in allele:
                allele = 'N'
            elif len(allele) > 1 and allele[0] != allele[-1]:
                allele = '2'
            elif allele[0] == majority[0]:
                allele = '0'
            else:
                allele = '1'
            alleles.append(allele)
        snps.append(alleles)
    # compute and filter by N fraction
    if n_cutoff is not None:
        # NOTE(review): BUG -- 'samples' is never defined in this function
        # (snps holds per-SNP rows, not per-sample columns), so passing a
        # non-None n_cutoff raises NameError. The per-sample N-fraction
        # filter needs the matrix pivoted before this loop can work.
        s = 0
        while s < len(samples):
            n_fraction = float(samples[s].count('N'))/len(samples[s])
            if verbose:
                print "%s: %.4f N" % (header[sample_idx[s]], n_fraction)
            if n_fraction > n_cutoff:
                samples.pop(s)
            else:
                s += 1
    if verbose:
        print "matrix: %i samples x %i SNPs" % (len(snps[0]), len(snps))
    return snps
|
'''
OpenCV Python binary extension loader
'''
import os
import sys
try:
import numpy
import numpy.core.multiarray
except ImportError:
print('OpenCV bindings requires "numpy" package.')
print('Install it via command:')
print(' pip install numpy')
raise
# TODO
# is_x64 = sys.maxsize > 2**32
def bootstrap():
    # Locate the binary cv2 extension next to this loader, extend
    # sys.path / the DLL search path from the generated config files, and
    # replace this placeholder 'cv2' module with the real extension.
    import sys
    import copy
    save_sys_path = copy.copy(sys.path)

    # Guard against re-entering the loader while the extension import
    # below is still in progress.
    if hasattr(sys, 'OpenCV_LOADER'):
        print(sys.path)
        raise ImportError('ERROR: recursion is detected during loading of "cv2" binary extensions. Check OpenCV installation.')
    sys.OpenCV_LOADER = True

    DEBUG = False
    if hasattr(sys, 'OpenCV_LOADER_DEBUG'):
        DEBUG = True

    import platform
    if DEBUG: print('OpenCV loader: os.name="{}" platform.system()="{}"'.format(os.name, str(platform.system())))

    LOADER_DIR = os.path.dirname(os.path.abspath(os.path.realpath(__file__)))

    # Filled in by the exec'd config files below.
    PYTHON_EXTENSIONS_PATHS = []
    BINARIES_PATHS = []

    g_vars = globals()
    l_vars = locals()

    # exec_file_wrapper differs between Python 2 and 3 (exec syntax).
    if sys.version_info[:2] < (3, 0):
        from . load_config_py2 import exec_file_wrapper
    else:
        from . load_config_py3 import exec_file_wrapper

    def load_first_config(fnames, required=True):
        # Execute the first existing config file from fnames into
        # g_vars/l_vars; raise when none exists and the config is required.
        for fname in fnames:
            fpath = os.path.join(LOADER_DIR, fname)
            if not os.path.exists(fpath):
                if DEBUG: print('OpenCV loader: config not found, skip: {}'.format(fpath))
                continue
            if DEBUG: print('OpenCV loader: loading config: {}'.format(fpath))
            exec_file_wrapper(fpath, g_vars, l_vars)
            return True
        if required:
            raise ImportError('OpenCV loader: missing configuration file: {}. Check OpenCV installation.'.format(fnames))

    # Base config first, then the interpreter-version-specific one
    # (e.g. config-3.8.py) with a major-version-only fallback.
    load_first_config(['config.py'], True)
    load_first_config([
        'config-{}.{}.py'.format(sys.version_info[0], sys.version_info[1]),
        'config-{}.py'.format(sys.version_info[0])
    ], True)

    if DEBUG: print('OpenCV loader: PYTHON_EXTENSIONS_PATHS={}'.format(str(l_vars['PYTHON_EXTENSIONS_PATHS'])))
    if DEBUG: print('OpenCV loader: BINARIES_PATHS={}'.format(str(l_vars['BINARIES_PATHS'])))

    # When sys.path[0] is the package's parent dir (script launched from
    # there), insert extension paths at position 0 instead of 1.
    applySysPathWorkaround = False
    if hasattr(sys, 'OpenCV_REPLACE_SYS_PATH_0'):
        applySysPathWorkaround = True
    else:
        try:
            BASE_DIR = os.path.dirname(LOADER_DIR)
            if sys.path[0] == BASE_DIR or os.path.realpath(sys.path[0]) == BASE_DIR:
                applySysPathWorkaround = True
        except:
            if DEBUG: print('OpenCV loader: exception during checking workaround for sys.path[0]')
            pass  # applySysPathWorkaround is False

    # reversed() + insert at a fixed index preserves the configured order.
    for p in reversed(l_vars['PYTHON_EXTENSIONS_PATHS']):
        sys.path.insert(1 if not applySysPathWorkaround else 0, p)

    if os.name == 'nt':
        if sys.version_info[:2] >= (3, 8):  # https://github.com/python/cpython/pull/12302
            # Python 3.8+ on Windows no longer searches PATH for DLLs.
            for p in l_vars['BINARIES_PATHS']:
                try:
                    os.add_dll_directory(p)
                except Exception as e:
                    if DEBUG: print('Failed os.add_dll_directory(): ' + str(e))
                    pass
        os.environ['PATH'] = ';'.join(l_vars['BINARIES_PATHS']) + ';' + os.environ.get('PATH', '')
        if DEBUG: print('OpenCV loader: PATH={}'.format(str(os.environ['PATH'])))
    else:
        # amending of LD_LIBRARY_PATH works for sub-processes only
        os.environ['LD_LIBRARY_PATH'] = ':'.join(l_vars['BINARIES_PATHS']) + ':' + os.environ.get('LD_LIBRARY_PATH', '')

    if DEBUG: print('OpenCV loader: replacing cv2 module')
    # Drop this placeholder module so the next import resolves to the real
    # binary extension found via the paths added above.
    del sys.modules['cv2']
    import cv2

    sys.path = save_sys_path  # multiprocessing should start from bootstrap code (https://github.com/opencv/opencv/issues/18502)

    try:
        import sys
        del sys.OpenCV_LOADER
    except:
        pass

    if DEBUG: print('OpenCV loader: DONE')

bootstrap()
|
/**
* Copyright 2015 The AMP HTML Authors. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS-IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
var del = require('del');
var fs = require('fs');
var gulp = require('gulp-help')(require('gulp'));
var gzipSize = require('gzip-size');
var prettyBytes = require('pretty-bytes');
var table = require('text-table');
var through = require('through2');
var util = require('gulp-util');
// Scratch directory for gulp.dest output; deleted when the task ends.
var tempFolderName = '__size-temp';
// Column indices into a normalized size row.
var MAX_FILE_SIZE_POS = 0;
var MIN_FILE_SIZE_POS = 1;
var FILENAME_POS = 2;
// normalized table headers
var tableHeaders = [
  ['max', 'min', 'gzip', 'file'],
  ['---', '---', '---', '---'],
];
// text-table layout: right-align the size columns, left-align filenames.
var tableOptions = {
  align: ['r', 'r', 'r', 'l'],
  hsep: ' | ',
};
/**
 * Returns the index of the first row whose filename (third column)
 * satisfies the predicate, or -1 when no row matches.
 *
 * @param {!Array<!Array>} rows
 * @param {function(string): boolean} predicate
 * @return {number}
 */
function findMaxIndexByFilename(rows, predicate) {
  var foundIndex = -1;
  rows.some(function(row, i) {
    if (predicate(row[2])) {
      foundIndex = i;
      return true;
    }
    return false;
  });
  return foundIndex;
}
/**
 * Mutates the rows in place: merges a minified file entry with its
 * unminified counterpart, prepending the unminified size to the
 * minified row and removing the unminified row.
 * @param {!Array<!Array>} rows
 * @param {string} minFilename
 * @param {string} maxFilename
 * @param {boolean} mergeNames whether to join the two filenames
 */
function normalizeRow(rows, minFilename, maxFilename, mergeNames) {
  var equalsName = function(target) {
    return function(filename) {
      return filename == target;
    };
  };
  var minIndex = findMaxIndexByFilename(rows, equalsName(minFilename));
  var maxIndex = findMaxIndexByFilename(rows, equalsName(maxFilename));
  if (minIndex == -1 || maxIndex == -1) {
    return;
  }
  if (mergeNames) {
    rows[minIndex][FILENAME_POS] += ' / ' + rows[maxIndex][FILENAME_POS];
  }
  rows[minIndex].unshift(rows[maxIndex][MAX_FILE_SIZE_POS]);
  rows.splice(maxIndex, 1);
}
/**
 * Call normalizeRow on the core file, integration file and all extensions.
 * @param {!Array<!Array>} rows
 * @return {!Array<!Array>} the same array, mutated in place
 */
function normalizeRows(rows) {
  // normalize amp.js
  normalizeRow(rows, 'v0.js', 'amp.js', true);
  // normalize integration.js
  normalizeRow(rows, 'current-min/f.js', 'current/integration.js', true);
  // normalize extensions; iterate backwards because normalizeExtension
  // may splice rows out from under us.
  for (var i = rows.length - 1; i >= 0; i--) {
    var rowName = rows[i][FILENAME_POS];
    if (/^v0/.test(rowName)) {
      normalizeExtension(rows, rowName);
    }
  }
  return rows;
}
/**
 * Finds the counterpart entry of the extension file, whether it be
 * the unminified or the minified counterpart, and merges the pair.
 * @param {!Array<!Array>} rows
 * @param {string} filename
 */
function normalizeExtension(rows, filename) {
  var isMax = /\.max\.js$/.test(filename);
  if (!isMax) {
    // The original replace() callback returned the full match untouched
    // for non-.max files, so the counterpart is the filename itself.
    normalizeRow(rows, filename, filename, false);
    return;
  }
  // Strip the ".max" infix to get the minified counterpart's name.
  var counterpartName = filename.replace(
      /(v0\/.*?)(\.max)?(\.js)$/,
      function(full, grp1, grp2, grp3) {
        return grp1 + grp3;
      });
  normalizeRow(rows, counterpartName, filename, false);
}
/**
 * Through2 transform function - records the raw size and the gzipped size
 * of each file's contents in the rows array, then forwards the file (or
 * error) to the callback.
 * @param {!Array<!Array<string>>} rows array to store content size information
 * @param {!File} file File to process
 * @param {string} enc Encoding (not used)
 * @param {function(?Error, !File)} cb Callback function
 */
function onFileThrough(rows, file, enc, cb) {
  if (file.isNull()) {
    cb(null, file);
    return;
  }
  if (file.isStream()) {
    cb(new util.PluginError('size', 'Stream not supported'));
    return;
  }
  var rawSize = prettyBytes(file.contents.length);
  var zippedSize = prettyBytes(gzipSize.sync(file.contents));
  rows.push([rawSize, zippedSize, file.relative]);
  cb(null, file);
}
/**
 * Through2 flush function - combines headers with the normalized rows,
 * renders a text-table of the size information, logs it to the console
 * and writes it to the test/size.txt logfile.
 *
 * @param {!Array<!Array<string>>} rows array of content size information
 * @param {function()} cb Callback function
 */
function onFileThroughEnd(rows, cb) {
  var normalized = normalizeRows(rows);
  normalized.unshift.apply(normalized, tableHeaders);
  var output = table(normalized, tableOptions);
  console/*OK*/.log(output);
  fs.writeFileSync('test/size.txt', output);
  cb();
}
/**
 * Setup through2 to capture size information using the above transform and
 * flush functions on a stream; both share one rows accumulator.
 * @return {!Stream} a Writable Stream
 */
function sizer() {
  var rows = [];
  var transform = onFileThrough.bind(null, rows);
  var flush = onFileThroughEnd.bind(null, rows);
  return through.obj(transform, flush);
}
/**
 * Pipe the distributable js files through the sizer to get a record of
 * the content size before and after gzip, then clean up the temporary
 * output folder produced by the process.
 */
function sizeTask() {
  var sources = [
    'dist/**/*.js',
    '!dist/**/*-latest.js',
    'dist.3p/{current,current-min}/**/*.js',
  ];
  gulp.src(sources)
      .pipe(sizer())
      .pipe(gulp.dest(tempFolderName))
      .on('end', del.bind(null, [tempFolderName]));
}
gulp.task('size', 'Runs a report on artifact size', sizeTask);
|
/**
* @license Angular v7.1.1
* (c) 2010-2018 Google, Inc. https://angular.io/
* License: MIT
*/
!function(e,t){"object"==typeof exports&&"undefined"!=typeof module?t(exports,require("@angular/compiler"),require("@angular/core"),require("@angular/common"),require("@angular/platform-browser")):"function"==typeof define&&define.amd?define("@angular/platform-browser-dynamic",["exports","@angular/compiler","@angular/core","@angular/common","@angular/platform-browser"],t):t((e.ng=e.ng||{},e.ng.platformBrowserDynamic={}),e.ng.compiler,e.ng.core,e.ng.common,e.ng.platformBrowser)}(this,function(e,t,r,o,n){"use strict";var i=function(e,t){return(i=Object.setPrototypeOf||{__proto__:[]}instanceof Array&&function(e,t){e.__proto__=t}||function(e,t){for(var r in t)t.hasOwnProperty(r)&&(e[r]=t[r])})(e,t)};function s(e,t){function r(){this.constructor=e}i(e,t),e.prototype=null===t?Object.create(t):(r.prototype=t.prototype,new r)}function a(e,t){var r="function"==typeof Symbol&&e[Symbol.iterator];if(!r)return e;var o,n,i=r.call(e),s=[];try{for(;(void 0===t||t-- >0)&&!(o=i.next()).done;)s.push(o.value)}catch(e){n={error:e}}finally{try{o&&!o.done&&(r=i.return)&&r.call(i)}finally{if(n)throw n.error}}return s}function l(){for(var e=[],t=0;t<arguments.length;t++)e=e.concat(a(arguments[t]));return e}
/**
* @license
* Copyright Google Inc. All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/var p=function c(){var e=new Map;return e.set(t.Identifiers.ANALYZE_FOR_ENTRY_COMPONENTS,r.ANALYZE_FOR_ENTRY_COMPONENTS),e.set(t.Identifiers.ElementRef,r.ElementRef),e.set(t.Identifiers.NgModuleRef,r.NgModuleRef),e.set(t.Identifiers.ViewContainerRef,r.ViewContainerRef),e.set(t.Identifiers.ChangeDetectorRef,r.ChangeDetectorRef),e.set(t.Identifiers.Renderer2,r.Renderer2),e.set(t.Identifiers.QueryList,r.QueryList),e.set(t.Identifiers.TemplateRef,r.TemplateRef),e.set(t.Identifiers.CodegenComponentFactoryResolver,r.ɵCodegenComponentFactoryResolver),e.set(t.Identifiers.ComponentFactoryResolver,r.ComponentFactoryResolver),e.set(t.Identifiers.ComponentFactory,r.ComponentFactory),e.set(t.Identifiers.ComponentRef,r.ComponentRef),e.set(t.Identifiers.NgModuleFactory,r.NgModuleFactory),e.set(t.Identifiers.createModuleFactory,r.ɵcmf),e.set(t.Identifiers.moduleDef,r.ɵmod),e.set(t.Identifiers.moduleProviderDef,r.ɵmpd),e.set(t.Identifiers.RegisterModuleFactoryFn,r.ɵregisterModuleFactory),e.set(t.Identifiers.Injector,r.Injector),e.set(t.Identifiers.ViewEncapsulation,r.ViewEncapsulation),e.set(t.Identifiers.ChangeDetectionStrategy,r.ChangeDetectionStrategy),e.set(t.Identifiers.SecurityContext,r.SecurityContext),e.set(t.Identifiers.LOCALE_ID,r.LOCALE_ID),e.set(t.Identifiers.TRANSLATIONS_FORMAT,r.TRANSLATIONS_FORMAT),e.set(t.Identifiers.inlineInterpolate,r.ɵinlineInterpolate),e.set(t.Identifiers.interpolate,r.ɵinterpolate),e.set(t.Identifiers.EMPTY_ARRAY,r.ɵEMPTY_ARRAY),e.set(t.Identifiers.EMPTY_MAP,r.ɵEMPTY_MAP),e.set(t.Identifiers.Renderer,r.Renderer),e.set(t.Identifiers.viewDef,r.ɵvid),e.set(t.Identifiers.elementDef,r.ɵeld),e.set(t.Identifiers.anchorDef,r.ɵand),e.set(t.Identifiers.textDef,r.ɵted),e.set(t.Identifiers.directiveDef,r.ɵdid),e.set(t.Identifiers.providerDef,r.ɵprd),e.set(t.Identifiers.queryDef,r.ɵqud),e.set(t.Identifiers.pureArrayDef,r.ɵpad),e.set(t.Identifiers.pureObjectDef,r.ɵpod),e.set(t.Identifiers.purePipeDef,r.ɵppd),e.set(t.Identifiers.pipeDef,r.ɵpid),e.set(t.Ident
ifiers.nodeValue,r.ɵnov),e.set(t.Identifiers.ngContentDef,r.ɵncd),e.set(t.Identifiers.unwrapValue,r.ɵunv),e.set(t.Identifiers.createRendererType2,r.ɵcrt),e.set(t.Identifiers.createComponentFactory,r.ɵccf),e}
/**
* @license
* Copyright Google Inc. All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/(),u=function(){function e(){this.reflectionCapabilities=new r.ɵReflectionCapabilities}return e.prototype.componentModuleUrl=function(e,o){var n=o.moduleId;if("string"==typeof n)return t.getUrlScheme(n)?n:"package:"+n;if(null!==n&&void 0!==n)throw t.syntaxError('moduleId should be a string in "'+r.ɵstringify(e)+"\". See https://goo.gl/wIDDiL for more information.\nIf you're using Webpack you should inline the template and the styles, see https://goo.gl/X2J8zc.");return"./"+r.ɵstringify(e)},e.prototype.parameters=function(e){return this.reflectionCapabilities.parameters(e)},e.prototype.tryAnnotations=function(e){return this.annotations(e)},e.prototype.annotations=function(e){return this.reflectionCapabilities.annotations(e)},e.prototype.shallowAnnotations=function(e){throw new Error("Not supported in JIT mode")},e.prototype.propMetadata=function(e){return this.reflectionCapabilities.propMetadata(e)},e.prototype.hasLifecycleHook=function(e,t){return this.reflectionCapabilities.hasLifecycleHook(e,t)},e.prototype.guards=function(e){return this.reflectionCapabilities.guards(e)},e.prototype.resolveExternalReference=function(e){return p.get(e)||e.runtime},e}(),d=new r.InjectionToken("ErrorCollector"),f={provide:r.PACKAGE_ROOT_URL,useValue:"/"},m={get:function(e){throw new Error("No ResourceLoader implementation has been provided. 
Can't read the url \""+e+'"')}},y=new r.InjectionToken("HtmlParser"),R=function(){function e(e,r,o,n,i,s,a,l,p,c){this._metadataResolver=r,this._delegate=new t.JitCompiler(r,o,n,i,s,a,l,p,c,this.getExtraNgModuleProviders.bind(this)),this.injector=e}return e.prototype.getExtraNgModuleProviders=function(){return[this._metadataResolver.getProviderMetadata(new t.ProviderMeta(r.Compiler,{useValue:this}))]},e.prototype.compileModuleSync=function(e){return this._delegate.compileModuleSync(e)},e.prototype.compileModuleAsync=function(e){return this._delegate.compileModuleAsync(e)},e.prototype.compileModuleAndAllComponentsSync=function(e){var t=this._delegate.compileModuleAndAllComponentsSync(e);return{ngModuleFactory:t.ngModuleFactory,componentFactories:t.componentFactories}},e.prototype.compileModuleAndAllComponentsAsync=function(e){return this._delegate.compileModuleAndAllComponentsAsync(e).then(function(e){return{ngModuleFactory:e.ngModuleFactory,componentFactories:e.componentFactories}})},e.prototype.loadAotSummaries=function(e){this._delegate.loadAotSummaries(e)},e.prototype.hasAotSummary=function(e){return this._delegate.hasAotSummary(e)},e.prototype.getComponentFactory=function(e){return this._delegate.getComponentFactory(e)},e.prototype.clearCache=function(){this._delegate.clearCache()},e.prototype.clearCacheFor=function(e){this._delegate.clearCacheFor(e)},e.prototype.getModuleId=function(e){var t=this._metadataResolver.getNgModuleMetadata(e);return t&&t.id||void 0},e}(),g=[{provide:t.CompileReflector,useValue:new u},{provide:t.ResourceLoader,useValue:m},{provide:t.JitSummaryResolver,deps:[]},{provide:t.SummaryResolver,useExisting:t.JitSummaryResolver},{provide:r.ɵConsole,deps:[]},{provide:t.Lexer,deps:[]},{provide:t.Parser,deps:[t.Lexer]},{provide:y,useClass:t.HtmlParser,deps:[]},{provide:t.I18NHtmlParser,useFactory:function(e,o,n,i,s){return new t.I18NHtmlParser(e,o=o||"",n,o?i.missingTranslation:r.MissingTranslationStrategy.Ignore,s)},deps:[y,[new r.Optional,new 
r.Inject(r.TRANSLATIONS)],[new r.Optional,new r.Inject(r.TRANSLATIONS_FORMAT)],[t.CompilerConfig],[r.ɵConsole]]},{provide:t.HtmlParser,useExisting:t.I18NHtmlParser},{provide:t.TemplateParser,deps:[t.CompilerConfig,t.CompileReflector,t.Parser,t.ElementSchemaRegistry,t.I18NHtmlParser,r.ɵConsole]},{provide:t.DirectiveNormalizer,deps:[t.ResourceLoader,t.UrlResolver,t.HtmlParser,t.CompilerConfig]},{provide:t.CompileMetadataResolver,deps:[t.CompilerConfig,t.HtmlParser,t.NgModuleResolver,t.DirectiveResolver,t.PipeResolver,t.SummaryResolver,t.ElementSchemaRegistry,t.DirectiveNormalizer,r.ɵConsole,[r.Optional,t.StaticSymbolCache],t.CompileReflector,[r.Optional,d]]},f,{provide:t.StyleCompiler,deps:[t.UrlResolver]},{provide:t.ViewCompiler,deps:[t.CompileReflector]},{provide:t.NgModuleCompiler,deps:[t.CompileReflector]},{provide:t.CompilerConfig,useValue:new t.CompilerConfig},{provide:r.Compiler,useClass:R,deps:[r.Injector,t.CompileMetadataResolver,t.TemplateParser,t.StyleCompiler,t.ViewCompiler,t.NgModuleCompiler,t.SummaryResolver,t.CompileReflector,t.CompilerConfig,r.ɵConsole]},{provide:t.DomElementSchemaRegistry,deps:[]},{provide:t.ElementSchemaRegistry,useExisting:t.DomElementSchemaRegistry},{provide:t.UrlResolver,deps:[r.PACKAGE_ROOT_URL]},{provide:t.DirectiveResolver,deps:[t.CompileReflector]},{provide:t.PipeResolver,deps:[t.CompileReflector]},{provide:t.NgModuleResolver,deps:[t.CompileReflector]}],C=function(){function e(e){this._defaultOptions=l([{useJit:!0,defaultEncapsulation:r.ViewEncapsulation.Emulated,missingTranslation:r.MissingTranslationStrategy.Warning}],e)}return e.prototype.createCompiler=function(e){void 0===e&&(e=[]);var o=function n(e){return{useJit:v(e.map(function(e){return e.useJit})),defaultEncapsulation:v(e.map(function(e){return e.defaultEncapsulation})),providers:function t(e){var t=[];return e.forEach(function(e){return e&&t.push.apply(t,l(e))}),t}
/**
* @license
* Copyright Google Inc. All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/(e.map(function(e){return e.providers})),missingTranslation:v(e.map(function(e){return e.missingTranslation})),preserveWhitespaces:v(e.map(function(e){return e.preserveWhitespaces}))}}(this._defaultOptions.concat(e));return r.Injector.create([g,{provide:t.CompilerConfig,useFactory:function(){return new t.CompilerConfig({useJit:o.useJit,jitDevMode:r.isDevMode(),defaultEncapsulation:o.defaultEncapsulation,missingTranslation:o.missingTranslation,preserveWhitespaces:o.preserveWhitespaces})},deps:[]},o.providers]).get(r.Compiler)},e}();function v(e){for(var t=e.length-1;t>=0;t--)if(void 0!==e[t])return e[t]}var h=r.createPlatformFactory(r.platformCore,"coreDynamic",[{provide:r.COMPILER_OPTIONS,useValue:{},multi:!0},{provide:r.CompilerFactory,useClass:C,deps:[r.COMPILER_OPTIONS]}]),I=function(e){function t(){return null!==e&&e.apply(this,arguments)||this}return s(t,e),t.prototype.get=function(e){var t,r,o=new Promise(function(e,o){t=e,r=o}),n=new XMLHttpRequest;return n.open("GET",e,!0),n.responseType="text",n.onload=function(){var o=n.response||n.responseText,i=1223===n.status?204:n.status;0===i&&(i=o?200:0),200<=i&&i<=300?t(o):r("Failed to load "+e)},n.onerror=function(){r("Failed to load "+e)},n.send(),o},function o(e,t,r,n){var i,s=arguments.length,a=s<3?t:null===n?n=Object.getOwnPropertyDescriptor(t,r):n;if("object"==typeof Reflect&&"function"==typeof Reflect.decorate)a=Reflect.decorate(e,t,r,n);else for(var l=e.length-1;l>=0;l--)(i=e[l])&&(a=(s<3?i(a):s>3?i(t,r,a):i(t,r))||a);return s>3&&a&&Object.defineProperty(t,r,a),a}([r.Injectable()],t)}(t.ResourceLoader),_=[n.ɵINTERNAL_BROWSER_PLATFORM_PROVIDERS,{provide:r.COMPILER_OPTIONS,useValue:{providers:[{provide:t.ResourceLoader,useClass:I,deps:[]}]},multi:!0},{provide:r.PLATFORM_ID,useValue:o.ɵPLATFORM_BROWSER_ID}],M=function(e){function t(){var t=e.call(this)||this;if(t._cache=r.ɵglobal.$templateCache,null==t._cache)throw new Error("CachedResourceLoader: Template cache was not found in $templateCache.");return 
t}return s(t,e),t.prototype.get=function(e){return this._cache.hasOwnProperty(e)?Promise.resolve(this._cache[e]):Promise.reject("CachedResourceLoader: Did not find cached template for "+e)},t}(t.ResourceLoader),E=new r.Version("7.1.1"),O=[{provide:t.ResourceLoader,useClass:M,deps:[]}],A=r.createPlatformFactory(h,"browserDynamic",_);
/**
* @license
* Copyright Google Inc. All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
/**
* @license
* Copyright Google Inc. All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
e.ɵangular_packages_platform_browser_dynamic_platform_browser_dynamic_a=M,e.RESOURCE_CACHE_PROVIDER=O,e.platformBrowserDynamic=A,e.VERSION=E,e.JitCompilerFactory=C,e.ɵCompilerImpl=R,e.ɵplatformCoreDynamic=h,e.ɵINTERNAL_BROWSER_DYNAMIC_PLATFORM_PROVIDERS=_,e.ɵResourceLoaderImpl=I,Object.defineProperty(e,"__esModule",{value:!0})});
|
# Call settings for the google.ads.googleads.v6 ConversionUploadService:
# retry-code classes, backoff/timeout parameters, and per-method options.
config = {
    "interfaces": {
        "google.ads.googleads.v6.services.ConversionUploadService": {
            # Which gRPC status codes are retried, per retry class.
            "retry_codes": {
                "idempotent": [
                    "DEADLINE_EXCEEDED",
                    "UNAVAILABLE"
                ],
                "non_idempotent": []
            },
            # Exponential-backoff and RPC timeout parameters (milliseconds).
            "retry_params": {
                "default": {
                    "initial_retry_delay_millis": 5000,
                    "retry_delay_multiplier": 1.3,
                    "max_retry_delay_millis": 60000,
                    "initial_rpc_timeout_millis": 3600000,
                    "rpc_timeout_multiplier": 1.0,
                    "max_rpc_timeout_millis": 3600000,
                    "total_timeout_millis": 3600000
                }
            },
            # Per-RPC settings: both upload methods use the non-idempotent
            # (no-retry) code class with the default backoff parameters.
            "methods": {
                "UploadClickConversions": {
                    "timeout_millis": 60000,
                    "retry_codes_name": "non_idempotent",
                    "retry_params_name": "default"
                },
                "UploadCallConversions": {
                    "timeout_millis": 60000,
                    "retry_codes_name": "non_idempotent",
                    "retry_params_name": "default"
                }
            }
        }
    }
}
|
# Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""service-management convert-config command."""
import os
from apitools.base.py import encoding
from googlecloudsdk.api_lib.service_management import services_util
from googlecloudsdk.calliope import base
from googlecloudsdk.calliope import exceptions
from googlecloudsdk.core import log
@base.ReleaseTracks(base.ReleaseTrack.ALPHA, base.ReleaseTrack.BETA)
class ConvertConfig(base.Command):
  """Convert Swagger specification to Google service configuration.

  DEPRECATED: This command is deprecated and will be removed soon.
  """

  @staticmethod
  def Args(parser):
    """Args is called by calliope to gather arguments for this command.

    Args:
      parser: An argparse parser that you can use to add arguments that go
          on the command line after this command. Positional arguments are
          allowed.
    """
    parser.add_argument(
        'open_api_file',
        help='The file path containing the Open API specification to convert.')
    parser.add_argument(
        'output_file', nargs='?', default='',
        help=('The file path of the output file containing the converted '
              'configuration. Output to standard output if omitted.'))

  def Run(self, args):
    """Run 'service-management convert-config'.

    Args:
      args: argparse.Namespace, The arguments that this command was invoked
          with.

    Returns:
      The response from the ConvertConfig API call.

    Raises:
      IOError: An IOError is returned if the input file cannot be read, or
          the output file cannot be written to.
    """
    # Use log.warning for consistency with the diagnostics loop below;
    # log.warn is the deprecated alias.
    log.warning('This command is deprecated and will be removed soon.')
    messages = services_util.GetMessagesModule()
    client = services_util.GetClientInstance()

    # TODO(user): Add support for swagger file references later
    # This requires the API to support multiple files first. b/23353397
    try:
      with open(args.open_api_file) as f:
        open_api_spec = messages.OpenApiSpec(openApiFiles=[
            messages.ConfigFile(
                filePath=os.path.basename(args.open_api_file),
                contents=f.read())
        ])
    except IOError:
      raise exceptions.ToolException.FromCurrent(
          'Cannot open {f} file'.format(f=args.open_api_file))

    request = messages.ConvertConfigRequest(openApiSpec=open_api_spec)
    converted_config = client.v1.ConvertConfig(request)

    # Surface conversion diagnostics: ERROR-kind entries via log.error,
    # everything else as warnings.
    diagnostics = converted_config.diagnostics
    if diagnostics:
      kind = messages.Diagnostic.KindValueValuesEnum
      for diagnostic in diagnostics:
        logger = log.error if diagnostic.kind == kind.ERROR else log.warning
        logger('{l}: {m}'.format(l=diagnostic.location, m=diagnostic.message))

    service = converted_config.serviceConfig
    if service:
      if args.output_file:
        # Persist the converted service config as JSON to the given path.
        try:
          with open(args.output_file, 'w') as out:
            out.write(encoding.MessageToJson(service))
        except IOError:
          raise exceptions.ToolException.FromCurrent(
              'Cannot open output file \'{f}\''.format(f=args.output_file))
      else:
        # No output file: return the config so calliope prints it (as JSON,
        # per Format below).
        return service

  def Format(self, unused_args):
    """Render the command's return value as JSON."""
    return 'json'
|
#!/usr/bin/env python
"""Tiny json demo: parse a JSON string, then serialize a dict."""
import json

# Deserialize a JSON document into a Python dict.
x = json.loads('{"foo":"var"}')
# Parenthesized print runs under both Python 2 and Python 3.
print(x)

# Serialize a Python dict back to a JSON string.
x = {'key': 'value'}
print(json.dumps(x))
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Original source: github.com/okfn/bibserver
# Authors:
# markmacgillivray
# Etienne Posthumus (epoz)
# Francois Boulogne <fboulogne at april dot org>
import sys
import logging
import io
import re
from bibtexparser.bibdatabase import BibDatabase
# Module-level logger, named after this module.
logger = logging.getLogger(__name__)

# Public API of this module.
__all__ = ['BibTexParser']

# Python 2/3 compatibility: a unified StringIO and a unicode string type.
if sys.version_info >= (3, 0):
    from io import StringIO
    ustr = str
else:
    from StringIO import StringIO
    ustr = unicode
class BibTexParser(object):
"""
A parser for reading BibTeX bibliographic data files.
Example::
from bibtexparser.bparser import BibTexParser
bibtex_str = ...
parser = BibTexParser()
parser.ignore_nonstandard_types = False
parser.homogenise_fields = False
bib_database = bibtexparser.loads(bibtex_str, parser)
"""
def __new__(cls, data=None,
            customization=None,
            ignore_nonstandard_types=True,
            homogenise_fields=True):
    """Support the legacy API in which constructing the parser with data
    immediately parsed it and returned the resulting database.
    """
    if data is None:
        # Normal construction: hand back a fresh parser instance.
        return super(BibTexParser, cls).__new__(cls)
    # Legacy path: configure a throwaway parser and return the parsed
    # `BibDatabase` object instead of the parser itself.
    parser = BibTexParser()
    parser.customization = customization
    parser.ignore_nonstandard_types = ignore_nonstandard_types
    parser.homogenise_fields = homogenise_fields
    return parser.parse(data)
def __init__(self):
    """Create a parser for reading BibTeX files.

    :return: parser
    :rtype: `BibTexParser`
    """
    self.bib_database = BibDatabase()
    # Callback applied to every entry after parsing, for example to turn
    # a string field into a list. All values are plain strings by default.
    self.customization = None
    # Whether entries of non-standard BibTeX types (other than `book`,
    # `article`, etc.) are ignored.
    self.ignore_nonstandard_types = True
    # Whether field names are sanitised: lowercased, with aliases from
    # `alt_dict` below applied (e.g. `url` -> `link`).
    self.homogenise_fields = True
    # Character-encoding detection hangs on some sample files, so utf8
    # is mandated as the default instead.
    self.encoding = 'utf8'
    # Canonical replacements for common alternative field names.
    self.alt_dict = {
        'keyw': 'keyword',
        'keywords': 'keyword',
        'authors': 'author',
        'editors': 'editor',
        'url': 'link',
        'urls': 'link',
        'links': 'link',
        'subjects': 'subject',
    }
    # NOTE(review): appears to match BibTeX string-concatenation ('#')
    # expressions; used by record parsing outside this excerpt -- confirm.
    self.replace_all_re = re.compile(
        r'((?P<pre>"?)\s*(#|^)\s*(?P<id>[^\d\W]\w*)\s*(#|$)\s*(?P<post>"?))',
        re.UNICODE)
def _bibtex_file_obj(self, bibtex_str):
    """Wrap a BibTeX string in a StringIO, dropping a UTF-8 byte-order
    mark if one sits at the very start of the data."""
    bom = '\xef\xbb\xbf'
    if not isinstance(bom, ustr):
        bom = ustr('\xef\xbb\xbf', self.encoding, 'ignore')
    stripped = bibtex_str[3:] if bibtex_str[:3] == bom else bibtex_str
    return StringIO(stripped)
def parse(self, bibtex_str):
"""Parse a BibTeX string into an object
:param bibtex_str: BibTeX string
:type: str or unicode
:return: bibliographic database
:rtype: BibDatabase
"""
self.bibtex_file_obj = self._bibtex_file_obj(bibtex_str)
self._parse_records(customization=self.customization)
return self.bib_database
def parse_file(self, file):
"""Parse a BibTeX file into an object
:param file: BibTeX file or file-like object
:type: file
:return: bibliographic database
:rtype: BibDatabase
"""
return self.parse(file.read())
def _parse_records(self, customization=None):
"""Parse the bibtex into a list of records.
:param customization: a function
"""
def _add_parsed_record(record, records):
"""
Atomic function to parse a record
and append the result in records
"""
if record != "":
logger.debug('The record is not empty. Let\'s parse it.')
parsed = self._parse_record(record, customization=customization)
if parsed:
logger.debug('Store the result of the parsed record')
records.append(parsed)
else:
logger.debug('Nothing returned from the parsed record!')
else:
logger.debug('The record is empty')
records = []
record = ""
# read each line, bundle them up until they form an object, then send for parsing
for linenumber, line in enumerate(self.bibtex_file_obj):
logger.debug('Inspect line %s', linenumber)
if line.strip().startswith('@'):
# Remove leading whitespaces
line = line.lstrip()
logger.debug('Line starts with @')
# Parse previous record
_add_parsed_record(record, records)
# Start new record
logger.debug('The record is set to empty')
record = ""
# Keep adding lines to the record
record += line
# catch any remaining record and send it for parsing
_add_parsed_record(record, records)
logger.debug('Set the list of entries')
self.bib_database.entries = records
    def _parse_record(self, record, customization=None):
        """Parse a single raw record into a dict.

        * tidy whitespace and other rubbish
        * parse out the bibtype and citekey
        * find all the key-value pairs it contains

        ``@comment``, ``@preamble`` and ``@string`` records are stored on
        ``self.bib_database`` (comments, preambles, strings) and produce an
        empty dict. Ignored/non-standard entry types also produce ``{}``.

        :param record: a record
        :param customization: a function
        :returns: dict -- with ``type`` and ``id`` keys plus one key per
            parsed field, or ``{}`` when nothing was parsed
        """
        d = {}
        if not record.startswith('@'):
            logger.debug('The record does not start with @. Return empty dict.')
            return {}
        # if a comment record, add to bib_database.comments
        if record.lower().startswith('@comment'):
            logger.debug('The record startswith @comment')
            logger.debug('Store comment in list of comments')
            # Keep everything between the outermost braces (DOTALL: the
            # comment may span several lines).
            self.bib_database.comments.append(re.search('\{(.*)\}', record, re.DOTALL).group(1))
            logger.debug('Return an empty dict')
            return {}
        # if a preamble record, add to bib_database.preambles
        if record.lower().startswith('@preamble'):
            logger.debug('The record startswith @preamble')
            logger.debug('Store preamble in list of preambles')
            self.bib_database.preambles.append(re.search('\{(.*)\}', record, re.DOTALL).group(1))
            logger.debug('Return an empty dict')
            return {}
        # prepare record: strip per-line surrounding whitespace
        record = '\n'.join([i.strip() for i in record.split('\n')])
        if '}\n' in record:
            logger.debug('}\\n detected in the record. Clean up.')
            # Normalise line endings and drop trailing newlines first.
            record = record.replace('\r\n', '\n').replace('\r', '\n').rstrip('\n')
            # treat the case for which the last line of the record
            # does not have a coma
            if record.endswith('}\n}') or record.endswith('}}'):
                logger.debug('Missing coma in the last line of the record. Fix it.')
                record = re.sub('}(\n|)}$', '},\n}', record)
        # if a string record, put it in the replace_dict
        if record.lower().startswith('@string'):
            logger.debug('The record startswith @string')
            # "@string{key = value}": unwrap braces, fold newlines, and
            # split once on '=' into key and value.
            key, val = [i.strip().strip('{').strip('}').replace('\n', ' ') for i in record.split('{', 1)[1].strip('\n').strip(',').strip('}').split('=')]
            key = key.lower() # key is case insensitive
            val = self._string_subst_partial(val)
            # A quoted value is stored verbatim (quotes removed); a bare
            # value naming a known string resolves to that string's value.
            if val.startswith('"') or val.lower() not in self.bib_database.strings:
                self.bib_database.strings[key] = val.strip('"')
            else:
                self.bib_database.strings[key] = self.bib_database.strings[val.lower()]
            logger.debug('Return a dict')
            return d
        # for each line in record
        logger.debug('Split the record of its lines and treat them')
        kvs = [i.strip() for i in record.split(',\n')]
        # inkey/inval buffer a key whose value continues on following lines.
        inkey = ""
        inval = ""
        for kv in kvs:
            logger.debug('Inspect: %s', kv)
            # TODO: We may check that the keyword belongs to a known type
            if kv.startswith('@') and not inkey:
                # it is the start of the record - set the bibtype and citekey (id)
                logger.debug('Line starts with @ and the key is not stored yet.')
                bibtype, id = kv.split('{', 1)
                bibtype = self._add_key(bibtype)
                id = id.strip('}').strip(',')
                logger.debug('bibtype = %s', bibtype)
                logger.debug('id = %s', id)
                # Non-standard entry types abandon the whole record when
                # ignore_nonstandard_types is set.
                if self.ignore_nonstandard_types and bibtype not in ('article',
                                                                     'book',
                                                                     'booklet',
                                                                     'conference',
                                                                     'inbook',
                                                                     'incollection',
                                                                     'inproceedings',
                                                                     'manual',
                                                                     'mastersthesis',
                                                                     'misc',
                                                                     'phdthesis',
                                                                     'proceedings',
                                                                     'techreport',
                                                                     'unpublished'):
                    logger.warning('Entry type %s not standard. Not considered.', bibtype)
                    break
            elif '=' in kv and not inkey:
                # it is a line with a key value pair on it
                logger.debug('Line contains a key-pair value and the key is not stored yet.')
                key, val = [i.strip() for i in kv.split('=', 1)]
                key = self._add_key(key)
                val = self._string_subst_partial(val)
                # if it looks like the value spans lines, store details for next loop
                # (unbalanced braces, or an opening quote with no closing one).
                if (val.count('{') != val.count('}')) or (val.startswith('"') and not val.replace('}', '').endswith('"')):
                    logger.debug('The line is not ending the record.')
                    inkey = key
                    inval = val
                else:
                    logger.debug('The line is the end of the record.')
                    d[key] = self._add_val(val)
            elif inkey:
                logger.debug('Continues the previous line to complete the key pair value...')
                # if this line continues the value from a previous line, append
                # (the ', ' separator was eaten by the split on ',\n' above)
                inval += ', ' + kv
                # if it looks like this line finishes the value, store it and clear for next loop
                if (inval.startswith('{') and inval.endswith('}')) or (inval.startswith('"') and inval.endswith('"')):
                    logger.debug('This line represents the end of the current key-pair value')
                    d[inkey] = self._add_val(inval)
                    inkey = ""
                    inval = ""
                else:
                    logger.debug('This line does NOT represent the end of the current key-pair value')
        logger.debug('All lines have been treated')
        if not d:
            logger.debug('The dict is empty, return it.')
            return d
        # NOTE: bibtype/id were bound in the '@' branch above; any record
        # that produced entries in d necessarily went through that branch.
        d['type'] = bibtype
        d['id'] = id
        if customization is None:
            logger.debug('No customization to apply, return dict')
            return d
        else:
            # apply any customizations to the record object then return it
            logger.debug('Apply customizations and return dict')
            return customization(d)
def _strip_quotes(self, val):
"""Strip double quotes enclosing string
:param val: a value
:type val: string
:returns: string -- value
"""
logger.debug('Strip quotes')
val = val.strip()
if val.startswith('"') and val.endswith('"'):
return val[1:-1]
return val
def _strip_braces(self, val):
"""Strip braces enclosing string
:param val: a value
:type val: string
:returns: string -- value
"""
logger.debug('Strip braces')
val = val.strip()
if val.startswith('{') and val.endswith('}') and self._full_span(val):
return val[1:-1]
return val
def _full_span(self, val):
cnt = 0
for i in range(0, len(val)):
if val[i] == '{':
cnt += 1
elif val[i] == '}':
cnt -= 1
if cnt == 0:
break
if i == len(val) - 1:
return True
else:
return False
def _string_subst(self, val):
""" Substitute string definitions
:param val: a value
:type val: string
:returns: string -- value
"""
logger.debug('Substitute string definitions')
if not val:
return ''
for k in list(self.bib_database.strings.keys()):
if val.lower() == k:
val = self.bib_database.strings[k]
if not isinstance(val, ustr):
val = ustr(val, self.encoding, 'ignore')
return val
def _string_subst_partial(self, val):
""" Substitute string definitions inside larger expressions
:param val: a value
:type val: string
:returns: string -- value
"""
def repl(m):
k = m.group('id')
replacement = self.bib_database.strings[k.lower()] if k.lower() in self.bib_database.strings else k
pre = '"' if m.group('pre') != '"' else ''
post = '"' if m.group('post') != '"' else ''
return pre + replacement + post
logger.debug('Substitute string definitions inside larger expressions')
if '#' not in val:
return val
# TODO?: Does not match two subsequent variables or strings, such as "start" # foo # bar # "end" or "start" # "end".
# TODO: Does not support braces instead of quotes, e.g.: {start} # foo # {bar}
# TODO: Does not support strings like: "te#s#t"
return self.replace_all_re.sub(repl, val)
def _add_val(self, val):
""" Clean instring before adding to dictionary
:param val: a value
:type val: string
:returns: string -- value
"""
if not val or val == "{}":
return ''
val = self._strip_braces(val)
val = self._strip_quotes(val)
val = self._strip_braces(val)
val = self._string_subst(val)
return val
def _add_key(self, key):
""" Add a key and homogeneize alternative forms.
:param key: a key
:type key: string
:returns: string -- value
"""
key = key.strip().strip('@').lower()
if self.homogenise_fields:
if key in list(self.alt_dict.keys()):
key = self.alt_dict[key]
if not isinstance(key, ustr):
return ustr(key, 'utf-8')
else:
return key
|