initial commit

Dan Ritz 2024-05-03 13:26:53 -04:00
commit c2b9a2e15d
72 changed files with 575276 additions and 0 deletions

app/Packages/Oauth2.pm
@@ -0,0 +1,223 @@
package Oauth2;
use strict;
use warnings;
use v5.22;
use feature qw(signatures);
no warnings qw(experimental::signatures);
use Moo;
use utf8;
use LWP::Authen::OAuth2;
use DDP;
use Mojo::JSON qw(decode_json);
use Mojo::Pg;
use SQL::Abstract;
use POSIX qw(strftime);
use HTML::Strip;
use Encoding::FixLatin qw(fix_latin);
has 'db' => (
is => 'rw',
lazy => 1,
builder => '_build_db',
);
has 'db_user' => (
is => 'rw',
default => 'db_master',
);
has 'db_pass' => (
is => 'rw',
default => 'LTL%9O9^Wj&jmX4lEg',
);
has 'db_uri' => (
is => 'rw',
default => 'nehantic-dev.cnxhiwukntah.us-east-1.rds.amazonaws.com',
);
has 'db_name' => (
is => 'rw',
default => 'nehantic_data',
);
has 'authorization_endpoint' => (
is => 'rw',
required => 1,
);
has 'token_endpoint' => (
is => 'rw',
required => 1,
);
#has 'api_endpoint' => (
# is => 'rw',
# required => 1,
#);
has 'grant_type' => (
is => 'rw',
required => 1,
);
has 'client_id' => (
is => 'rw',
required => 1,
);
has 'client_secret' => (
is => 'rw',
required => 1,
);
has 'redirect_uri' => (
is => 'rw',
required => 1,
);
has 'scope' => (
is => 'rw',
required => 0,
);
has 'user' => (
is => 'rw',
required => 1,
);
has 'token' => (
is => 'rw',
lazy => 1,
builder => '_build_token',
);
has 'code' => (
is => 'ro'
);
has 'refresh_token' => (
is => 'rw',
lazy => 1,
builder => '_build_refresh_token',
);
has 'OAuth2' => (
    is => 'rw',
    lazy => 1,
    builder => '_build_OAuth2',
);
has 'content_url' => (
is => 'rw',
required => 0,
);
has 'results' => (
is => 'rw',
lazy => 1,
builder => '_build_results',
);
# NOTE: token persistence is handled by the save_tokens callback wired up in
# _build_OAuth2 below (see _save_token); a lazy attribute cannot pass the
# username and token string to its builder.
sub _build_db ($self) {
my $user = $self->db_user;
my $pass = $self->db_pass;
my $db_uri = $self->db_uri;
my $db_name = $self->db_name;
my $pg = Mojo::Pg->new("postgresql://$user:$pass\@$db_uri/$db_name");
my $db = $pg->db;
return $db;
}
sub _build_token ($self) {
my $user = $self->user;
my $db = $self->db;
my $token = $db->select('oauth2_tokens', ['token_string'], {username=>$user})->array;
    return $token ? $token->[0] : undef;
}
sub _save_token ($self, $user, $token) {
my $db = $self->db;
#my $user = $self->user;
#my $token = $self->token;
if (! $db->select('oauth2_tokens', ['username'], {username=>$user} )->array) {
$db->insert('oauth2_tokens', {token_string=>$token, username=>$user,});
return;
}
else {
        my $ts = strftime("%m/%d/%Y %H:%M:%S", localtime);
$db->update('oauth2_tokens', {token_string=>$token, modify_timestamp=>$ts}, {username=>$user});
return;
}
}
sub get_initial_access_token ($self) {
    my $oauth2 = $self->OAuth2;
    return $oauth2;
}
sub _build_refresh_token ($self) {
my $oauth2 = $self->OAuth2;
my $refresh_token = decode_json($self->token)->{'refresh_token'};
my $res = $oauth2->request_tokens(
grant_type => 'refresh_token',
refresh_token => $refresh_token,
client_id => $self->client_id,
client_secret => $self->client_secret,
);
    p $res;
    return $res;
}
sub _build_OAuth2 ($self) {
my $oauth2 = LWP::Authen::OAuth2->new(
client_id => $self->client_id,
client_secret => $self->client_secret,
redirect_uri => $self->redirect_uri,
authorization_endpoint => $self->authorization_endpoint,
token_endpoint => $self->token_endpoint,
#request_required_params => [ 'grant_type', 'client_id', 'client_secret'],
#request_optional_params => [ 'scope', 'code', 'refresh_token', 'authorization_token', 'redirect_uri'],
#refresh_required_params => ['client_id', 'client_secret', 'refresh_token'],
#refresh_optional_params
        # Optional hook, but recommended. LWP::Authen::OAuth2 passes the
        # serialized token string to this callback; persist it for our user.
        save_tokens => sub { my ($token_string) = @_; $self->_save_token($self->user, $token_string) },
# This is for when you have tokens from last time.
token_string => $self->token,
);
return $oauth2;
}
sub _build_results ($self) {
my $content_url = $self->content_url;
my $access_token = decode_json($self->token)->{'access_token'};
my $companyid = '193514809224424';
my $endpoint = "https://sandbox-quickbooks.api.intuit.com/v3/company/$companyid";
my $oauth2 = $self->OAuth2;
my $url = "$endpoint.$content_url";
my %headers = (Authorization => "Bearer $access_token",
Accept => 'application/json');
my $res = $oauth2->get($url, %headers);
my $json = $res->decoded_content;
return $json;
}
1; # Moo classes need no make_immutable; a true return value suffices
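For orientation, a minimal caller sketch. Everything below is illustrative: the Intuit endpoint URLs, credentials, and content_url are placeholders, not values from this commit, and the token itself is lazily loaded from the oauth2_tokens table over the hardcoded Postgres connection.

    use lib 'app/Packages';
    use Oauth2;

    # Hypothetical caller -- every endpoint/credential here is a placeholder.
    my $auth = Oauth2->new(
        authorization_endpoint => 'https://appcenter.intuit.com/connect/oauth2',
        token_endpoint         => 'https://oauth.platform.intuit.com/oauth2/v1/tokens/bearer',
        grant_type             => 'refresh_token',
        client_id              => 'CLIENT_ID_HERE',
        client_secret          => 'CLIENT_SECRET_HERE',
        redirect_uri           => 'https://example.com/authcode',
        user                   => 'user@example.com',
        content_url            => 'companyinfo/193514809224424',  # appended after a literal '.' by _build_results
    );
    my $json = $auth->results;  # lazily builds the LWP::Authen::OAuth2 client, then GETs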

app/Packages/sqlConn.pm
@@ -0,0 +1,140 @@
package sqlConn;
# use DBIx::Log4perl;
# use DBI::Profile;
use Moo;
use MooX::HandlesVia;
use DBD::Pg;
use DDP;
use v5.26;
use feature 'signatures';
no warnings qw(experimental::signatures);
has 'db' => ( is => 'rw', required => 1 );
has 'dbh' => ( is => 'lazy' );
has 'sql' => ( is => 'rw', trigger => sub { $_[0]->_clear_results } ); # new SQL invalidates cached results
has 'bind' => (is => 'rw' );
has 'results' => ( is => 'lazy', clearer => '_clear_results' );
has 'column_names' => ( is => 'ro', writer => '_set_column_names');
# my $logconf;
# my $log;
# if (-e 'N:\conversion\Automation\Packages') {
# $logconf = 'N:\conversion\Automation\Logs\log.conf';
# $log = 'generic';
# }
# else {
# $logconf = '/mnt/cvgserver2/conversion/Automation/Logs/log_linux.conf';
# $log = 'linux';
# }
#
# say "using $log logging: $logconf";
#
# Log::Log4perl->init( $logconf );
# my $logger = Log::Log4perl->get_logger($log);
my %db_conn = (
pc => {
database => 'nehantic_data',
host => 'nehantic-dev.cnxhiwukntah.us-east-1.rds.amazonaws.com',
# host => 'localhost',
port => '5432',
user => 'pc',
pass => 'ydY4&Hz4p4j4^h',
dsn => "DBI:Pg:dbname=",
},
superbase => {
database => 'superbase',
# host => 'nehantic-dev.cnxhiwukntah.us-east-1.rds.amazonaws.com',
host => 'localhost',
port => '5432',
user => 'superfly',
pass => 'C33ucme!',
dsn => "DBI:Pg:dbname=",
},
);
sub _build_dbh {
my $self = shift;
my $db = $self->db;
my $database = $db_conn{$db}->{'database'} // '';
my $host = $db_conn{$db}->{'host'} // '';
my $port = $db_conn{$db}->{'port'} // '';
my $user = $db_conn{$db}->{'user'};
my $password = $db_conn{$db}->{'pass'};
my $dsn = $db_conn{$db}->{'dsn'} . "$database;host=$host;port=$port";
my $dbh;
#if ($db eq 'qb') {
# say $db_conn{$db}->{'dsn'};
# $dbh = DBI->connect('dbi:ODBC:QB64') || $logger->logdie("couldn't connect to source database: $!");
#}
#elsif ($db eq 'infor') {
# say $db_conn{$db}->{'dsn'};
# $dbh = DBIx::Log4perl->connect($db_conn{$db}->{'dsn'}, $db_conn{$db}->{'user'}, $db_conn{$db}->{'pass'}) || $logger->logdie("couldn't connect to source database: $!");
# }
# elsif ($db_conn{$db}->{'conn_str'}) {
# $dbh = DBIx::Log4perl->connect($db_conn{$db}->{'conn_str'}, {
# 'PrintError' => 0,
# 'RaiseError' => 1,
# 'AutoCommit' => 1,
# });
# }
#
# else {
# $dbh = DBIx::Log4perl->connect($dsn, $user, $password, {
$dbh = DBI->connect($dsn, $user, $password, {
'PrintError' => 1,
'RaiseError' => 1,
'AutoCommit' => 1,
});
# }
$dbh->{Profile} = 0;
return $dbh;
}
sub _build_results {
my $self = shift;
my $dbh = $self->dbh;
my $sql = $self->sql;
my $sth = $dbh->prepare($sql);
    if (my $bind = $self->bind) {
        $sth->execute($bind);
    }
else {
$sth->execute();
}
$self->_set_column_names($sth->{NAME});
return $sth->fetchall_arrayref;
}
sub triggers ($self) {
my $sql = qq{
    SELECT tgrelid::regclass tablename, t.tgname triggername
FROM pg_trigger t, pg_proc p, pg_class c
WHERE c.oid = t.tgrelid AND t.tgfoid=p.oid
AND c.relkind = 'r'
};
$self->sql($sql);
}
sub tables ($self) {
my $sql = qq{
SELECT table_schema || '.' || table_name "tablename"
FROM information_schema.tables
WHERE table_type = 'BASE TABLE'
AND table_schema NOT IN ('pg_catalog', 'information_schema')
};
$self->sql($sql);
}
1;
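For reference, a short sketch of how this class is meant to be driven, mirroring the bidmatch script in this commit: the helper methods load a query into sql, and results runs it. Connection details come from the %db_conn hash above, so this assumes that database is reachable.

    use v5.26;
    use lib 'app/Packages';
    use sqlConn;

    my $conn = sqlConn->new( db => 'pc' );     # connection details come from %db_conn
    $conn->tables;                             # loads the table-listing SQL into ->sql
    my $rows = $conn->results;                 # prepares, executes, fetches all rows
    say join ', ', @{ $conn->column_names };   # column names captured during execute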

app/__init__.py
@@ -0,0 +1,32 @@
import logging
from flask import Flask
from flask_appbuilder import AppBuilder, SQLA
"""
Logging configuration
"""
logging.basicConfig(format="%(asctime)s:%(levelname)s:%(name)s:%(message)s")
logging.getLogger().setLevel(logging.DEBUG)
app = Flask(__name__)
app.config.from_object("config")
db = SQLA(app)
appbuilder = AppBuilder(app, db.session)
"""
from sqlalchemy.engine import Engine
from sqlalchemy import event
# Only include this for SQLite constraints
@event.listens_for(Engine, "connect")
def set_sqlite_pragma(dbapi_connection, connection_record):
    # Forces SQLite to enforce foreign key constraints
cursor = dbapi_connection.cursor()
cursor.execute("PRAGMA foreign_keys=ON")
cursor.close()
"""
from . import models, views

app/models.py
@@ -0,0 +1,56 @@
from flask_appbuilder import Model
from sqlalchemy import Column, Integer, String, ForeignKey, Text, Date
from flask import Markup
from sqlalchemy.orm import relationship
class MatchType(Model):
id = Column(Integer, primary_key=True)
match_type = Column(String(50), unique=True, nullable=False)
def __repr__(self):
return self.match_type
class PassReason(Model):
id = Column(Integer, primary_key=True)
reason = Column(String(50), unique=True, nullable=False)
def __repr__(self):
return self.reason
class opportunityReport(Model):
id = Column(Integer, primary_key=True)
notif_source = Column(String)
notif_date = Column(Date)
notif_url = Column(String)
notif_details = Column(Text)
oppty_desc = Column(String)
oppty_source = Column(String)
oppty_date = Column(Date)
oppty_url = Column(String)
match_id = Column(Integer, ForeignKey('match_type.id'))
match = relationship("MatchType")
naics = Column(String)
pop = Column(String)
setaside = Column(String)
pass_id = Column(Integer, ForeignKey('pass_reason.id'))
pass_reason = relationship("PassReason")
def __repr__(self):
return self.notif_details
def description(self):
return Markup(f'')
    def notif_link(self):
        return Markup(f'<a href="{self.notif_url}" target="_blank">link</a>')
    def oppty_link(self):
        #return Markup(f'[link]: {self.oppty_url}')
        return Markup(f'<a href="{self.oppty_url}" target="_blank">link</a>')
    def notif_details_link(self):
        #return Markup(f'[link]: {self.oppty_url}')
        return Markup(f'<a href="{self.oppty_url}" target="_blank">{self.notif_details}</a>')

@@ -0,0 +1,226 @@
use strict;
use warnings;
use feature 'say';
use v5.26;
use feature qw(signatures);
no warnings qw(experimental::signatures);
use HTML::TableExtract;
use Mojo::UserAgent;
use Mojo::JSON qw(j encode_json);
use Excel::Writer::XLSX;
use Text::CSV;
use DDP;
use SQL::Abstract::Pg;
use Time::Piece;
# use Net::SSH::Tunnel;
use lib '/mnt/vol/dev/pc/Packages/';
use sqlConn;
# init SSH Tunnel
# system ("ssh -fNT -L 5433:localhost:5432 ubuntu\@18.214.5.20 -i /home/superfly/.ssh/nehantic-dev.pem -p 2222");
# initialize DB
my $dbh = sqlConn->new( db => 'pc' )->dbh;
my $sql = SQL::Abstract::Pg->new(); # the Pg subclass supports on_conflict, used in insert_rows
my $sql_tbl = 'pc.opportunity';
my $base_url = 'http://www.mybidmatch.com';
my $bm_home = 'http://www.mybidmatch.com/go?sub=058FAE4A-456C-4FF5-BA74-8EDD033DE87D';
##find previous
my ($stmt, @bind) = $sql->select($sql_tbl, 'notif_url');
my $sth = $dbh->prepare($stmt);
$sth->execute(@bind);
my $prev = $sth->fetchall_hashref('notif_url');
my $bm = extract_bidmatch($bm_home);
for my $day (@$bm) {
"a" =~ /a/; ## Reset captures to undef.
my $url = $day->[0];
my $date = $day->[1];
    next if $prev->{$url};
    my $results = get_results($url);
    next unless $results && @$results;   # page may report "No articles"
    push @$_, $url, $date for @$results;
    get_sam_data($results);              # augments each row in place with SAM JSON
    insert_rows($results);
}
refresh_report();
sub refresh_report() {
my $sql = q'INSERT INTO pc.opportunity_report
SELECT 1, e.* FROM pc.opportunity_extract e LEFT JOIN pc.opportunity_report r USING (id)
WHERE r.id IS NULL';
$dbh->do($sql);
}
sub get_results($url) {
my $ua = Mojo::UserAgent->new;
my $res = $ua->get($url)->result->to_string;
return if $res =~ /No articles/;
$res =~ /(<table(?:.*class="data")>?(.|\n)*?<\/table>)/gm;
return extract_table($1);
}
sub extract_table($html) {
my $te = HTML::TableExtract->new(keep_html => 1, headers => [qw(# Source Agency FSG Title Keywords)]);
$te->parse($html);
my @results;
# Examine all matching tables
for my $ts ($te->tables) {
for my $row ($ts->rows) {
$row->[4] =~ s/href=\"/href=\"$base_url/;
$row->[4] =~ /<a href="(.*)">\s+?(.*)<\/a>/;
$row->[4] = $2;
$row->[6] = $1;
push(@results, $row);
}
}
return \@results
}
sub get_sam_data ($results) {
    for my $row (@$results) {
        my $bidmatch_link = $row->[6];
        my $sam_link = get_doc($bidmatch_link);
        # Only fetch SAM JSON when the detail page actually links to beta.sam.gov.
        if ($sam_link && $sam_link =~ m|https://beta.sam.gov/opp/(.*)/view|) {
            my $sam_json_link = "https://beta.sam.gov/api/prod/opps/v2/opportunities/$1";
            my $sam_json = get_sam_json($sam_json_link);
            push @$row, encode_json($sam_json);
        }
    }
}
sub insert_rows ($results) {
    my @headers = qw(id source agency fsg title keywords oppy_url notif_url date oppy_det);
my %data;
my %psc_map = (
A => 'Research and Development',
B => 'Special Studies and Analyses - Not R&D',
C => 'Architect and Engineering - Construction',
D => 'Automatic Data Processing and Telecommunication',
E => 'Purchase of Structures and Facilities',
F => 'Natural Resources and Conservation',
G => 'Social Services',
H => 'Quality Control, Testing, and Inspection',
J => 'Maintenance, Repair, and Rebuilding of Equipment',
K => 'Modification of Equipment',
L => 'Technical Representative',
M => 'Operation of Government Owned Facilities',
N => 'Installation of Equipment',
P => 'Salvage Services',
Q => 'Medical Services',
R => 'Professional, Administrative and Management Support',
S => 'Utilities and Housekeeping Services',
T => 'Photographic, Mapping, Printing, and Publications',
U => 'Education and Training',
V => 'Transportation, Travel and Relocation',
W => 'Lease or Rental of Equipment',
X => 'Lease or Rental of Facilities',
Y => 'Construction of Structures and Facilities',
Z => 'Maintenance, Repair or Alteration of Real Property',);
for my $row (@$results) {
@data{@headers} = @$row;
my %ins_data = (
notif_source => $data{'source'},
notif_date => $data{'date'},
notif_url => $data{'notif_url'},
notif_details => $data{'title'},
oppty_desc => $psc_map{$data{'fsg'}},
oppty_source => $data{'agency'},
oppty_date => '',
oppty_url => $data{'oppy_url'},
oppty_details => $data{'oppy_det'},
);
my ($stmt, @bind) = $sql->insert($sql_tbl, \%ins_data, {on_conflict => \'do nothing'});
my $sth = $dbh->prepare($stmt);
$sth->execute(@bind);
}
}
sub extract_bidmatch($url) {
my $ua = Mojo::UserAgent->new;
my $res = $ua->get($url)->result->to_string;
$res =~ /(<table(?:.*class="data")>?(.|\n)*?<\/table>)/gm;
my $html = $1;
my $te = HTML::TableExtract->new(keep_html=>1, headers=>[qw(Date Articles Read)]);
$te->parse($html);
my @results;
# Examine all matching tables
for my $ts ($te->tables) {
for my $row ($ts->rows) {
my $sel1 = $row->[0];
$sel1 =~ /<a href="(\/.*)">?(.*)<\/a>/;
my $bm_det = "http://www.mybidmatch.com$1";
my $sel2 = $2;
$sel2 =~ /.*, (\w{3} \d{1,2}), \d{4}$/;
#my $date = $1;
            my $date = Time::Piece->strptime($1 . ' 2019', '%b %d %Y')->ymd;  # NOTE: the year is hardcoded
push(@results, [$bm_det, $date]);
}
}
return \@results
}
sub write_csv ($rows, $date) {
say "writing csv";
my $csv = Text::CSV->new({binary=>1, auto_diag=>1, eol => $/});
:encoding(utf8)",">
    open my $fh, ">:encoding(utf8)", "/home/superfly/project-conquer/Project Conquer/Miscellaneous/bidmatch_$date.csv" or die "bidmatch_$date.csv: $!";
my @header = qw(# Source Agency FSG Title Keywords BM_Link Opp_Link NAICS PoP NAICS_Match);
$csv->say ($fh, \@header);
for my $row (@$rows) {
$csv->say ($fh, $row);
}
close $fh or die "bidmatch.csv: $!";
}
sub write_xlsx ($rows, $date) {
say "writing xlsx";
>",">
    # Excel::Writer::XLSX writes a complete workbook, so open for write, not append.
    open my $fh, ">", '/home/superfly/project-conquer/Project Conquer/Miscellaneous/bidmatch.xlsx' or die "Open Failed: $!";
my $xlsx = Excel::Writer::XLSX->new($fh);
my $worksheet = $xlsx->add_worksheet($date);
my @header = qw(# Source Agency FSG Title Keywords BM_Link Opp_Link NAICS PoP NAICS_Match);
$worksheet->write_row( 0, 0, \@header);
$worksheet->write_col( 1, 0, $rows);
$xlsx->close;
}
sub get_doc ($url) {
say $url;
my $ua = Mojo::UserAgent->new;
my $res = $ua->get($url)->result->to_string;
$res =~ /URL: <a href="(.*)">/;
return $1;
}
sub get_sam_json ($sam_link) {
# https://beta.sam.gov/opp/57c1b3ccd33a4635b3a9decc5b015d0e/view
# https://beta.sam.gov/api/prod/opps/v2/opportunities/57c1b3ccd33a4635b3a9decc5b015d0e
my $ua = Mojo::UserAgent->new;
my $res = $ua->get($sam_link)->result->json;
return $res;
}

@@ -0,0 +1,33 @@
use strict;
use warnings;
use v5.28;
use feature qw(signatures);
no warnings qw(experimental::signatures);
use DDP;
use lib '/mnt/vol/dev/pc/Packages/';
use sqlConn;
use Oauth2;
my $auth = Oauth2->new(
authorization_endpoint => 'https://www.linkedin.com/oauth/v2/authorization',
token_endpoint => 'https://www.linkedin.com/oauth/v2/accessToken',
grant_type => 'refresh_token',
client_id => "772wac7m82vj48",
client_secret => "zOClxQ9xwbriDDqr",
redirect_uri => 'https://auth.project-conquer.com/authcode',
scope => 'r_basicprofile%20w_messages',
user => 'info@project-conquer.com',
code => 'AQSDbcYa_70CeqJiwiW8rEa4IJVmMX6PQbJa3hEn-u9A-hE03EHPQEg4dX56yoGov821nHanIvzIbgpyaifx15CwG7tz5HOz-Td4k_Ecq5eTcY3teIe85KmoCXfdVpwQZa7zuRQ8JHNjso1MiHj7cLNrB97vwpLZsWYoY4NRLQdFX2NFcuwHeGbzWsUlCA',
);
my $client_id = $auth->client_id;
my $redirect = $auth->redirect_uri;
my $scope = $auth->scope;
# Authorization URL to visit in a browser to obtain a fresh authorization code.
my $auth_url = "https://www.linkedin.com/oauth/v2/authorization?response_type=code&client_id=$client_id&redirect_uri=$redirect&scope=$scope";
p $auth_url;

app/scripts/sql/opp_rpt.sql
@@ -0,0 +1,100 @@
DROP VIEW pc.opportunity_extract;
CREATE OR REPLACE VIEW pc.opportunity_extract AS
SELECT
id,
notif_source,
NULLIF(notif_date,'')::DATE notif_date,
notif_url::TEXT, --'<a href=' || notif_url || 'target="_blank">Link</a>' notif_url,
notif_details,
oppty_desc,
oppty_source,
NULLIF(oppty_date,'')::DATE oppty_date,
oppty_url::TEXT, --'<a href=' || oppty_url || 'target="_blank">Link</a>' oppty_url,
(oppty_details::jsonb->'data'->>'naics')::jsonb->0->'code'->>0 naics,
(oppty_details::jsonb->'data'->>'placeOfPerformance')::jsonb->'state'->>'code' pop,
(oppty_details::jsonb->'data'->>'solicitation')::jsonb->>'setAside' setaside,
(oppty_details::jsonb->'data'->>'pointOfContact')::jsonb->0->>'fullName' primary_poc_name,
(oppty_details::jsonb->'data'->>'pointOfContact')::jsonb->0->>'email' primary_poc_email,
(oppty_details::jsonb->'data'->>'pointOfContact')::jsonb->1->>'fullName' secondary_poc_fullname,
(oppty_details::jsonb->'data'->>'pointOfContact')::jsonb->1->>'email' secondary_poc_email
FROM pc.opportunity o
LEFT JOIN pc.naics n ON ( ((oppty_details::jsonb->'data'->>'naics')::jsonb->0->'code'->>0)::INTEGER = n.naics_code)
/*WHERE CASE WHEN match = 'MATCH' THEN 1
WHEN n.naics_code IS NOT NULL THEN 1
WHEN (oppty_details::jsonb->'data'->>'placeOfPerformance')::jsonb->'state'->>'code' IN ('NY', 'CT', 'MA', 'RI') THEN 1
ELSE 0 END = 1*/
ORDER BY notif_date DESC;
CREATE TABLE opportunity_report AS
SELECT * FROM pc.opportunity_extract;
INSERT INTO opportunity_report
SELECT 1, e.* FROM pc.opportunity_extract e LEFT JOIN pc.opportunity_report r USING (id)
WHERE r.id IS NULL;
INSERT INTO match_type (match_type)
SELECT DISTINCT match FROM opportunity_report;
UPDATE opportunity_report o SET match_id = m.id
FROM match_type m
WHERE m.match_type = o.match_id;
UPDATE opportunity_report SET match_id = 1 WHERE notif_date = '2019-12-22';
WITH comb AS (
SELECT primary_poc_name AS name, lower(primary_poc_email) AS email
FROM pc.opportunity_report
UNION ALL
SELECT secondary_poc_fullname, lower(secondary_poc_email)
FROM pc.opportunity_report
) SELECT DISTINCT email FROM comb;
WITH omit AS(
SELECT oppty_details::jsonb->'opportunityId' o FROM pc.opportunity WHERE match = 'SKIP')
UPDATE pc.opportunity
SET match = 'SKIP'
WHERE match IS NULL AND oppty_details::jsonb->'opportunityId' IN (SELECT * FROM omit);
WITH omit AS(
SELECT COALESCE(oppty_details::jsonb->'parent'->>'opportunityId' , oppty_details::jsonb->>'opportunityId') o, oppty_details::jsonb->>'modifiedDate' d FROM pc.opportunity WHERE NULLIF(match,'') IS NULL),
rec AS (SELECT o, COUNT(*) c, MAX(d) m FROM omit GROUP BY o HAVING COUNT(*) > 1)
UPDATE pc.opportunity o
SET --match = 'SKIP'
match = CASE WHEN oppty_details::jsonb->>'modifiedDate' != rec.m THEN 'SKIP' END
-- SELECT *
FROM rec
WHERE COALESCE(oppty_details::jsonb->'parent'->>'opportunityId' , oppty_details::jsonb->>'opportunityId') = rec.o;
-- AND match IS NULL
-- AND oppty_details::jsonb->>'modifiedDate' != rec.m
WITH omit AS(
SELECT COALESCE(oppty_details::jsonb->'parent'->>'opportunityId' , oppty_details::jsonb->>'opportunityId') o, oppty_details::jsonb->>'status' d, match
FROM pc.opportunity WHERE NULLIF(match,'') IS NULL),
rec AS (SELECT o, COUNT(*) c, MAX(d) m FROM omit GROUP BY o HAVING COUNT(*) > 1)
SELECT *
FROM pc.opportunity, rec
WHERE COALESCE(oppty_details::jsonb->'parent'->>'opportunityId' , oppty_details::jsonb->>'opportunityId') = rec.o;
--AND match IS NULL
--AND oppty_details::jsonb->>'modifiedDate' != rec.m
UPDATE pc.opportunity
SET match = 'SKIP'
WHERE notif_source = 'Sources Sought' AND NULLIF(match, '') IS NULL;
SELECT jsonb_pretty(oppty_details::jsonb) FROM pc.opportunity;
SELECT oppty_details::jsonb->'description'->0->>'body' FROM pc.opportunity
WHERE oppty_details::jsonb->'description'->0->>'body' ~* 'hardware';
SELECT oppty_details::jsonb->'parent'->>'opportunityId'
FROM pc.opportunity
WHERE notif_details ~ 'Enterprise Administrative Support Services';

app/scripts/sql/opp_upd.sql
@@ -0,0 +1,731 @@
UPDATE pc.opportunity SET match = 'REVIEW' WHERE id = 894
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 865
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 846
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 906
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 861
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 927
UPDATE pc.opportunity SET match = 'REVIEW' WHERE id = 903
UPDATE pc.opportunity SET match = 'REVIEW' WHERE id = 879
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 912
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 868
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 883
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 904
UPDATE pc.opportunity SET match = 'MATCH' WHERE id = 872
UPDATE pc.opportunity SET match = 'REVIEW' WHERE id = 895
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 878
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 873
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 866
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 788
UPDATE pc.opportunity SET match = 'REVIEW' WHERE id = 799
UPDATE pc.opportunity SET match = 'REVIEW' WHERE id = 800
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 791
UPDATE pc.opportunity SET match = 'REVIEW' WHERE id = 790
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 797
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 792
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 803
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 789
UPDATE pc.opportunity SET match = 'REVIEW' WHERE id = 811
UPDATE pc.opportunity SET match = 'REVIEW' WHERE id = 808
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 804
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 692
UPDATE pc.opportunity SET match = 'REVIEW' WHERE id = 688
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 693
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 707
UPDATE pc.opportunity SET match = 'REVIEW' WHERE id = 701
UPDATE pc.opportunity SET match = 'REVIEW' WHERE id = 709
UPDATE pc.opportunity SET match = 'REVIEW' WHERE id = 706
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 683
UPDATE pc.opportunity SET match = 'MATCH' WHERE id = 705
UPDATE pc.opportunity SET match = 'REVIEW' WHERE id = 643
UPDATE pc.opportunity SET match = 'REVIEW' WHERE id = 321
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 336
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 324
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 326
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 329
UPDATE pc.opportunity SET match = 'REVIEW' WHERE id = 342
UPDATE pc.opportunity SET match = 'REVIEW' WHERE id = 327
UPDATE pc.opportunity SET match = 'REVIEW' WHERE id = 346
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 361
UPDATE pc.opportunity SET match = 'REVIEW' WHERE id = 377
UPDATE pc.opportunity SET match = 'REVIEW' WHERE id = 375
UPDATE pc.opportunity SET match = 'REVIEW' WHERE id = 370
UPDATE pc.opportunity SET match = 'REVIEW' WHERE id = 373
UPDATE pc.opportunity SET match = 'REVIEW' WHERE id = 406
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 412
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 403
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 404
UPDATE pc.opportunity SET match = 'REVIEW' WHERE id = 420
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 411
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 419
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 479
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 464
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 473
UPDATE pc.opportunity SET match = 'MATCH' WHERE id = 472
UPDATE pc.opportunity SET match = 'REVIEW' WHERE id = 452
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 519
UPDATE pc.opportunity SET match = 'MATCH' WHERE id = 520
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 526
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 525
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 513
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 595
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 596
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 604
UPDATE pc.opportunity SET match = 'MATCH' WHERE id = 574
UPDATE pc.opportunity SET match = 'REVIEW' WHERE id = 602
UPDATE pc.opportunity SET match = 'REVIEW' WHERE id = 575
UPDATE pc.opportunity SET match = 'MATCH' WHERE id = 593
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 594
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 907
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 905
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 851
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 860
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 854
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 859
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 862
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 863
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 850
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 852
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 864
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 848
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 974
UPDATE pc.opportunity SET match = 'MATCH' WHERE id = 867
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 973
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 877
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 972
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 971
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 857
UPDATE pc.opportunity SET match = 'REVIEW' WHERE id = 858
UPDATE pc.opportunity SET match = 'MATCH' WHERE id = 970
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 847
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 926
UPDATE pc.opportunity SET match = 'MATCH' WHERE id = 876
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 969
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 896
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 968
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 967
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 966
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 965
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 874
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 870
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 869
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 964
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 963
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 962
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 977
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 1049
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 1048
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 1047
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 1046
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 1045
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 1044
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 1043
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 1042
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 1041
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 1040
UPDATE pc.opportunity SET match = 'MATCH' WHERE id = 1039
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 1038
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 1037
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 1036
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 1035
UPDATE pc.opportunity SET match = 'REVIEW' WHERE id = 1034
UPDATE pc.opportunity SET match = 'REVIEW' WHERE id = 1033
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 933
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 932
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 931
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 929
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 921
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 920
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 919
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 924
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 923
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 922
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 880
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 1051
UPDATE pc.opportunity SET match = 'REVIEW' WHERE id = 1050
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 925
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 882
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 890
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 982
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 900
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 911
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 881
UPDATE pc.opportunity SET match = 'REVIEW' WHERE id = 916
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 981
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 909
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 886
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 980
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 897
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 893
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 887
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 914
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 875
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 856
UPDATE pc.opportunity SET match = 'MATCH' WHERE id = 871
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 892
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 979
UPDATE pc.opportunity SET match = 'REVIEW' WHERE id = 853
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 934
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 930
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 898
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 849
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 917
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 978
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 976
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 855
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 975
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 918
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 915
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 910
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 908
UPDATE pc.opportunity SET match = 'REVIEW' WHERE id = 899
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 902
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 885
UPDATE pc.opportunity SET match = 'REVIEW' WHERE id = 888
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 884
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 889
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 891
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 901
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 928
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 913
UPDATE pc.opportunity SET match = 'REVIEW' WHERE id = 1032
UPDATE pc.opportunity SET match = 'REVIEW' WHERE id = 1031
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 1030
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 1029
UPDATE pc.opportunity SET match = 'REVIEW' WHERE id = 1028
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 1027
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 1026
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 1025
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 1024
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 1023
UPDATE pc.opportunity SET match = 'REVIEW' WHERE id = 1022
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 1021
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 1020
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 1019
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 1018
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 1017
UPDATE pc.opportunity SET match = 'REVIEW' WHERE id = 1016
UPDATE pc.opportunity SET match = 'REVIEW' WHERE id = 1015
UPDATE pc.opportunity SET match = 'MATCH' WHERE id = 1014
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 1013
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 1012
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 1011
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 1010
UPDATE pc.opportunity SET match = 'REVIEW' WHERE id = 1009
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 1008
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 1007
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 1006
UPDATE pc.opportunity SET match = 'REVIEW' WHERE id = 1005
UPDATE pc.opportunity SET match = 'MATCH' WHERE id = 1004
UPDATE pc.opportunity SET match = 'REVIEW' WHERE id = 1003
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 1002
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 1001
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 1000
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 999
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 998
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 997
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 996
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 995
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 994
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 993
UPDATE pc.opportunity SET match = 'MATCH' WHERE id = 992
UPDATE pc.opportunity SET match = 'MATCH' WHERE id = 991
UPDATE pc.opportunity SET match = 'REVIEW' WHERE id = 990
UPDATE pc.opportunity SET match = 'REVIEW' WHERE id = 989
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 988
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 987
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 986
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 985
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 984
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 983
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 961
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 960
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 959
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 958
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 957
UPDATE pc.opportunity SET match = 'REVIEW' WHERE id = 956
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 955
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 954
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 953
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 952
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 951
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 950
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 949
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 948
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 947
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 946
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 945
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 944
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 943
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 942
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 941
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 940
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 939
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 938
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 937
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 936
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 935
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 822
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 821
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 820
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 817
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 816
UPDATE pc.opportunity SET match = 'REVIEW' WHERE id = 806
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 824
UPDATE pc.opportunity SET match = 'REVIEW' WHERE id = 843
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 813
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 805
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 825
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 794
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 786
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 802
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 814
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 845
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 823
UPDATE pc.opportunity SET match = 'REVIEW' WHERE id = 812
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 844
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 787
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 807
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 796
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 809
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 810
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 818
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 795
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 826
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 827
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 828
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 829
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 830
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 831
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 832
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 833
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 834
UPDATE pc.opportunity SET match = 'REVIEW' WHERE id = 835
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 836
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 837
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 815
UPDATE pc.opportunity SET match = 'MATCH' WHERE id = 838
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 793
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 819
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 839
UPDATE pc.opportunity SET match = 'MATCH' WHERE id = 840
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 798
UPDATE pc.opportunity SET match = 'REVIEW' WHERE id = 841
UPDATE pc.opportunity SET match = 'REVIEW' WHERE id = 842
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 801
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 684
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 696
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 691
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 732
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 785
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 784
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 783
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 782
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 781
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 780
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 779
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 778
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 777
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 776
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 775
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 774
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 773
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 772
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 771
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 770
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 769
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 768
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 767
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 766
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 765
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 764
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 763
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 761
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 760
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 759
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 758
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 757
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 755
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 754
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 753
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 752
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 751
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 749
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 748
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 747
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 746
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 745
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 744
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 743
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 742
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 741
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 740
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 739
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 738
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 737
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 731
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 730
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 729
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 728
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 727
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 726
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 725
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 724
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 723
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 722
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 721
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 720
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 719
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 718
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 717
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 736
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 735
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 734
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 733
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 715
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 711
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 714
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 762
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 713
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 712
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 716
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 685
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 687
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 686
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 690
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 710
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 702
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 695
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 698
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 703
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 704
UPDATE pc.opportunity SET match = 'REVIEW' WHERE id = 694
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 700
UPDATE pc.opportunity SET match = 'REVIEW' WHERE id = 697
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 708
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 699
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 756
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 750
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 689
UPDATE pc.opportunity SET match = 'REVIEW' WHERE id = 657
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 655
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 649
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 659
UPDATE pc.opportunity SET match = 'MATCH' WHERE id = 660
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 667
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 645
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 666
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 665
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 662
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 651
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 650
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 652
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 642
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 647
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 646
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 648
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 664
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 663
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 654
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 653
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 656
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 682
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 681
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 680
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 679
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 658
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 669
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 641
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 670
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 668
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 644
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 671
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 661
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 672
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 673
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 674
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 675
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 676
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 677
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 678
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 351
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 328
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 341
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 337
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 333
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 325
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 323
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 343
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 344
UPDATE pc.opportunity SET match = 'REVIEW' WHERE id = 354
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 345
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 334
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 322
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 340
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 338
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 358
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 339
UPDATE pc.opportunity SET match = 'REVIEW' WHERE id = 335
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 347
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 332
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 330
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 331
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 348
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 349
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 350
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 355
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 359
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 356
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 357
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 352
UPDATE pc.opportunity SET match = 'REVIEW' WHERE id = 353
UPDATE pc.opportunity SET match = 'REVIEW' WHERE id = 387
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 396
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 369
UPDATE pc.opportunity SET match = 'REVIEW' WHERE id = 397
UPDATE pc.opportunity SET match = 'REVIEW' WHERE id = 388
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 389
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 394
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 367
UPDATE pc.opportunity SET match = 'REVIEW' WHERE id = 398
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 365
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 366
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 393
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 372
UPDATE pc.opportunity SET match = 'REVIEW' WHERE id = 371
UPDATE pc.opportunity SET match = 'REVIEW' WHERE id = 379
UPDATE pc.opportunity SET match = 'REVIEW' WHERE id = 380
UPDATE pc.opportunity SET match = 'REVIEW' WHERE id = 378
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 368
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 381
UPDATE pc.opportunity SET match = 'REVIEW' WHERE id = 382
UPDATE pc.opportunity SET match = 'REVIEW' WHERE id = 383
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 362
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 384
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 385
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 376
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 386
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 399
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 395
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 360
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 390
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 391
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 374
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 363
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 364
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 392
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 435
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 400
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 418
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 408
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 409
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 402
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 401
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 407
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 433
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 425
UPDATE pc.opportunity SET match = 'REVIEW' WHERE id = 424
UPDATE pc.opportunity SET match = 'REVIEW' WHERE id = 440
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 441
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 423
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 447
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 438
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 429
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 434
UPDATE pc.opportunity SET match = 'REVIEW' WHERE id = 439
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 446
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 436
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 426
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 422
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 442
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 443
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 432
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 427
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 444
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 428
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 431
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 430
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 445
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 413
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 415
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 437
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 410
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 414
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 405
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 416
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 417
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 421
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 455
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 460
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 486
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 458
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 485
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 484
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 481
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 463
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 480
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 459
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 468
UPDATE pc.opportunity SET match = 'SKIP' WHERE id = 453
UPDATE pc.opportunity SET match = 'REVIEW' WHERE id IN (528, 558, 569, 615, 630, 631);
UPDATE pc.opportunity SET match = 'SKIP' WHERE id IN (
448, 449, 450, 451, 454, 456, 457, 461, 462, 465, 466, 467, 469, 470, 471,
474, 475, 476, 477, 478, 482, 483, 487, 488, 489, 490, 491, 492, 493, 494,
495, 496, 497, 498, 499, 500, 501, 502, 503, 504, 505, 506, 507, 508, 509,
510, 511, 512, 514, 515, 516, 517, 518, 521, 522, 523, 524, 527, 529, 530,
531, 532, 533, 534, 535, 536, 537, 538, 539, 540, 541, 542, 543, 544, 545,
546, 547, 548, 549, 550, 551, 552, 553, 554, 555, 556, 557, 559, 560, 561,
562, 563, 564, 565, 566, 567, 568, 570, 571, 572, 573, 576, 577, 578, 579,
580, 581, 582, 583, 584, 585, 586, 587, 588, 589, 590, 591, 592, 597, 598,
599, 600, 601, 603, 605, 606, 607, 608, 609, 610, 611, 612, 613, 614, 616,
617, 618, 619, 620, 621, 622, 623, 624, 625, 626, 627, 628, 629, 632, 633,
634, 635, 636, 637, 638, 639, 640
);

View File

@ -0,0 +1,4 @@
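-- Vote tally for poll 2: one row per option; the LEFT JOIN keeps options
-- with zero votes in the result.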
SELECT o.poll_option_text AS Candidate, COUNT(v.id) AS votes
FROM oc_polls_options o LEFT JOIN oc_polls_votes v ON (v.poll_id = o.poll_id AND v.vote_option_id = o.id)
WHERE o.poll_id = 2
GROUP BY o.poll_option_text

7
app/templates/404.html Normal file
View File

@ -0,0 +1,7 @@
{% extends "appbuilder/base.html" %}
{% block content %}
<h2><center>{{_('Page not found')}}</center></h2>
{% endblock %}

Binary file not shown.

View File

@ -0,0 +1,24 @@
# Portuguese translations for PROJECT.
# Copyright (C) 2015 ORGANIZATION
# This file is distributed under the same license as the PROJECT project.
# FIRST AUTHOR <EMAIL@ADDRESS>, 2015.
#
#, fuzzy
msgid ""
msgstr ""
"Project-Id-Version: PROJECT VERSION\n"
"Report-Msgid-Bugs-To: EMAIL@ADDRESS\n"
"POT-Creation-Date: 2015-10-26 13:23+0000\n"
"PO-Revision-Date: 2015-10-26 13:22+0000\n"
"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
"Language-Team: pt <LL@li.org>\n"
"Plural-Forms: nplurals=2; plural=(n != 1)\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=utf-8\n"
"Content-Transfer-Encoding: 8bit\n"
"Generated-By: Babel 2.0\n"
#: app/templates/404.html:4
msgid "Page not found"
msgstr "Página não encontrada"

93
app/views.py Normal file
View File

@ -0,0 +1,93 @@
from flask import render_template, redirect, url_for, request
from flask_appbuilder.models.sqla.interface import SQLAInterface
from flask_appbuilder import ModelView, MasterDetailView, AppBuilder, BaseView, expose, has_access
from flask_appbuilder.models.sqla.filters import FilterEqual, FilterNotEqual, FilterStartsWith, FilterEqualFunction
from flask_appbuilder.actions import action
from . import appbuilder, db
from .models import *
class matchTypeView(ModelView):
datamodel = SQLAInterface(MatchType)
class passReasonView(ModelView):
datamodel = SQLAInterface(PassReason)
class opportunityReportModelView(ModelView):
datamodel = SQLAInterface(opportunityReport)
#base_filters = [['match_id', FilterNotEqual, 2]]
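# The bulk actions below hard-code match_id values (2 = Omit/skip, 6 = Neutral,
# 5 = Good), which presumably correspond to rows in the MatchType lookup table,
# then send the user back to the page the action was triggered from.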
@action("skipChecked", "Omit", "All checked records will be market Omit", "fa-rocket")
def skipChecked(self, items):
if isinstance(items, list):
for item in items:
item.match_id = 2
self.datamodel.edit(item)
#self.update_redirect()
else:
items.match_id = 2
self.datamodel.edit(items)
return redirect(request.referrer)
@action("neutralChecked", "Neutral", "All checked records will be marked Neutral", "fa-rocket")
def neutralChecked(self, items):
if isinstance(items, list):
for item in items:
item.match_id = 6
self.datamodel.edit(item)
#self.update_redirect()
else:
items.match_id = 6
self.datamodel.edit(items)
return redirect(request.referrer)
@action("goodChecked", "Good", "All checked records will be marked Good", "fa-rocket")
def goodChecked(self, items):
if isinstance(items, list):
for item in items:
item.match_id = 5
self.datamodel.edit(item)
#self.update_redirect()
else:
items.match_id = 5
self.datamodel.edit(items)
return redirect(request.referrer)
base_order = ('notif_date', 'desc')
list_columns = ['match', 'notif_source', 'notif_details_link', 'notif_date', 'oppty_desc', 'oppty_source', 'pass_reason']
show_columns = ['match', 'notif_source', 'notif_details_link', 'notif_link', 'notif_date', 'oppty_desc', 'oppty_link', 'oppty_source', 'naics', 'pop', 'setaside']
add_columns = ['match', 'notif_details', 'oppty_url', 'oppty_date', 'naics', 'pop', 'setaside']
edit_columns = ['match', 'pass_reason']
label_columns = {
'notif_link': 'Bidmatch',
'oppty_link': 'Opportunity',
'naics': 'NAICS',
'pop': 'PoP',
}
class matchTypeMasterView(MasterDetailView):
datamodel = SQLAInterface(MatchType)
related_views = [opportunityReportModelView]
@appbuilder.app.errorhandler(404)
def page_not_found(e):
return (
render_template(
"404.html", base_template=appbuilder.base_template, appbuilder=appbuilder
),
404,
)
db.create_all()
appbuilder.add_view(matchTypeView, 'Match Types', icon='fa-folder-open-o', category = "Opportunities")
appbuilder.add_view(passReasonView, 'Pass Reasons', icon='fa-folder-open-o', category = "Opportunities")
#appbuilder.add_view_no_menu(opportunityReportModelView)
appbuilder.add_view(matchTypeMasterView, 'Bid Opportunities', icon='fa-folder-open-o', category = "Opportunities")
appbuilder.add_view(opportunityReportModelView, 'Bid Details', icon='fa-folder-open-o', category = "Opportunities")

54
aws/README.md Normal file
View File

@ -0,0 +1,54 @@
# AWS CLI v2
This bundle contains a built executable of the AWS CLI v2.
## Installation
To install the AWS CLI v2, run the `install` script:
```
$ sudo ./install
You can now run: /usr/local/bin/aws --version
```
This will install the AWS CLI v2 at `/usr/local/bin/aws`. Assuming
`/usr/local/bin` is on your `PATH`, you can now run:
```
$ aws --version
```
### Installing without sudo
If you don't have ``sudo`` permissions or want to install the AWS
CLI v2 only for the current user, run the `install` script with the `-b`
and `-i` options:
```
$ ./install -i ~/.local/aws-cli -b ~/.local/bin
```
This will install the AWS CLI v2 in `~/.local/aws-cli` and create
symlinks for `aws` and `aws_completer` in `~/.local/bin`. For more
information about these options, run the `install` script with `-h`:
```
$ ./install -h
```
### Updating
If you run the `install` script and there is a previously installed version
of the AWS CLI v2, the script will error out. To update to the version included
in this bundle, run the `install` script with `--update`:
```
$ sudo ./install --update
```
### Removing the installation
To remove the AWS CLI v2, delete its installation directory and symlinks:
```
$ sudo rm -rf /usr/local/aws-cli
$ sudo rm /usr/local/bin/aws
$ sudo rm /usr/local/bin/aws_completer
```
Note that if you installed the AWS CLI v2 using the `-b` or `-i` options, you
will need to remove the installation directory and the symlinks from the
directories you specified.
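For example, assuming the user-local paths used earlier
(`-i ~/.local/aws-cli -b ~/.local/bin`):
```
$ rm -rf ~/.local/aws-cli
$ rm ~/.local/bin/aws
$ rm ~/.local/bin/aws_completer
```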

4
aws/config/config.yaml Normal file
View File

@ -0,0 +1,4 @@
region: us-east-1
profile: pc_main
project_code: pc
environment: main

View File

@ -0,0 +1,7 @@
region: us-east-2
regionabbr: use2
vpc_cidr: 10.70.16.0/21
pubsubnet1: 10.70.16.0/23
pubsubnet2: 10.70.18.0/23
pvtsubnet1: 10.70.20.0/23
pvtsubnet2: 10.70.22.0/23

View File

@ -0,0 +1,12 @@
---
template:
path: ec2/apps.yaml
parameters:
InstanceType: t4g.small
KeyName: pc-main-use2
AmiId: ami-0acb327475c6fd498 #Ubuntu 24.04 ARM
Environment: {{stack_group_config.environment}}
InstanceSubnetId: !stack_output_external {{stack_group_config.project_code}}-{{stack_group_config.regionabbr}}-vpc-vpc::PublicSubnet1Id
InstanceProfileArn: !stack_output_external {{stack_group_config.project_code}}-{{stack_group_config.regionabbr}}-ec2-sgs-roles::ToolsInstanceProfileArn
ToolsSg: !stack_output_external {{stack_group_config.project_code}}-{{stack_group_config.regionabbr}}-ec2-sgs-roles::ToolsSgId

View File

@ -0,0 +1,8 @@
---
template:
path: ec2/fck-nat.yaml
parameters:
vpc: !stack_output_external {{stack_group_config.project_code}}-{{stack_group_config.environment}}-{{stack_group_config.regionabbr}}-vpc-vpc::VPCId
subnet: !stack_output_external {{stack_group_config.project_code}}-{{stack_group_config.environment}}-{{stack_group_config.regionabbr}}-vpc-vpc::PublicSubnet1Id
CIDR: !stack_output_external {{stack_group_config.project_code}}-{{stack_group_config.environment}}-{{stack_group_config.regionabbr}}-vpc-vpc::VPCCIDR

View File

@ -0,0 +1,12 @@
---
template:
path: ec2/nc.yaml
parameters:
InstanceType: t4g.large
KeyName: pc-main-use2
AmiId: ami-0acb327475c6fd498 #Ubuntu 24.04 ARM
Environment: {{stack_group_config.environment}}
InstanceSubnetId: !stack_output_external {{stack_group_config.project_code}}-{{stack_group_config.regionabbr}}-vpc-vpc::PublicSubnet1Id
InstanceProfileArn: !stack_output_external {{stack_group_config.project_code}}-{{stack_group_config.regionabbr}}-ec2-sgs-roles::ToolsInstanceProfileArn
ToolsSg: !stack_output_external {{stack_group_config.project_code}}-{{stack_group_config.regionabbr}}-ec2-sgs-roles::ToolsSgId

View File

@ -0,0 +1,8 @@
---
template:
path: ec2/sgs-roles.yaml
parameters:
VpcId: !stack_output_external {{stack_group_config.project_code}}-{{stack_group_config.regionabbr}}-vpc-vpc::VPCId
VpcSgId: !stack_output_external {{stack_group_config.project_code}}-{{stack_group_config.regionabbr}}-vpc-vpc::DefaultSgId
Environment: {{stack_group_config.environment}}

View File

@ -0,0 +1,35 @@
---
template:
path: vpc/vpc-fck-nat.yaml
parameters:
VPCCIDRBlock: {{stack_group_config.vpc_cidr}}
VPCName: {{ stack_group_config.project_code }} VPC
# general
SubnetAZs: {{stack_group_config.region}}a,{{stack_group_config.region}}b
# pub
PublicSubnet1CIDRBlock: {{stack_group_config.pubsubnet1}}
PublicSubnet1Name: {{stack_group_config.project_code}} Subnet Public 1
PublicSubnet2CIDRBlock: {{stack_group_config.pubsubnet2}}
PublicSubnet2Name: {{stack_group_config.project_code}} Subnet Public 2
#priv
PrivateSubnet1CIDRBlock: {{stack_group_config.pvtsubnet1}}
PrivateSubnet1Name: {{stack_group_config.project_code}} Subnet Private 1
PrivateSubnet2CIDRBlock: {{stack_group_config.pvtsubnet2}}
PrivateSubnet2Name: {{stack_group_config.project_code}} Subnet Private 2
# routetables
Private1RouteTableName: {{stack_group_config.project_code}} Private Route 1
Private2RouteTableName: {{stack_group_config.project_code}} Private Route 2
Public1RouteTableName: {{stack_group_config.project_code}} Public Route 1
Public2RouteTableName: {{stack_group_config.project_code}} Public Route 2
...

View File

@ -0,0 +1,205 @@
Parameters:
VPCId:
Type: String
SgId:
Type: String
PubSubnet1Id:
Type: String
EfsId:
Type: String
DeploymentName:
Type: String
Hostname:
Type: String
Ami:
Type: String
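# Docker host with persistent EFS-backed storage: the access point below is
# scoped to /${DeploymentName}/data and mounted at /mnt/data by the instance
# user data via the efs mount helper.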
Resources:
EfsAPData:
Type: AWS::EFS::AccessPoint
Properties:
FileSystemId: !Ref EfsId
RootDirectory:
Path: !Sub /${DeploymentName}/data
# CreationInfo:
# OwnerUid: !FindInMap [Config, Container, Uid]
# OwnerGid: !FindInMap [Config, Container, Gid]
# Permissions: !FindInMap [Config, Container, Permission]
AppSecurityGroup:
Type: AWS::EC2::SecurityGroup
Properties:
GroupDescription: Enable HTTP access
VpcId: !Ref VPCId
SecurityGroupIngress:
- IpProtocol: tcp
FromPort: '80'
ToPort: '80'
CidrIp: 0.0.0.0/0
- IpProtocol: tcp
FromPort: '443'
ToPort: '443'
CidrIp: 0.0.0.0/0
app01:
Type: "AWS::EC2::Instance"
Properties:
ImageId: !Ref Ami
InstanceType: 't4g.small'
SubnetId: !Ref PubSubnet1Id
SecurityGroupIds:
- !Ref SgId
- !Ref AppSecurityGroup
BlockDeviceMappings:
- DeviceName: "/dev/sda1"
Ebs:
Encrypted: true
VolumeSize: 100
VolumeType: "gp2"
DeleteOnTermination: false
IamInstanceProfile: !Ref SSMInstanceProfile
UserData:
Fn::Base64: !Sub |
#!/bin/bash
hostnamectl set-hostname ${Hostname}
#yum-config-manager --add-repo https://pkgs.tailscale.com/stable/amazon-linux/2/tailscale.repo
yum update -y
yum install -y git amazon-efs-utils python3 python3-pip jq gcc #tailscale
amazon-linux-extras install ansible2 docker -y
systemctl enable docker
systemctl start docker
usermod -aG docker ssm-user
mkdir /mnt/data
chmod 777 /mnt/data
echo "${EfsAPData}:/ /mnt/data efs _netdev,noresvport,tls,iam 0 0" >> /etc/fstab
mount /mnt/data
Tags:
- Key: "Name"
Value: !Ref Hostname
- Key: "Purpose"
Value: hosting
- Key: "Environment"
Value: prod
- Key: "Application"
Value: dockerhost
- Key: "Owner"
Value: dan@project-conquer.com
- Key: "Managed By"
Value: dan@project-conquer.com
- Key: "Managed Status"
Value: active
- Key: "Schedule"
Value: always-on
- Key: "backupFrequency"
Value: daily
AmazonEC2RoleforSSM:
Type: AWS::IAM::Role
Properties:
AssumeRolePolicyDocument:
Statement:
- Effect: Allow
Principal:
Service:
- ec2.amazonaws.com
Action:
- sts:AssumeRole
Path: "/"
ManagedPolicyArns:
- arn:aws:iam::aws:policy/AmazonSSMManagedInstanceCore
- arn:aws:iam::aws:policy/AmazonSSMDirectoryServiceAccess
- arn:aws:iam::aws:policy/AmazonRDSFullAccess
SSMForEC2Policy:
Type: 'AWS::IAM::Policy'
Properties:
PolicyName: SSMForEC2
PolicyDocument:
Version: 2012-10-17
Statement:
- Effect: Allow
Action:
- ssm:DescribeAssociation
- ssm:GetDeployablePatchSnapshotForInstance
- ssm:GetDocument
- ssm:DescribeDocument
- ssm:GetManifest
- ssm:GetParameters
- ssm:ListAssociations
- ssm:ListInstanceAssociations
- ssm:PutInventory
- ssm:PutComplianceItems
- ssm:PutConfigurePackageResult
- ssm:UpdateAssociationStatus
- ssm:UpdateInstanceAssociationStatus
- ssm:UpdateInstanceInformation
- ds:CreateComputer
- ds:DescribeDirectories
Resource: "*"
- Effect: Allow
Action:
- ssmmessages:CreateControlChannel
- ssmmessages:CreateDataChannel
- ssmmessages:OpenControlChannel
- ssmmessages:OpenDataChannel
Resource: "*"
- Effect: Allow
Action:
- ec2messages:AcknowledgeMessage
- ec2messages:DeleteMessage
- ec2messages:FailMessage
- ec2messages:GetEndpoint
- ec2messages:GetMessages
- ec2messages:SendReply
Resource: "*"
- Effect: Allow
Action:
- s3:GetObject
Resource:
- !Sub arn:aws:s3:::aws-ssm-${AWS::Region}/*
- !Sub arn:aws:s3:::aws-windows-downloads-${AWS::Region}/*
- !Sub arn:aws:s3:::amazon-ssm-${AWS::Region}/*
- !Sub arn:aws:s3:::amazon-ssm-packages-${AWS::Region}/*
- !Sub arn:aws:s3:::${AWS::Region}-birdwatcher-prod/*
- !Sub arn:aws:s3:::patch-baseline-snapshot-${AWS::Region}/*
- Effect: Allow
Action:
- secretsmanager:DescribeSecret
- secretsmanager:GetSecretValue
- secretsmanager:List*
Resource: "*"
- Effect: Allow
Action:
- logs:CreateLogGroup
- logs:CreateLogStream
- logs:DescribeLogGroups
- logs:DescribeLogStreams
- logs:PutLogEvents
Resource: '*'
- Sid: AllowRDSAccess
Effect: Allow
Action: rds-db:connect
Resource: !Sub 'arn:aws:rds-db:${AWS::Region}:${AWS::AccountId}:dbuser:*/ecloud_prod'
- Sid: DenyRDSDeleteDatabase
Effect: Deny
Action: rds:DeleteDBInstance
Resource: '*'
- Sid: ECSDeployments
Effect: Allow
Action:
- ecs:*
- iam:*
- cloudformation:*
- ec2:*
Resource: '*'
Roles:
- !Ref AmazonEC2RoleforSSM
SSMInstanceProfile:
Type: AWS::IAM::InstanceProfile
Properties:
Path: "/"
Roles:
- Ref: AmazonEC2RoleforSSM

16
aws/templates/bw.yaml Normal file
View File

@ -0,0 +1,16 @@
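# ecs-cli workflow notes: create the cluster network, bring the vaultwarden
# service up behind the existing target group, and remove it again when done.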
ecs-cli up \
--vpc vpc-0fac8dc7d301bf94c \
--subnets 10.192.10.0/24,10.192.11.0/24 \
--force
ecs-cli compose \
--project-name vaultwarden service up \
--cluster-config app-config \
--container-name vaultwarden \
--container-port 80 \
--target-group-arn arn:aws:elasticloadbalancing:us-east-1:693253913071:targetgroup/vaultwarden2/5fb9a8fc699d7735
ecs-cli compose --project-name vaultwarden service rm --cluster-config app-config

100
aws/templates/ec2/apps.yaml Normal file
View File

@ -0,0 +1,100 @@
---
AWSTemplateFormatVersion: 2010-09-09
Description: EC2 / LaunchTemplate
Parameters:
Environment:
Type: String
InstanceSubnetId:
Type: AWS::EC2::Subnet::Id
InstanceType:
Type: String
AmiId:
Type: AWS::EC2::Image::Id
KeyName:
Type: String
InstanceProfileArn:
Type: String
ToolsSg:
Type: String
Resources:
ToolsEc2:
Type: AWS::EC2::Instance
Properties:
LaunchTemplate:
LaunchTemplateId: !Ref ToolsEc2LaunchTemplate
Version: !GetAtt ToolsEc2LaunchTemplate.LatestVersionNumber
EbsVolumeA:
Type: AWS::EC2::Volume
Properties:
AvailabilityZone: !GetAtt ToolsEc2.AvailabilityZone # Specify the same availability zone as the EC2 instance
Size: 100 # Specify the desired size of the volume
VolumeType: gp3 # Specify the volume type
Encrypted: true # Specify whether the volume is encrypted or not
VolumeAttachment:
Type: AWS::EC2::VolumeAttachment
Properties:
Device: /dev/xvdf # Specify the device name to attach the volume to on the EC2 instance
InstanceId: !Ref ToolsEc2 # Reference the existing EC2 instance
VolumeId: !Ref EbsVolumeA # Reference the newly created EBS volume
ToolsEc2LaunchTemplate:
Type: AWS::EC2::LaunchTemplate
Properties:
LaunchTemplateName: AppsLaunchTemplate
LaunchTemplateData:
IamInstanceProfile:
Arn: !Ref InstanceProfileArn
DisableApiTermination: true
ImageId: !Ref AmiId
InstanceType: !Ref InstanceType
KeyName: !Ref KeyName
NetworkInterfaces:
- AssociatePublicIpAddress: false
DeviceIndex: 0
Groups:
- !Ref ToolsSg
SubnetId: !Ref InstanceSubnetId
TagSpecifications:
- ResourceType: instance
Tags:
- Key: "Name"
Value: apps
UserData:
Fn::Base64: !Sub |
#!/bin/bash
# Run Ubuntu updates & install dependencies
apt update
apt upgrade -y
apt install -y unzip bzip2 podman postgresql-client jq
# Install AWS CLI V2
curl "https://awscli.amazonaws.com/awscli-exe-linux-aarch64.zip" -o "awscliv2.zip"
unzip awscliv2.zip
./aws/install
# Install Docker
curl -fsSL https://get.docker.com | sh
usermod -aG docker ubuntu
usermod -aG docker ssm-user
mkfs -t xfs /dev/nvme1n1 # Format the data volume
mount /dev/nvme1n1 /opt # Mount the volume to /opt
echo "/dev/nvme1n1 /opt xfs defaults,nofail 0 2" >> /etc/fstab # Add entry to /etc/fstab for automatic mount
InstanceEIP:
Type: "AWS::EC2::EIP"
Properties:
InstanceId: !Ref ToolsEc2
Outputs: {}
...

View File

@ -0,0 +1,111 @@
Parameters:
vpc:
Type: String
Default: "vpc-121212121212121212"
subnet:
Type: String
Default: "subnet-121212121212121212"
CIDR:
Type: String
Default: "10.0.0.0/16"
Resources:
FckNatInterface:
Type: AWS::EC2::NetworkInterface
Properties:
SubnetId: !Sub "${subnet}"
GroupSet:
- Fn::GetAtt:
- NatSecurityGroup
- GroupId
SourceDestCheck: false
FckNatAsgInstanceProfile:
Type: AWS::IAM::InstanceProfile
Properties:
Roles:
- Ref: NatRole
FckNatLaunchTemplate:
Type: AWS::EC2::LaunchTemplate
Properties:
LaunchTemplateName: FckNatLaunchTemplate
LaunchTemplateData:
ImageId: ami-05b6d5a2e26f13c93
InstanceType: t4g.nano
IamInstanceProfile:
Arn: !GetAtt FckNatAsgInstanceProfile.Arn
SecurityGroupIds:
- !GetAtt NatSecurityGroup.GroupId
UserData:
Fn::Base64:
Fn::Join:
- ""
- - |-
#!/bin/bash
echo "eni_id=
- Ref: FckNatInterface
- |-
" >> /etc/fck-nat.conf
service fck-nat restart
DependsOn:
- NatRole
FckNatAsg:
Type: AWS::AutoScaling::AutoScalingGroup
Properties:
MaxSize: "1"
MinSize: "1"
DesiredCapacity: "1"
LaunchTemplate:
LaunchTemplateId: !Ref FckNatLaunchTemplate
Version: !GetAtt FckNatLaunchTemplate.LatestVersionNumber
VPCZoneIdentifier:
- !Sub "${subnet}"
UpdatePolicy:
AutoScalingScheduledAction:
IgnoreUnmodifiedGroupSizeProperties: true
NatSecurityGroup:
Type: AWS::EC2::SecurityGroup
Properties:
GroupDescription: Security Group for NAT
SecurityGroupIngress:
- CidrIp: !Sub "${CIDR}"
IpProtocol: "-1"
SecurityGroupEgress:
- CidrIp: 0.0.0.0/0
Description: Allow all outbound traffic by default
IpProtocol: "-1"
VpcId: !Sub "${vpc}"
NatRole:
Type: AWS::IAM::Role
Properties:
AssumeRolePolicyDocument:
Statement:
- Action: sts:AssumeRole
Effect: Allow
Principal:
Service: ec2.amazonaws.com
Version: "2012-10-17"
Policies:
- PolicyDocument:
Statement:
- Action:
- ec2:AttachNetworkInterface
- ec2:ModifyNetworkInterfaceAttribute
Effect: Allow
Resource: "*"
Version: "2012-10-17"
PolicyName: attachNatEniPolicy
- PolicyDocument:
Statement:
- Action:
- ec2:AssociateAddress
- ec2:DisassociateAddress
Effect: Allow
Resource: "*"
Version: "2012-10-17"
PolicyName: associateNatAddressPolicy

100
aws/templates/ec2/nc.yaml Normal file
View File

@ -0,0 +1,100 @@
---
AWSTemplateFormatVersion: 2010-09-09
Description: EC2 / LaunchTemplate
Parameters:
Environment:
Type: String
InstanceSubnetId:
Type: AWS::EC2::Subnet::Id
InstanceType:
Type: String
AmiId:
Type: AWS::EC2::Image::Id
KeyName:
Type: String
InstanceProfileArn:
Type: String
ToolsSg:
Type: String
Resources:
ToolsEc2:
Type: AWS::EC2::Instance
Properties:
LaunchTemplate:
LaunchTemplateId: !Ref ToolsEc2LaunchTemplate
Version: !GetAtt ToolsEc2LaunchTemplate.LatestVersionNumber
EbsVolumeB:
Type: AWS::EC2::Volume
Properties:
AvailabilityZone: !GetAtt ToolsEc2.AvailabilityZone # Specify the same availability zone as the EC2 instance
Size: 100 # Specify the desired size of the volume
VolumeType: gp3 # Specify the volume type
Encrypted: true # Specify whether the volume is encrypted or not
VolumeAttachment:
Type: AWS::EC2::VolumeAttachment
Properties:
Device: /dev/xvdb # Specify the device name to attach the volume to on the EC2 instance
InstanceId: !Ref ToolsEc2 # Reference the existing EC2 instance
VolumeId: !Ref EbsVolumeB # Reference the newly created EBS volume
ToolsEc2LaunchTemplate:
DeletionPolicy: Retain
Type: AWS::EC2::LaunchTemplate
Properties:
LaunchTemplateName: ElkStackLaunchTemplate
LaunchTemplateData:
IamInstanceProfile:
Arn: !Ref InstanceProfileArn
DisableApiTermination: true
ImageId: !Ref AmiId
InstanceType: !Ref InstanceType
KeyName: !Ref KeyName
BlockDeviceMappings:
- DeviceName: "/dev/xvda"
Ebs:
Encrypted: true
VolumeSize: 500
VolumeType: "gp3"
DeleteOnTermination: true
NetworkInterfaces:
- AssociatePublicIpAddress: false
DeviceIndex: 0
Groups:
- !Ref ToolsSg
SubnetId: !Ref InstanceSubnetId
TagSpecifications:
- ResourceType: instance
Tags:
- Key: "Name"
Value: nc
UserData:
Fn::Base64: !Sub |
#!/bin/bash
# Run Ubuntu updates & install dependencies
sudo apt update
sudo apt upgrade -y
sudo apt install -y unzip bzip2 podman postgresql-client jq
# Install AWS CLI V2
curl "https://awscli.amazonaws.com/awscli-exe-linux-aarch64.zip" -o "awscliv2.zip"
unzip awscliv2.zip
sudo ./aws/install
InstanceEIP:
Type: "AWS::EC2::EIP"
Properties:
InstanceId: !Ref ToolsEc2
Outputs: {}
...

View File

@ -0,0 +1,112 @@
---
AWSTemplateFormatVersion: 2010-09-09
Description: Sgs and Roles
Parameters:
VpcId:
Type: AWS::EC2::VPC::Id
Description: VpcId of your existing Virtual Private Cloud (VPC)
VpcSgId:
Type: String
Description: Default VPC Sg the deploy creates
# CVpnSgId:
# Type: AWS::EC2::SecurityGroup::Id
Environment:
Type: String
# OnPremPlId:
# Type: String
# Description: Id of On-Prem prefix list
Resources:
ToolsSg:
Type: AWS::EC2::SecurityGroup
Properties:
GroupDescription: For video instances registered with BackendLb
SecurityGroupIngress:
# - IpProtocol: -1
# SourcePrefixListId: !Ref OnPremPlId
# Description: OnPremPlId
# - IpProtocol: -1
# SourceSecurityGroupId: !Ref CVpnSgId
# Description: CVpnSgId
- IpProtocol: -1
SourceSecurityGroupId: !Ref VpcSgId
Description: VpcSgId
- IpProtocol: tcp
FromPort: 80
ToPort: 80
CidrIp: 0.0.0.0/0
Description: HTTP
- IpProtocol: tcp
FromPort: 443
ToPort: 443
CidrIp: 0.0.0.0/0
Description: HTTPS
VpcId: !Ref VpcId
# Tools Role
ToolsRole:
Type: AWS::IAM::Role
Properties:
AssumeRolePolicyDocument:
Statement:
- Effect: Allow
Principal:
Service:
- ec2.amazonaws.com
Action:
- sts:AssumeRole
Path: "/"
ManagedPolicyArns:
- arn:aws:iam::aws:policy/PowerUserAccess
- arn:aws:iam::aws:policy/service-role/AmazonEC2RoleforSSM
ToolsInstanceProfile:
Type: AWS::IAM::InstanceProfile
Properties:
Path: "/"
Roles:
- !Ref ToolsRole
Outputs:
#
ToolsSgId:
Description: Sg Id - Tools
Value: !Ref ToolsSg
Export:
Name: !Sub "${AWS::StackName}-ToolsSgId"
#
ToolsRoleName:
Description: Tools Role Name
Value: !Ref ToolsRole
Export:
Name: !Sub "${AWS::StackName}-ToolsRoleName"
ToolsRoleArn:
Description: Tools Role Arn
Value: !GetAtt ToolsRole.Arn
Export:
Name: !Sub "${AWS::StackName}-ToolsRoleArn"
ToolsInstanceProfileName:
Description: Instance Role Name for Tools Role
Value: !Ref ToolsInstanceProfile
Export:
Name: !Sub "${AWS::StackName}-ToolsInstanceProfileName"
ToolsInstanceProfileArn:
Description: Instance Role Arn for Tools Role
Value: !GetAtt ToolsInstanceProfile.Arn
Export:
Name: !Sub "${AWS::StackName}-ToolsInstanceProfileArn"
...

1065
aws/templates/nc.yaml Normal file

File diff suppressed because it is too large

View File

@ -0,0 +1,545 @@
---
AWSTemplateFormatVersion: '2010-09-09'
Description: "VPC w/2 public, 2 private, and independent routing tables for each subnet"
Parameters:
VPCCIDRBlock:
AllowedPattern: ^(([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\.){3}([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])(\/(1[6-9]|2[0-8]))$
ConstraintDescription: CIDR block parameter must be in the form x.x.x.x/16-28
Type: String
VPCName:
Type: String
# Depends on unfiedlogging-single
# VPCFlowLogDestination:
# Description: S3 bucket arn for the VPC Flow logs
# Type: String
fckNatAmi:
Type: String
Default: ami-0d241d0ba6ea2f8f4
## general subnets
SubnetAZs:
Description: Specify comma delimited list of AZs for public/private subnets 1-2
Type: CommaDelimitedList
Default: "us-east-1a,us-east-1b"
## private subnets
PrivateSubnet1CIDRBlock:
AllowedPattern: ^(([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\.){3}([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])(\/(1[6-9]|2[0-8]))$
ConstraintDescription: CIDR block parameter must be in the form x.x.x.x/16-28
Description: CIDRBlock for Private Subnet 1
Type: String
PrivateSubnet1Name:
Description: Name of Subnet
Type: String
PrivateSubnet2CIDRBlock:
AllowedPattern: ^(([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\.){3}([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])(\/(1[6-9]|2[0-8]))$
ConstraintDescription: CIDR block parameter must be in the form x.x.x.x/16-28
Description: CIDRBlock for Private Subnet 2
Type: String
PrivateSubnet2Name:
Description: Name of Subnet
Type: String
## public subnets
PublicSubnet1CIDRBlock:
AllowedPattern: ^(([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\.){3}([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])(\/(1[6-9]|2[0-8]))$
ConstraintDescription: CIDR block parameter must be in the form x.x.x.x/16-28
Description: CIDRBlock for Public Subnet 1
Type: String
PublicSubnet1Name:
Description: Name of Subnet
Type: String
PublicSubnet2CIDRBlock:
AllowedPattern: ^(([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\.){3}([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])(\/(1[6-9]|2[0-8]))$
ConstraintDescription: CIDR block parameter must be in the form x.x.x.x/16-28
Description: CIDRBlock for Public Subnet 2
Type: String
PublicSubnet2Name:
Description: Name of Subnet
Type: String
## routetables
Private1RouteTableName:
Description: Name for route table
Type: String
Private2RouteTableName:
Description: Name for route table
Type: String
Public1RouteTableName:
Description: Name for route table
Type: String
Public2RouteTableName:
Description: Name for route table
Type: String
Resources:
## VPC Start ##
aVPC:
Type: "AWS::EC2::VPC"
Properties:
CidrBlock: !Ref VPCCIDRBlock
EnableDnsSupport: true
EnableDnsHostnames: true
Tags:
- Key: Name
Value: !Ref VPCName
# Depends on unfiedlogging-single
# VPCFlowLog:
# Type: AWS::EC2::FlowLog
# Properties:
# LogDestinationType: s3
# LogDestination: !Ref VPCFlowLogDestination
# ResourceId: !Ref aVPC
# ResourceType: VPC
# TrafficType: ALL
IGW:
Type: "AWS::EC2::InternetGateway"
IGWAttach:
Type: "AWS::EC2::VPCGatewayAttachment"
Properties:
VpcId: !Ref aVPC
InternetGatewayId: !Ref IGW
# NatGatewayEIP:
# Type: "AWS::EC2::EIP"
# Properties:
# Domain: vpc
# NatGatewayEIP2:
# Type: "AWS::EC2::EIP"
# Properties:
# Domain: vpc
## VPC Finish ##
## Private Subnets Start ##
PrivateSubnet1:
Type: "AWS::EC2::Subnet"
Properties:
CidrBlock: !Ref PrivateSubnet1CIDRBlock
VpcId: !Ref aVPC
AvailabilityZone: !Select [ 0, !Ref SubnetAZs ]
MapPublicIpOnLaunch: false
Tags:
-
Key: Name
Value: !Ref PrivateSubnet1Name
PrivateSubnet2:
Type: "AWS::EC2::Subnet"
Properties:
CidrBlock: !Ref PrivateSubnet2CIDRBlock
VpcId: !Ref aVPC
AvailabilityZone: !Select [ 1, !Ref SubnetAZs ]
MapPublicIpOnLaunch: false
Tags:
-
Key: Name
Value: !Ref PrivateSubnet2Name
## Private Subnets Finish ##
## Public Subnets Start
PublicSubnet1:
Type: "AWS::EC2::Subnet"
Properties:
CidrBlock: !Ref PublicSubnet1CIDRBlock
VpcId: !Ref aVPC
AvailabilityZone: !Select [ 0, !Ref SubnetAZs ]
MapPublicIpOnLaunch: false
Tags:
-
Key: Name
Value: !Ref PublicSubnet1Name
PublicSubnet2:
Type: "AWS::EC2::Subnet"
Properties:
CidrBlock: !Ref PublicSubnet2CIDRBlock
VpcId: !Ref aVPC
AvailabilityZone: !Select [ 1, !Ref SubnetAZs ]
MapPublicIpOnLaunch: false
Tags:
-
Key: Name
Value: !Ref PublicSubnet2Name
## Public Subnets End
## NAT Gateway Start ##
FckNatInterface:
Type: AWS::EC2::NetworkInterface
Properties:
SubnetId: !Sub "${PublicSubnet1}"
GroupSet:
- Fn::GetAtt:
- NatSecurityGroup
- GroupId
SourceDestCheck: false
FckNatAsgInstanceProfile:
Type: AWS::IAM::InstanceProfile
Properties:
Roles:
- !Ref NatRole
FckNatLaunchTemplate:
Type: AWS::EC2::LaunchTemplate
Properties:
LaunchTemplateName: FckNatLaunchTemplate
LaunchTemplateData:
ImageId: !Ref fckNatAmi
InstanceType: t4g.nano
IamInstanceProfile:
Arn: !GetAtt FckNatAsgInstanceProfile.Arn
SecurityGroupIds:
- !GetAtt NatSecurityGroup.GroupId
UserData:
Fn::Base64:
Fn::Join:
- ""
- - |-
#!/bin/bash
echo "eni_id=
- Ref: FckNatInterface
- |-
" >> /etc/fck-nat.conf
service fck-nat restart
DependsOn:
- NatRole
FckNatAsg:
Type: AWS::AutoScaling::AutoScalingGroup
Properties:
MaxSize: "1"
MinSize: "1"
DesiredCapacity: "1"
LaunchTemplate:
LaunchTemplateId: !Ref FckNatLaunchTemplate
Version: !GetAtt FckNatLaunchTemplate.LatestVersionNumber
VPCZoneIdentifier:
- !Sub "${PublicSubnet1}"
UpdatePolicy:
AutoScalingScheduledAction:
IgnoreUnmodifiedGroupSizeProperties: true
NatSecurityGroup:
Type: AWS::EC2::SecurityGroup
Properties:
GroupDescription: Security Group for NAT
SecurityGroupIngress:
- CidrIp: !Sub "${VPCCIDRBlock}"
IpProtocol: "-1"
SecurityGroupEgress:
- CidrIp: 0.0.0.0/0
Description: Allow all outbound traffic by default
IpProtocol: "-1"
VpcId: !Sub "${aVPC}"
NatRole:
Type: AWS::IAM::Role
Properties:
AssumeRolePolicyDocument:
Statement:
- Action: sts:AssumeRole
Effect: Allow
Principal:
Service: ec2.amazonaws.com
Version: "2012-10-17"
Policies:
- PolicyDocument:
Statement:
- Action:
- ec2:AttachNetworkInterface
- ec2:ModifyNetworkInterfaceAttribute
Effect: Allow
Resource: "*"
Version: "2012-10-17"
PolicyName: attachNatEniPolicy
- PolicyDocument:
Statement:
- Action:
- ec2:AssociateAddress
- ec2:DisassociateAddress
Effect: Allow
Resource: "*"
Version: "2012-10-17"
PolicyName: associateNatAddressPolicy
## NAT Gateway End ##
## RouteTables start
Private1RT:
Type: "AWS::EC2::RouteTable"
Properties:
VpcId: !Ref aVPC
Tags:
-
Key: Name
Value: !Ref Private1RouteTableName
Priv1RTAssociation:
Type: "AWS::EC2::SubnetRouteTableAssociation"
Properties:
RouteTableId: !Ref Private1RT
SubnetId: !Ref PrivateSubnet1
Priv1Route:
Type: "AWS::EC2::Route"
Properties:
DestinationCidrBlock: "0.0.0.0/0"
NetworkInterfaceId: !Ref FckNatInterface
RouteTableId: !Ref Private1RT
#
Private2RT:
Type: "AWS::EC2::RouteTable"
Properties:
VpcId: !Ref aVPC
Tags:
-
Key: Name
Value: !Ref Private2RouteTableName
Priv2RTAssociation:
Type: "AWS::EC2::SubnetRouteTableAssociation"
Properties:
RouteTableId: !Ref Private2RT
SubnetId: !Ref PrivateSubnet2
Priv2Route:
Type: "AWS::EC2::Route"
Properties:
DestinationCidrBlock: "0.0.0.0/0"
NetworkInterfaceId: !Ref FckNatInterface
RouteTableId: !Ref Private2RT
#
Public1RT:
Type: "AWS::EC2::RouteTable"
Properties:
VpcId: !Ref aVPC
Tags:
-
Key: Name
Value: !Ref Public1RouteTableName
Pub1RTAssociation:
Type: "AWS::EC2::SubnetRouteTableAssociation"
Properties:
RouteTableId: !Ref Public1RT
SubnetId: !Ref PublicSubnet1
Pub1Route:
Type: "AWS::EC2::Route"
Properties:
DestinationCidrBlock: "0.0.0.0/0"
GatewayId: !Ref IGW
RouteTableId: !Ref Public1RT
#
Public2RT:
Type: "AWS::EC2::RouteTable"
Properties:
VpcId: !Ref aVPC
Tags:
-
Key: Name
Value: !Ref Public2RouteTableName
Pub2RTAssociation:
Type: "AWS::EC2::SubnetRouteTableAssociation"
Properties:
RouteTableId: !Ref Public2RT
SubnetId: !Ref PublicSubnet2
Pub2Route:
Type: "AWS::EC2::Route"
Properties:
DestinationCidrBlock: "0.0.0.0/0"
GatewayId: !Ref IGW
RouteTableId: !Ref Public2RT
## RouteTables end
#Default SecurityGroup
VpcSg:
Type: AWS::EC2::SecurityGroup
Properties:
VpcId: !Ref aVPC
GroupDescription: provides access to all resources within this Sg
Tags:
- Key: Name
Value: VpcSg
VpcSgIngress:
Type: AWS::EC2::SecurityGroupIngress
Properties:
GroupId: !Ref VpcSg
IpProtocol: "-1"
SourceSecurityGroupId: !Ref VpcSg
######################
### VPC ENDPOINTS
### https://aws.amazon.com/blogs/architecture/reduce-cost-and-increase-security-with-amazon-vpc-endpoints/
############
VpcEndpointSg:
Type: "AWS::EC2::SecurityGroup"
Properties:
GroupName: VPCEndpoints
GroupDescription: DefaultVpcEndpointSg
VpcId: !Ref aVPC
SecurityGroupIngress:
- IpProtocol: "-1"
CidrIp: !Ref VPCCIDRBlock
SecurityGroupEgress:
- IpProtocol: "-1"
CidrIp: 0.0.0.0/0
Tags:
- Key: Name
Value: Default VPC Endpoint Sg
S3VpcEndpoint:
Type: AWS::EC2::VPCEndpoint
Properties:
PolicyDocument:
Version: 2012-10-17
Statement:
- Principal: '*'
Effect: 'Allow'
Action: 's3:*'
Resource:
- 'arn:aws:s3:::*'
- 'arn:aws:s3:::*/*'
VpcEndpointType: Gateway
VpcId: !Ref aVPC
ServiceName: !Sub 'com.amazonaws.${AWS::Region}.s3'
RouteTableIds:
- !Ref Public1RT
- !Ref Public2RT
- !Ref Private1RT
- !Ref Private2RT
Outputs:
## VPC Outputs
# NatGatewayEIP:
# Description: IP of the NAT Gateway
# Value: !Ref NatGatewayEIP
# Export:
# Name: !Sub "${AWS::StackName}-NatGatewayEIP"
VPCId:
Description: VPC Id
Value: !Ref aVPC
Export:
Name: !Sub "${AWS::StackName}-VPCId"
VPCCIDR:
Description: CIDR Block of the VPC
Value: !Ref VPCCIDRBlock
Export:
Name: !Sub "${AWS::StackName}-VPCCIDRBlock"
IGWId:
Value: !Ref IGW
Export:
Name: !Sub "${AWS::StackName}-IGWId"
StackName:
Value: !Ref AWS::StackName
## private subnets
PrivateSubnet1Id:
Description: Private Subnet 1 ID
Value: !Ref PrivateSubnet1
Export:
Name: !Sub "${AWS::StackName}-PrivateSubnet1Id"
PrivateSubnet1CIDRBlock:
Description: Private Subnet 1 CIDR block
Value: !Ref PrivateSubnet1CIDRBlock
Export:
Name: !Sub "${AWS::StackName}-PrivateSubnet1CIDRBlock"
PrivateSubnet2Id:
Description: Private Subnet2 ID
Value: !Ref PrivateSubnet2
Export:
Name: !Sub "${AWS::StackName}-PrivateSubnet2Id"
PrivateSubnet2CIDRBlock:
Description: Private Subnet 2 CIDR block
Value: !Ref PrivateSubnet2CIDRBlock
Export:
Name: !Sub "${AWS::StackName}-PrivateSubnet2CIDRBlock"
## public subnets
PublicSubnet1Id:
Description: Public Subnet 1 ID
Value: !Ref PublicSubnet1
Export:
Name: !Sub "${AWS::StackName}-PublicSubnet1Id"
PublicSubnet1CIDRBlock:
Description: Public Subnet 1 CIDR block
Value: !Ref PublicSubnet1CIDRBlock
Export:
Name: !Sub "${AWS::StackName}-PublicSubnet1CIDRBlock"
PublicSubnet2Id:
Description: Public Subnet2 ID
Value: !Ref PublicSubnet2
Export:
Name: !Sub "${AWS::StackName}-PublicSubnet2Id"
PublicSubnet2CIDRBlock:
Description: Public Subnet 2 CIDR block
Value: !Ref PublicSubnet2CIDRBlock
Export:
Name: !Sub "${AWS::StackName}-PublicSubnet2CIDRBlock"
DefaultSgName:
Description: Default Security Group
Value: !Ref VpcSg
Export:
Name: !Sub "${AWS::StackName}-DefaultSgName"
DefaultSgId:
Description: Default Security Group
Value: !GetAtt VpcSg.GroupId
Export:
Name: !Sub "${AWS::StackName}-DefaultSgId"
Private1RTId:
Description: Route Table Private 1
Value: !Ref Private1RT
Export:
Name: !Sub "${AWS::StackName}-Private1RT"
Private2RTId:
Description: Route Table Private 2
Value: !Ref Private2RT
Export:
Name: !Sub "${AWS::StackName}-Private2RT"
Public1RTId:
Description: Route Table Public 1
Value: !Ref Public1RT
Export:
Name: !Sub "${AWS::StackName}-Public1RT"
Public2RTId:
Description: Route Table Public 2
Value: !Ref Public2RT
Export:
Name: !Sub "${AWS::StackName}-Public2RT"

View File

@ -0,0 +1,33 @@
version: '3.3'
services:
vaultwarden:
platform: linux/arm64
image: vaultwarden/server:latest
container_name: vaultwarden
restart: always
environment:
WEBSOCKET_ENABLED: 'True' # Enable WebSocket notifications.
SMTP_HOST: smtp.office365.com
SMTP_FROM: dan@project-conquer.com
SMTP_PORT: 587
SMTP_SECURITY: starttls
SMTP_USERNAME: dan@project-conquer.com
SMTP_PASSWORD: Bud79124Bud79124
DATABASE_URL: 'postgresql://bitwarden:xHR8z58CtnVqMVxRKtZN@bw.project-conquer.com:5432/bitwarden'
networks:
- traefik_proxy
labels:
- "traefik.enable=true"
- "traefik.http.routers.vaultwarden.rule=Host(`bw.project-conquer.com`)"
- "traefik.http.routers.vaultwarden.entrypoints=websecure"
- "traefik.http.routers.vaultwarden.tls.certresolver=le"
- "traefik.http.services.vaultwarden.loadbalancer.server.port=80"
# - "traefik.http.routers.vaultwarden.middlewares=internal-whitelist@file"
volumes:
- ./vw-data:/data
- /var/run/docker.sock:/var/run/docker.sock
networks:
traefik_proxy:
external: true

88
deployment/gitea/app.ini Normal file
View File

@ -0,0 +1,88 @@
APP_NAME = Gitea: Git with a cup of tea
RUN_USER = git
WORK_PATH = /data/gitea
RUN_MODE = prod
[database]
DB_TYPE = postgres
HOST = 192.168.1.11:5432
NAME = gitea
USER = gitea
PASSWD = g1+3a
SCHEMA =
SSL_MODE = disable
PATH = /data/gitea/data/gitea.db
LOG_SQL = false
[repository]
ROOT = /data/gitea/data/gitea-repositories
[server]
SSH_DOMAIN = git.vangelder-inc.com
SSH_LISTEN_PORT = 22
DOMAIN = git.vangelder-inc.com
HTTP_PORT = 3000
ROOT_URL = https://git.vangelder-inc.com/
LOCAL_ROOT_URL = http://127.0.0.1:3000/
APP_DATA_PATH = /data/gitea/data
DISABLE_SSH = false
SSH_PORT = 22
LFS_START_SERVER = true
LFS_JWT_SECRET = 3AdPHT-VV_T54bLx27e7_acwE8FnwIVSKdfFnpklIpo
OFFLINE_MODE = false
[lfs]
PATH = /data/gitea/data/lfs
[mailer]
ENABLED = true
SMTP_ADDR = mail.vangelder-inc.com
SMTP_PORT = 25
FROM = git@vangelder-inc.com
USER =
PASSWD =
[service]
REGISTER_EMAIL_CONFIRM = false
ENABLE_NOTIFY_MAIL = false
DISABLE_REGISTRATION = false
ALLOW_ONLY_EXTERNAL_REGISTRATION = false
ENABLE_CAPTCHA = false
REQUIRE_SIGNIN_VIEW = false
DEFAULT_KEEP_EMAIL_PRIVATE = false
DEFAULT_ALLOW_CREATE_ORGANIZATION = true
DEFAULT_ENABLE_TIMETRACKING = true
NO_REPLY_ADDRESS = noreply.localhost
[openid]
ENABLE_OPENID_SIGNIN = true
ENABLE_OPENID_SIGNUP = true
[cron.update_checker]
ENABLED = false
[session]
PROVIDER = file
[log]
MODE = console
LEVEL = info
ROOT_PATH = /data/gitea/log
[repository.pull-request]
DEFAULT_MERGE_STYLE = merge
[repository.signing]
DEFAULT_TRUST_MODEL = committer
[security]
INSTALL_LOCK = true
INTERNAL_TOKEN = eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJuYmYiOjE2OTY1NDc1ODh9.36pFi50M1Vox6UCiuv_sLEueBflqkkhWwle5aav2HJo
PASSWORD_HASH_ALGO = pbkdf2
[oauth2]
JWT_SECRET = xqIzfzoM8jOR3JPE8erdm4xcCct-mUKhNDj7Rm5XTso
[actions]
ENABLED=true

View File

@ -0,0 +1,64 @@
version: "3"
services:
gitea:
image: gitea/gitea:1.21.11
container_name: gitea
environment:
- USER_UID=1002
- USER_GID=1002
- DB_TYPE=postgres
- DB_HOST=bw.project-conquer.com
- DB_NAME=gitea
- DB_USER=gitea
- DB_PASSWD=g1+3a
restart: always
networks:
- traefik_proxy
volumes:
- /opt/gitea:/data
- ./app.ini:/data/gitea/conf/app.ini
- /etc/timezone:/etc/timezone:ro
- /etc/localtime:/etc/localtime:ro
# ports:
# - "3000:3000"
# - "2222:22"
labels:
- traefik.enable=true
- traefik.docker.network=traefik_proxy
- traefik.http.routers.gitea-web.rule=Host(`git.project-conquer.com`)
- traefik.http.routers.gitea-web.entrypoints=websecure
- traefik.http.routers.gitea-web.tls.certresolver=le
- traefik.http.routers.gitea-web.service=gitea-web-svc
- traefik.http.services.gitea-web-svc.loadbalancer.server.port=3000
- traefik.tcp.routers.gitea-ssh.rule=HostSNI(`*`)
- traefik.tcp.routers.gitea-ssh.entrypoints=ssh
- traefik.tcp.routers.gitea-ssh.service=gitea-ssh-svc
- traefik.tcp.services.gitea-ssh-svc.loadbalancer.server.port=22
gitea_act:
container_name: gitea_act
restart: unless-stopped
image: gitea/act_runner:latest
networks:
- traefik_proxy
environment:
- GIT_SSL_NO_VERIFY=1
- CONFIG_FILE=/config.yaml
- GITEA_INSTANCE_URL=https://git.project-conquer.com
- GITEA_RUNNER_REGISTRATION_TOKEN=BMVpEQVGpXuXwlgHbhANC2wg9HPefVa3coZGcYuc
volumes:
- /var/run/docker.sock:/var/run/docker.sock
- ./runner_config.yaml:/config.yaml
- /opt/gitea:/repo
- gitea-runner-data:/data
# act_runner register --no-interactive --instance http://gitea:3000 --token $RUNNER_TOKEN --name git_runnerls
networks:
traefik_proxy:
external: true
volumes:
gitea-runner-data:

View File

@ -0,0 +1,44 @@
log:
# The level of logging, can be trace, debug, info, warn, error, fatal
level: info
runner:
# Where to store the registration result.
file: /repo/gitea_act/.runner
# Execute how many tasks concurrently at the same time.
capacity: 1
# Extra environment variables to run jobs.
envs:
A_TEST_ENV_NAME_1: a_test_env_value_1
A_TEST_ENV_NAME_2: a_test_env_value_2
# Extra environment variables to run jobs from a file.
# It will be ignored if it's empty or the file doesn't exist.
env_file: .env
# The timeout for a job to be finished.
# Please note that the Gitea instance also has a timeout (3h by default) for the job.
# So the job could be stopped by the Gitea instance if it's timeout is shorter than this.
timeout: 3h
# Whether skip verifying the TLS certificate of the Gitea instance.
insecure: True
# The timeout for fetching the job from the Gitea instance.
fetch_timeout: 5s
# The interval for fetching the job from the Gitea instance.
fetch_interval: 2s
cache:
# Enable cache server to use actions/cache.
enabled: true
# The directory to store the cache data.
# If it's empty, the cache data will be stored in $HOME/.cache/actcache.
dir: ""
# The host of the cache server.
# It's not for the address to listen, but the address to connect from job containers.
# So 0.0.0.0 is a bad choice, leave it empty to detect automatically.
host: ""
# The port of the cache server.
# 0 means to use a random available port.
port: 0
container:
# Which network to use for the job containers. Could be bridge, host, none, or the name of a custom network.
network: bridge

View File

@ -0,0 +1,41 @@
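# Nextcloud AIO host prep: format and mount a dedicated data volume, install
# Docker, create the database, then run the AIO master container.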
sudo mkfs -t xfs /dev/nvme2n1
sudo mkdir /ncdata
sudo chmod 777 /ncdata
echo 'UUID=c1ffe34b-e0d3-48d2-8599-ec43c824f114 /ncdata xfs defaults,nofail 0 2' | sudo tee -a /etc/fstab
sudo systemctl daemon-reload
sudo mount /ncdata
# Install docker
curl -fsSL https://get.docker.com | sudo sh
usermod -aG docker ssm-user
usermod -aG docker ubuntu
# Create Database
sudo su postgres
psql
CREATE USER bitwarden WITH PASSWORD 'xHR8z58CtnVqMVxRKtZN';
CREATE DATABASE bitwarden WITH OWNER bitwarden;
Reference: https://github.com/nextcloud/all-in-one
NC AIO password: trident provoking delouse unlimited finite hut capped poem
Credentials:
admin: 1a70e513b4c19b4d49eb6f0fbf193923263385e12a436edb
# For Linux and without a web server or reverse proxy (like Apache, Nginx, Cloudflare Tunnel and else) already in place:
sudo docker run \
--init \
--sig-proxy=false \
--name nextcloud-aio-mastercontainer \
--restart always \
--publish 80:80 \
--publish 8080:8080 \
--publish 8443:8443 \
--volume nextcloud_aio_mastercontainer:/mnt/docker-aio-config \
--volume /var/run/docker.sock:/var/run/docker.sock:ro \
--env NEXTCLOUD_DATADIR="/ncdata" \
nextcloud/all-in-one:latest

1
deployment/traefik/.env Normal file
View File

@ -0,0 +1 @@
DASHBOARD_USERPASS='dan@project-conquer.com:$2y$05$Ygjd.THA4WgkfGp7LBnocu5LwnlOGJUYqPNffHlxAMPi.Jzj6Rxbq'

View File

@ -0,0 +1,67 @@
version: "3.3"
services:
traefik:
platform: linux/arm64
image: "traefik:v3.0"
container_name: "traefik"
restart: always
ports:
- "80:80"
- "443:443"
- "8888:8080"
command:
# static configuration comes from the mounted traefik.yml (Traefik v3 syntax)
- --log.level=DEBUG
labels:
traefik.enable: true
# Dashboard
traefik.http.middlewares.traefik-auth.basicauth.removeheader: true
traefik.http.middlewares.traefik-auth.basicauth.users: foobar:$$2y$$05$$z2KwKI.GmZ43BbwfmPPKw.CSl3rqQ0OhzBbdom.orfsMVKGLW/Xeu
traefik.http.routers.traefik.rule: Host(`traefik.project-conquer.com`)
traefik.http.routers.traefik.service: api@internal
traefik.http.routers.traefik.tls.certresolver: le
traefik.http.routers.traefik.entrypoints: websecure
traefik.http.routers.traefik.middlewares: traefik-auth
traefik.http.services.traefik.loadbalancer.server.port: 8080
# global redirect to https
traefik.http.routers.http-catchall.rule: HostRegexp(`^.+$`)
traefik.http.routers.http-catchall.entrypoints: web
traefik.http.routers.http-catchall.middlewares: redirect-to-https
# middleware redirect
traefik.http.middlewares.redirect-to-https.redirectscheme.scheme: https
volumes:
- /etc/localtime:/etc/localtime:ro
- /var/run/docker.sock:/var/run/docker.sock:ro
- ./traefik.yml:/traefik.yml:ro
- ./dynamic:/dynamic:ro
- /var/log/traefik:/var/log/traefik
- ./certs:/etc/certs
networks:
- traefik_proxy
# whoami:
# platform: linux/arm64
# image: "traefik/whoami"
# container_name: "simple-service"
# labels:
# traefik.enable: true
# traefik.http.routers.whoami.rule: Host(`whoami.project-conquer.com`)
# traefik.http.routers.whoami.entrypoints: websecure
# traefik.http.routers.whoami.tls.certresolver: le
## "traefik.http.routers.whoami.tls: true"
# networks:
# - traefik_proxy
networks:
traefik_proxy:
external: true

View File

@ -0,0 +1,25 @@
http:
middlewares:
internal-whitelist:
ipAllowList:
sourceRange:
- "127.0.0.1/32"
- "10.0.0.0/16"
- "172.16.0.0/12"
- "192.168.0.0/16"
- "66.110.219.60/32"
- "24.62.74.24/32"
# TLS passthrough is only valid on TCP routers, so the Nextcloud route is a
# tcp router keyed on SNI rather than an http router.
tcp:
routers:
nc:
rule: HostSNI(`cloud.project-conquer.com`)
service: nc
tls:
passthrough: true
services:
nc:
loadBalancer:
servers:
- address: "18.118.62.53:443"

View File

@ -0,0 +1,56 @@
global:
checkNewVersion: true
sendAnonymousUsage: false
api:
dashboard: true
insecure: true
entryPoints:
web:
address: :80
websecure:
address: :443
forwardedHeaders:
insecure: true
ssh:
address: :22
certificatesResolvers:
le:
acme:
email: dan@project-conquer.com
storage: /etc/certs/acme.json
tlsChallenge: {}
providers:
providersThrottleDuration: 2s
docker:
exposedByDefault: false
network: traefik_proxy
watch: true
file:
filename: /dynamic/dynamic.yml
watch: true
# redis:
# endpoints:
# - "192.168.1.11:6379"
log:
level: DEBUG
format: common
filePath: /var/log/traefik/traefik.log
accessLog:
filePath: /var/log/traefik/access.log
format: json
fields:
defaultMode: keep
names:
ClientUsername: drop
headers:
defaultMode: keep
names:
User-Agent: keep
Authorization: keep
Content-Type: keep

6126
scripts/Koku.json Normal file

File diff suppressed because it is too large

2788
scripts/Natalia.json Normal file

File diff suppressed because it is too large

BIN
scripts/bw_dump Normal file

Binary file not shown.

186
scripts/datetest.pl Normal file
View File

@ -0,0 +1,186 @@
use strict;
use warnings;
use Path::Tiny;
use JSON::XS; # JSON parser module
use DDP;
use v5.36;
use DateTime::Format::ISO8601;
use Try::Catch;
use Getopt::Long;
use Image::MetaData::JPEG;
use File::stat;
GetOptions(
"pull-versions" => \my $get_versions,
"restore" => \my $do_restores,
"verbose" => \my $verbose,
"print" => \my $print,
"copy" => \my $copy,
"overwrite" => \my $overwrite,
"folder=s" => \my $folder_name,
"update-ts" => \my $update_ts,
"since" => \my $since,
"match" => \my $match,
) or die("Error in command line arguments\n");
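# Restores earlier versions of files that Syncthing archived under a folder's
# .stversions directory: --pull-versions fetches version metadata from the
# Syncthing REST API, --restore copies the newest archived copy of each file
# back into the live folder, and --folder selects a mapping from %folders.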
$match = '.*' unless $match;
my %restores;
my %stats = (
exists => 0,
restored => 0,
failed => 0,
selected => 0,
);
my %folders = (
'koku_downloads' => {
'id' => 'm7ko5-adoga',
'path' => '/mnt/nasty/koku_downloads',
},
'koku_documents' => {
'id' => 'krvxc-ajsfv',
'path' => '/mnt/nasty/Koku',
},
'koku_desktop' => {
'id' => 'vhimk-o3gwx',
'path' => '/mnt/nasty/koku_desktop',
},
'shared_pictures' => {
'id' => 'p6jqq-nouth',
'path' => '/mnt/nasty/Media/Pictures'
},
'koku_music' => {
'id' => 'mdhgr-gd7rv',
'path' => '/mnt/nasty/Media/Music/Natalia'
},
'project-conquer' => {
'id' => 'qypth-g6xkb',
'path' => '/mnt/nasty/project-conquer'
},
'highpoint' => {
'id' => 'jekig-3yzvw',
'path' => '/mnt/nasty/highpoint'
},
);
my $folder_id = $folders{$folder_name}{'id'};
my $target_dir = $folders{$folder_name}{'path'};
my $source_dir = $folders{$folder_name}{'path'} . "/.stversions";
my $version_data = "$folder_name.json";
sub get_versions() {
my $curl = qq{curl -k -X GET -H "X-API-KEY: oemTvSg94cShmEymeuct66GRsyutLTGg" https://dri.tz:8090/rest/folder/versions?folder=$folder_id > $folder_name.json};
system($curl);
say $curl;
}
sub get_json() {
my $json_file = path($version_data);
my $decoded_json;
if (-e $json_file) {
my $json_data = $json_file->slurp;
$decoded_json = decode_json($json_data);
}
else {
die "JSON file not found at $json_file";
}
return $decoded_json;
};
sub generate_restores($path, $version_date, $version_time, $restore_path) {
if ($restores{$path}) {
say "Path exists, checking for latest verson" if $verbose;
if ($version_date > $restores{$path}{'version_date'}) { # keep the newest version per path
say "Later version found" if $verbose;
$restores{$path} = {
'version_date' => $version_date,
'version_time' => $version_time,
'restore_path' => $restore_path,
};
}
else {
say "Keeping current version" if $verbose;
}
}
else {
say "Inserting new path" if $verbose;
$restores{$path} = {
'version_date' => $version_date,
'version_time' => $version_time,
'restore_path' => $restore_path,
};
}
}
sub restore_versions() {
for my $path (sort keys %restores) {
my $source = path("$source_dir/" . $restores{$path}{'restore_path'});
my $dest = path("$target_dir/$path");
say "Source: $source" if $verbose;
say "Dest: $dest" if $verbose;
my $res;
unless (-d $dest->parent) {
$dest->parent->mkdir;
}
if (! -f $dest || $overwrite) {
try {
$res = 'restored';
# my $restore_copy = $source->copy($dest) if $copy; # Doesn't preserve timestamps
my $restore_copy = system('cp', '-p', $source, $dest) if $copy;
say "$restore_copy successfully restored" if $verbose;
++$stats{'restored'};
}
catch {
$res = 'failed ';
say "$source -> $dest failed to restore" if $verbose;
++$stats{'failed'};
}
}
else {
$res = 'exists ';
++$stats{'exists'};
}
if ($update_ts) {
update_timestamp($path);
say "updated timestamp" if $verbose;
}
my $processed = $stats{'restored'} + $stats{'exists'} + $stats{'failed'};
my $selected = $stats{'selected'};
say sprintf('%.2f', $processed * 100 / $selected) . "% $processed/$selected $res\t$dest" if $print;
}
}
sub check_restores() {
my $decoded_json = get_json();
foreach my $path (sort keys %$decoded_json) {
for my $version (@{$decoded_json->{$path}}) {
my $ts = DateTime::Format::ISO8601->parse_datetime($version->{'versionTime'});
if ($ts->ymd ne '2023-08-25') {
say $path
}
}
}
}
sub update_timestamp($path) {
# return unless $path =~ /\.jpe?g$/i;
try {
my $source = path("$source_dir/" . $restores{$path}{'restore_path'});
my $dest = path("$target_dir/$path");
my $stat = stat($source);
utime($stat->atime, $stat->mtime, $dest);
}
catch {
say "unable to update timestamp";
}
}
get_versions() if $get_versions;
restore_versions() if $do_restores;

27
scripts/fix_word.pl Normal file
View File

@ -0,0 +1,27 @@
use strict;
use warnings;
use v5.36;
use Path::Tiny;
use MsOffice::Word::Surgeon;
use DDP;
use Try::Catch;
my $dir = path('/mnt/nasty/Koku/The Bakilanas');
my $iter = $dir->iterator;
while ( my $filepath = $iter->() ) {
if ($filepath->basename =~ /\.docx?$/i) {
say $filepath;
open_file($filepath->stringify);
}
}
sub open_file($filename) {
try {
my $surgeon = MsOffice::Word::Surgeon->new(docx => $filename);
say "opens fine!";
}
catch {
say "couldn't open: $@";
}
}

76398
scripts/highpoint.json Normal file

File diff suppressed because it is too large

7469
scripts/koku_desktop.json Normal file

File diff suppressed because it is too large

6126
scripts/koku_documents.json Normal file

File diff suppressed because it is too large

21313
scripts/koku_downloads.json Normal file

File diff suppressed because it is too large

248
scripts/local_del/bin/json_xs Executable file
View File

@ -0,0 +1,248 @@
#!/home/superfly/perl5/perlbrew/perls/perl-5.36.1/bin/perl
=head1 NAME
json_xs - JSON::XS commandline utility
=head1 SYNOPSIS
json_xs [-v] [-f inputformat] [-t outputformat]
=head1 DESCRIPTION
F<json_xs> converts between some input and output formats (one of them is
JSON).
The default input format is C<json> and the default output format is
C<json-pretty>.
=head1 OPTIONS
=over 4
=item -v
Be slightly more verbose.
=item -f fromformat
Read a file in the given format from STDIN.
C<fromformat> can be one of:
=over 4
=item json - a json text encoded, either utf-8, utf16-be/le, utf32-be/le
=item cbor - CBOR (RFC 7049, L<CBOR::XS>), a kind of binary JSON
=item storable - a L<Storable> frozen value
=item storable-file - a L<Storable> file (Storable has two incompatible formats)
=item bencode - use L<Convert::Bencode>, if available (used by torrent files, among others)
=item clzf - L<Compress::LZF> format (requires that module to be installed)
=item eval - evaluate the given code as (non-utf-8) Perl, basically the reverse of "-t dump"
=item yaml - L<YAML> format (requires that module to be installed)
=item string - do not attempt to decode the file data
=item none - nothing is read, creates an C<undef> scalar - mainly useful with C<-e>
=back
=item -t toformat
Write the file in the given format to STDOUT.
C<toformat> can be one of:
=over 4
=item json, json-utf-8 - json, utf-8 encoded
=item json-pretty - as above, but pretty-printed
=item json-utf-16le, json-utf-16be - little endian/big endian utf-16
=item json-utf-32le, json-utf-32be - little endian/big endian utf-32
=item cbor - CBOR (RFC 7049, L<CBOR::XS>), a kind of binary JSON
=item cbor-packed - CBOR using extensions to make it smaller
=item storable - a L<Storable> frozen value in network format
=item storable-file - a L<Storable> file in network format (Storable has two incompatible formats)
=item bencode - use L<Convert::Bencode>, if available (used by torrent files, among others)
=item clzf - L<Compress::LZF> format
=item yaml - L<YAML::XS> format
=item dump - L<Data::Dump>
=item dumper - L<Data::Dumper>
=item string - writes the data out as if it were a string
=item none - nothing gets written, mainly useful together with C<-e>
Note that Data::Dumper doesn't handle self-referential data structures
correctly - use "dump" instead.
=back
=item -e code
Evaluate perl code after reading the data and before writing it out again
- can be used to filter, create or extract data. The data that has been
written is in C<$_>, and whatever is in there is written out afterwards.
=back
=head1 EXAMPLES
json_xs -t none <isitreally.json
"JSON Lint" - tries to parse the file F<isitreally.json> as JSON - if it
is valid JSON, the command outputs nothing, otherwise it will print an
error message and exit with non-zero exit status.
<src.json json_xs >pretty.json
Prettify the JSON file F<src.json> to F<pretty.json>.
json_xs -f storable-file <file
Read the serialised Storable file F<file> and print a human-readable JSON
version of it to STDOUT.
json_xs -f storable-file -t yaml <file
Same as above, but write YAML instead (not using JSON at all :)
json_xs -f none -e '$_ = [1, 2, 3]'
Dump the perl array as UTF-8 encoded JSON text.
<torrentfile json_xs -f bencode -e '$_ = join "\n", map @$_, @{$_->{"announce-list"}}' -t string
Print the tracker list inside a torrent file.
lwp-request http://cpantesters.perl.org/show/JSON-XS.json | json_xs
Fetch the cpan-testers result summary C<JSON::XS> and pretty-print it.
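json_xs -e '$_ = $_->{version}' -t string <META.json
A sketch of field extraction (the F<META.json> input and its C<version>
key are illustrative): read JSON, pull out one value with C<-e>, and
print it as a plain string.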
=head1 AUTHOR
Copyright (C) 2008 Marc Lehmann <json@schmorp.de>
=cut
use strict;
use Getopt::Long;
use Storable ();
use Encode;
use JSON::XS;
my $opt_verbose;
my $opt_from = "json";
my $opt_to = "json-pretty";
my $opt_eval;
Getopt::Long::Configure ("bundling", "no_ignore_case", "require_order");
GetOptions(
"v" => \$opt_verbose,
"f=s" => \$opt_from,
"t=s" => \$opt_to,
"e=s" => \$opt_eval,
) or die "Usage: $0 [-v] -f fromformat [-e code] [-t toformat]\n";
my %F = (
"none" => sub { undef },
"string" => sub { $_ },
"json" => sub {
my $enc =
/^\x00\x00\x00/s ? "utf-32be"
: /^\x00.\x00/s ? "utf-16be"
: /^.\x00\x00\x00/s ? "utf-32le"
: /^.\x00.\x00/s ? "utf-16le"
: "utf-8";
warn "input text encoding is $enc\n" if $opt_verbose;
JSON::XS->new->decode (decode $enc, $_)
},
"cbor" => sub { require CBOR::XS; CBOR::XS->new->allow_cycles->decode ($_) },
"storable" => sub { Storable::thaw $_ },
"storable-file" => sub { open my $fh, "<", \$_; Storable::fd_retrieve $fh },
"bencode" => sub { require Convert::Bencode; Convert::Bencode::bdecode ($_) },
"clzf" => sub { require Compress::LZF; Compress::LZF::sthaw ($_) },
"yaml" => sub { require YAML::XS; YAML::XS::Load ($_) },
"eval" => sub { my $v = eval "no strict; no warnings; no utf8;\n#line 1 \"input\"\n$_"; die "$@" if $@; $v },
);
my %T = (
"none" => sub { "" },
"string" => sub { $_ },
"json" => sub { encode_json $_ },
"json-utf-8" => sub { encode_json $_ },
"json-pretty" => sub { JSON::XS->new->utf8->pretty->canonical->encode ($_) },
"json-utf-16le" => sub { encode "utf-16le", JSON::XS->new->encode ($_) },
"json-utf-16be" => sub { encode "utf-16be", JSON::XS->new->encode ($_) },
"json-utf-32le" => sub { encode "utf-32le", JSON::XS->new->encode ($_) },
"json-utf-32be" => sub { encode "utf-32be", JSON::XS->new->encode ($_) },
"cbor" => sub { require CBOR::XS; CBOR::XS::encode_cbor ($_) },
"cbor-packed" => sub { require CBOR::XS; CBOR::XS->new->pack_strings->encode ($_) },
"storable" => sub { Storable::nfreeze $_ },
"storable-file" => sub { open my $fh, ">", \my $buf; Storable::nstore_fd $_, $fh; $buf },
"bencode" => sub { require Convert::Bencode; Convert::Bencode::bencode ($_) },
"clzf" => sub { require Compress::LZF; Compress::LZF::sfreeze_cr ($_) },
"yaml" => sub { require YAML::XS; YAML::XS::Dump ($_) },
"dumper" => sub {
require Data::Dumper;
#local $Data::Dumper::Purity = 1; # hopeless case
local $Data::Dumper::Terse = 1;
local $Data::Dumper::Indent = 1;
local $Data::Dumper::Useqq = 1;
local $Data::Dumper::Quotekeys = 0;
local $Data::Dumper::Sortkeys = 1;
Data::Dumper::Dumper($_)
},
"dump" => sub {
require Data::Dump;
local $Data::Dump::TRY_BASE64 = 0;
Data::Dump::dump ($_) . "\n"
},
);
$F{$opt_from}
or die "$opt_from: not a valid fromformat\n";
$T{$opt_to}
or die "$opt_to: not a valid toformat\n";
if ($opt_from ne "none") {
local $/;
binmode STDIN; # stupid perl sometimes thinks it's funny
$_ = <STDIN>;
}
$_ = $F{$opt_from}->();
eval $opt_eval;
die $@ if $@;
$_ = $T{$opt_to}->();
binmode STDOUT;
syswrite STDOUT, $_;

View File

@ -0,0 +1,233 @@
=head1 NAME
Canary::Stability - canary to check perl compatibility for schmorp's modules
=head1 SYNOPSIS
# in Makefile.PL
use Canary::Stability DISTNAME => 2001, MINIMUM_PERL_VERSION;
=head1 DESCRIPTION
This module is used by Schmorp's modules during configuration stage to
test the installed perl for compatibility with his modules.
It's not, at this stage, meant as a tool for other module authors,
although in principle nothing prevents them from subscribing to the same
ideas.
See the F<Makefile.PL> in L<Coro> or L<AnyEvent> for usage examples.
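A minimal sketch of such a Makefile.PL preamble (the distribution name
and version numbers are placeholders):
use Canary::Stability 'My-Dist' => 2013, 5.008002;
use ExtUtils::MakeMaker;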
=cut
package Canary::Stability;
BEGIN {
$VERSION = 2013;
}
sub sgr {
# we just assume ANSI almost everywhere
# red 31, yellow 33, green 32
local $| = 1;
$ENV{PERL_CANARY_STABILITY_COLOUR} ne 0
and ((-t STDOUT and length $ENV{TERM}) or $ENV{PERL_CANARY_STABILITY_COLOUR})
and print "\e[$_[0]m";
}
sub import {
my (undef, $distname, $minvers, $minperl) = @_;
$ENV{PERL_CANARY_STABILITY_DISABLE}
and return;
$minperl ||= 5.008002;
print <<EOF;
***
*** Canary::Stability COMPATIBILITY AND SUPPORT CHECK
*** =================================================
***
*** Hi!
***
*** I do my best to provide predictable and reliable software.
***
*** However, in recent releases, P5P (who maintain perl) have been
*** introducing regressions that are sometimes subtle and at other times
*** catastrophic, often for personal preferences with little or no concern
*** for existing code, most notably CPAN.
***
*** For this reason, it has become very hard for me to maintain the level
*** of reliability and support I have committed myself to in the past, at
*** least with some perl versions: I simply can't keep up working around new
*** bugs or gratuitous incompatibilities, and in turn you might suffer from
*** unanticipated problems.
***
*** Therefore I have introduced a support and compatibility check, the results
*** of which follow below, together with a FAQ and some recommendations.
***
*** This check is just to let you know that there might be a risk, so you can
*** make judgement calls on how to proceed - it will not keep the module from
*** installing or working.
***
EOF
if ($minvers > $VERSION) {
sgr 33;
print <<EOF;
*** The stability canary says: (nothing, it died of old age).
***
*** Your Canary::Stability module (used by $distname) is too old.
*** This is not a fatal problem - while you might want to upgrade to version
*** $minvers (currently installed version: $VERSION) to get better support
*** status testing, you might also not want to care at all, and all will
*** be well as long as $distname works well enough for you, as the stability
*** canary is only used when installing the distribution.
***
EOF
} elsif ($] < $minperl) {
sgr 33;
print <<EOF;
*** The stability canary says: chirp (it seems concerned about something).
***
*** Your perl version ($]) is older than the $distname distribution
*** likes ($minperl). This is not a fatal problem - the module might work
*** well with your version of perl, but it does mean the author likely
*** won't do anything to make it work if it breaks.
***
EOF
if ($ENV{AUTOMATED_TESTING}) {
print <<EOF;
*** Since this is an AUTOMATED_TESTING environment, the stability canary
*** decided to fail cleanly here, rather than to generate a false test
*** result.
***
EOF
exit 0;
}
} elsif (defined $Internals::StabilityBranchVersion) {
# note to people studying this module's sources:
# the above test is not considered a clean or stable way to
# test for the stability branch.
sgr 32;
print <<EOF;
*** The stability canary says: chirp! chirp! (it seems to be quite excited)
***
*** It seems you are running schmorp's stability branch of perl.
*** All should be well, and if it isn't, you should report this as a bug
*** to the $distname author.
***
EOF
} elsif ($] < 5.021) {
#sgr 32;
print <<EOF;
*** The stability canary says: chirp! chirp! (it seems to be quite happy)
***
*** Your version of perl ($]) is quite supported by $distname, nothing
*** else to be said, hope it comes in handy.
***
EOF
} else {
sgr 31;
print <<EOF;
*** The stability canary says: (nothing, it was driven away by harsh weather)
***
*** It seems you are running perl version $], likely the "official" or
*** "standard" version. While there is nothing wrong with doing that,
*** standard perl versions 5.022 and up are not supported by $distname.
*** While this might be fatal, it might also be all right - if you run into
*** problems, you might want to downgrade your perl or switch to the
*** stability branch.
***
*** If everything works fine, you can ignore this message.
***
EOF
sgr 0;
print <<EOF;
***
*** Stability canary mini-FAQ:
***
*** Do I need to do anything?
*** With luck, no. While some distributions are known to fail
*** already, most should probably work. This message is here
*** to alert you that your perl is not supported by $distname,
*** and if things go wrong, you either need to downgrade, or
*** sidegrade to the stability variant of your perl version,
*** or simply live with the consequences.
***
*** What is this canary thing?
*** Its purpose is to check the support status of $distname with
*** respect to your perl version.
***
*** What is this "stability branch"?
*** It's a branch or fork of the official perl, by schmorp, to
*** improve stability and compatibility with existing modules.
***
*** How can I skip this prompt on automated installs?
*** Set PERL_CANARY_STABILITY_NOPROMPT=1 in your environment.
*** More info is in the Canary::Stability manpage.
***
*** Long version of this FAQ: http://stableperl.schmorp.de/faq.html
*** Stability Branch homepage: http://stableperl.schmorp.de/
***
EOF
unless ($ENV{PERL_CANARY_STABILITY_NOPROMPT}) {
require ExtUtils::MakeMaker;
ExtUtils::MakeMaker::prompt ("Continue anyways? ", "y") =~ /^y/i
or die "FATAL: User aborted configuration of $distname.\n";
}
}
sgr 0;
}
=head1 ENVIRONMENT VARIABLES
=over 4
=item C<PERL_CANARY_STABILITY_NOPROMPT=1>
Do not prompt the user on alert messages.
=item C<PERL_CANARY_STABILITY_COLOUR=0>
Disable use of colour.
=item C<PERL_CANARY_STABILITY_COLOUR=1>
Force use of colour.
=item C<PERL_CANARY_STABILITY_DISABLE=1>
Disable this module's functionality completely.
=item C<AUTOMATED_TESTING=1>
When this variable is set to a true value and the perl minimum version
requirement is not met, the module will exit, which should skip testing
under automated testing environments.
This is done to avoid false failure or success reports when the chances of
success are already quite low and the failures are not supported by the
author.
=back
=head1 AUTHOR
Marc Lehmann <schmorp@schmorp.de>
http://software.schmorp.de/pkg/Canary-Stability.html
=cut
1

View File

@ -0,0 +1,266 @@
=head1 NAME
Types::Serialiser - simple data types for common serialisation formats
=encoding utf-8
=head1 SYNOPSIS
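A minimal sketch, using the constants and predicates documented below:
use Types::Serialiser;
my $bool = Types::Serialiser::as_bool (1);   # $Types::Serialiser::true
print "a serialiser boolean, not a plain 1\n"
if Types::Serialiser::is_bool $bool;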
=head1 DESCRIPTION
This module provides some extra datatypes that are used by common
serialisation formats such as JSON or CBOR. The idea is to have a
repository of simple/small constants and containers that can be shared by
different implementations so they become interoperable with each other.
=cut
package Types::Serialiser;
use common::sense; # required to suppress annoying warnings
our $VERSION = '1.01';
=head1 SIMPLE SCALAR CONSTANTS
Simple scalar constants are values that are overloaded to act like simple
Perl values, but have (class) type to differentiate them from normal Perl
scalars. This is necessary because these have different representations in
the serialisation formats.
In the following, functions with zero or one arguments have a prototype of
C<()> and C<($)>, respectively, so act as constants and unary operators.
=head2 BOOLEANS (Types::Serialiser::Boolean class)
This type has only two instances, true and false. A natural representation
for these in Perl is C<1> and C<0>, but serialisation formats need to be
able to differentiate between them and mere numbers.
=over 4
=item $Types::Serialiser::true, Types::Serialiser::true
This value represents the "true" value. In most contexts it acts like
the number C<1>. It is up to you whether you use the variable form
(C<$Types::Serialiser::true>) or the constant form (C<Types::Serialiser::true>).
The constant is represented as a reference to a scalar containing C<1> -
implementations are allowed to directly test for this.
=item $Types::Serialiser::false, Types::Serialiser::false
This value represents the "false" value. In most contexts it acts like
the number C<0>. It is up to you whether you use the variable form
(C<$Types::Serialiser::false>) or the constant form (C<Types::Serialiser::false>).
The constant is represented as a reference to a scalar containing C<0> -
implementations are allowed to directly test for this.
=item Types::Serialiser::as_bool $value
Converts a Perl scalar into a boolean, which is useful syntactic
sugar. Strictly equivalent to:
$value ? $Types::Serialiser::true : $Types::Serialiser::false
=item $is_bool = Types::Serialiser::is_bool $value
Returns true iff the C<$value> is either C<$Types::Serialiser::true> or
C<$Types::Serialiser::false>.
For example, you could differentiate between a perl true value and a
C<Types::Serialiser::true> by using this:
$value && Types::Serialiser::is_bool $value
=item $is_true = Types::Serialiser::is_true $value
Returns true iff C<$value> is C<$Types::Serialiser::true>.
=item $is_false = Types::Serialiser::is_false $value
Returns true iff C<$value> is C<$Types::Serialiser::false>.
=back
=head2 ERROR (Types::Serialiser::Error class)
This class has only a single instance, C<error>. It is used to signal
an encoding or decoding error. In CBOR, for example, an object that
couldn't be encoded will be represented by a CBOR undefined value, which
is represented by the error value in Perl.
=over 4
=item $Types::Serialiser::error, Types::Serialiser::error
This value represents the "error" value. Accessing values of this type
will throw an exception.
The constant is represented as a reference to a scalar containing C<undef>
- implementations are allowed to directly test for this.
=item $is_error = Types::Serialiser::is_error $value
Returns true iff C<$value> is C<$Types::Serialiser::error>.
=back
=cut
BEGIN {
# for historical reasons, and to avoid extra dependencies in JSON::PP,
# we alias *Types::Serialiser::Boolean with JSON::PP::Boolean.
package JSON::PP::Boolean;
*Types::Serialiser::Boolean:: = *JSON::PP::Boolean::;
}
{
# this must be done before blessing to work around bugs
# in perl < 5.18 (it seems to be fixed in 5.18).
package Types::Serialiser::BooleanBase;
use overload
"0+" => sub { ${$_[0]} },
"++" => sub { $_[0] = ${$_[0]} + 1 },
"--" => sub { $_[0] = ${$_[0]} - 1 },
fallback => 1;
@Types::Serialiser::Boolean::ISA = Types::Serialiser::BooleanBase::;
}
our $true = do { bless \(my $dummy = 1), Types::Serialiser::Boolean:: };
our $false = do { bless \(my $dummy = 0), Types::Serialiser::Boolean:: };
our $error = do { bless \(my $dummy ), Types::Serialiser::Error:: };
sub true () { $true }
sub false () { $false }
sub error () { $error }
sub as_bool($) { $_[0] ? $true : $false }
sub is_bool ($) { UNIVERSAL::isa $_[0], Types::Serialiser::Boolean:: }
sub is_true ($) { $_[0] && UNIVERSAL::isa $_[0], Types::Serialiser::Boolean:: }
sub is_false ($) { !$_[0] && UNIVERSAL::isa $_[0], Types::Serialiser::Boolean:: }
sub is_error ($) { UNIVERSAL::isa $_[0], Types::Serialiser::Error:: }
package Types::Serialiser::Error;
sub error {
require Carp;
Carp::croak ("caught attempt to use the Types::Serialiser::error value");
};
use overload
"0+" => \&error,
"++" => \&error,
"--" => \&error,
fallback => 1;
=head1 NOTES FOR XS USERS
The recommended way to detect whether a scalar is one of these objects
is to check whether the stash is the C<Types::Serialiser::Boolean> or
C<Types::Serialiser::Error> stash, and then follow the scalar reference to
see if it's C<1> (true), C<0> (false) or C<undef> (error).
While it is possible to use an isa test, directly comparing stash pointers
is faster and guaranteed to work.
For historical reasons, the C<Types::Serialiser::Boolean> stash is
just an alias for C<JSON::PP::Boolean>. When printed, the classname
will usually be C<JSON::PP::Boolean>, but isa tests and stash pointer
comparison will normally work correctly (i.e. Types::Serialiser::true ISA
JSON::PP::Boolean, but also ISA Types::Serialiser::Boolean).
=head1 A GENERIC OBJECT SERIALISATION PROTOCOL
This section explains the object serialisation protocol used by
L<CBOR::XS>. It is meant to be generic enough to support any kind of
generic object serialiser.
This protocol is called "the Types::Serialiser object serialisation
protocol".
=head2 ENCODING
When the encoder encounters an object that it cannot otherwise encode (for
example, L<CBOR::XS> can encode a few special types itself, and will first
attempt to use the special C<TO_CBOR> serialisation protocol), it will
look up the C<FREEZE> method on the object.
Note that the C<FREEZE> method will normally be called I<during> encoding,
and I<MUST NOT> change the data structure that is being encoded in any
way, or it might cause memory corruption or worse.
If it exists, it will call it with two arguments: the object to serialise,
and a constant string that indicates the name of the data model. For
example L<CBOR::XS> uses C<CBOR>, and the L<JSON> and L<JSON::XS> modules
(or any other JSON serialiser), would use C<JSON> as second argument.
The C<FREEZE> method can then return zero or more values to identify the
object instance. The serialiser is then supposed to encode the class name
and all of these return values (which must be encodable in the format)
using the relevant form for Perl objects. In CBOR for example, there is a
registered tag number for encoded perl objects.
The values that C<FREEZE> returns must be serialisable with the serialiser
that calls it. Therefore, it is recommended to use simple types such as
strings and numbers, and maybe array references and hashes (basically, the
JSON data model). You can always use a more complex format for a specific
data model by checking the second argument, the data model.
The "data model" is not the same as the "data format" - the data model
indicates what types and kinds of return values can be returned from
C<FREEZE>. For example, in C<CBOR> it is permissible to return tagged CBOR
values, while JSON does not support these at all, so C<JSON> would be a
valid (but too limited) data model name for C<CBOR::XS>. Similarly, a
serialising format that supports more or less the same data model as JSON
could use C<JSON> as data model without losing anything.
=head2 DECODING
When the decoder then encounters such an encoded perl object, it should
look up the C<THAW> method on the stored classname, and invoke it with the
classname, the constant string to identify the data model/data format, and
all the return values returned by C<FREEZE>.
=head2 EXAMPLES
See the C<OBJECT SERIALISATION> section in the L<CBOR::XS> manpage for
more details, an example implementation, and code examples.
Here is an example C<FREEZE>/C<THAW> method pair:
sub My::Object::FREEZE {
my ($self, $model) = @_;
($self->{type}, $self->{id}, $self->{variant})
}
sub My::Object::THAW {
my ($class, $model, $type, $id, $variant) = @_;
$class->new (type => $type, id => $id, variant => $variant)
}
=head1 BUGS
The use of L<overload> makes this module much heavier than it should be
(on my system, this module: 4kB RSS, overload: 260kB RSS).
=head1 SEE ALSO
Currently, L<JSON::XS> and L<CBOR::XS> use these types.
=head1 AUTHOR
Marc Lehmann <schmorp@schmorp.de>
http://home.schmorp.de/
=cut
1

View File

@ -0,0 +1,27 @@
=head1 NAME
Types::Serialiser::Error - dummy module for Types::Serialiser
=head1 SYNOPSIS
# do not "use" yourself
=head1 DESCRIPTION
This module exists only to provide overload resolution for Storable and
similar modules that assume that class name equals module name. See
L<Types::Serialiser> for more info about this class.
=cut
use Types::Serialiser ();
=head1 AUTHOR
Marc Lehmann <schmorp@schmorp.de>
http://home.schmorp.de/
=cut
1

View File

@ -0,0 +1,37 @@
{
"abstract" : "unknown",
"author" : [
"unknown"
],
"dynamic_config" : 0,
"generated_by" : "ExtUtils::MakeMaker version 7.34, CPAN::Meta::Converter version 2.150010",
"license" : [
"unknown"
],
"meta-spec" : {
"url" : "http://search.cpan.org/perldoc?CPAN::Meta::Spec",
"version" : 2
},
"name" : "Canary-Stability",
"no_index" : {
"directory" : [
"t",
"inc"
]
},
"prereqs" : {
"build" : {
"requires" : {
"ExtUtils::MakeMaker" : "0"
}
},
"configure" : {
"requires" : {
"ExtUtils::MakeMaker" : "0"
}
}
},
"release_status" : "stable",
"version" : "2013",
"x_serialization_backend" : "JSON::PP version 4.07"
}

View File

@ -0,0 +1 @@
{"pathname":"M/ML/MLEHMANN/Canary-Stability-2013.tar.gz","version":"2013","name":"Canary::Stability","target":"Canary::Stability","dist":"Canary-Stability-2013","provides":{"Canary::Stability":{"version":"2013","file":"Stability.pm"}}}

View File

@ -0,0 +1,44 @@
{
"abstract" : "unknown",
"author" : [
"unknown"
],
"dynamic_config" : 0,
"generated_by" : "ExtUtils::MakeMaker version 7.34, CPAN::Meta::Converter version 2.150001, CPAN::Meta::Converter version 2.150010",
"license" : [
"unknown"
],
"meta-spec" : {
"url" : "http://search.cpan.org/perldoc?CPAN::Meta::Spec",
"version" : 2
},
"name" : "JSON-XS",
"no_index" : {
"directory" : [
"t",
"inc"
]
},
"prereqs" : {
"build" : {
"requires" : {
"ExtUtils::MakeMaker" : "0"
}
},
"configure" : {
"requires" : {
"Canary::Stability" : "0",
"ExtUtils::MakeMaker" : "6.52"
}
},
"runtime" : {
"requires" : {
"Types::Serialiser" : "0",
"common::sense" : "0"
}
}
},
"release_status" : "stable",
"version" : "4.03",
"x_serialization_backend" : "JSON::PP version 4.07"
}

View File

@ -0,0 +1 @@
{"provides":{"JSON::XS":{"file":"XS.pm","version":"4.03"}},"dist":"JSON-XS-4.03","version":"4.03","target":"JSON::XS","name":"JSON::XS","pathname":"M/ML/MLEHMANN/JSON-XS-4.03.tar.gz"}

View File

@ -0,0 +1,42 @@
{
"abstract" : "unknown",
"author" : [
"unknown"
],
"dynamic_config" : 0,
"generated_by" : "ExtUtils::MakeMaker version 7.34, CPAN::Meta::Converter version 2.150001, CPAN::Meta::Converter version 2.150010",
"license" : [
"unknown"
],
"meta-spec" : {
"url" : "http://search.cpan.org/perldoc?CPAN::Meta::Spec",
"version" : 2
},
"name" : "Types-Serialiser",
"no_index" : {
"directory" : [
"t",
"inc"
]
},
"prereqs" : {
"build" : {
"requires" : {
"ExtUtils::MakeMaker" : "0"
}
},
"configure" : {
"requires" : {
"ExtUtils::MakeMaker" : "0"
}
},
"runtime" : {
"requires" : {
"common::sense" : "0"
}
}
},
"release_status" : "stable",
"version" : "1.01",
"x_serialization_backend" : "JSON::PP version 4.07"
}

View File

@ -0,0 +1 @@
{"pathname":"M/ML/MLEHMANN/Types-Serialiser-1.01.tar.gz","version":"1.01","target":"Types::Serialiser","name":"Types::Serialiser","dist":"Types-Serialiser-1.01","provides":{"Types::Serialiser":{"file":"Serialiser.pm","version":"1.01"},"Types::Serialiser::BooleanBase":{"file":"Serialiser.pm","version":"1.01"},"Types::Serialiser::Error":{"version":"1.01","file":"Serialiser.pm"},"JSON::PP::Boolean":{"file":"Serialiser.pm","version":"1.01"}}}

View File

@ -0,0 +1,37 @@
{
"abstract" : "unknown",
"author" : [
"unknown"
],
"dynamic_config" : 0,
"generated_by" : "ExtUtils::MakeMaker version 7.34, CPAN::Meta::Converter version 2.150001, CPAN::Meta::Converter version 2.150010",
"license" : [
"unknown"
],
"meta-spec" : {
"url" : "http://search.cpan.org/perldoc?CPAN::Meta::Spec",
"version" : 2
},
"name" : "common-sense",
"no_index" : {
"directory" : [
"t",
"inc"
]
},
"prereqs" : {
"build" : {
"requires" : {
"ExtUtils::MakeMaker" : "0"
}
},
"configure" : {
"requires" : {
"ExtUtils::MakeMaker" : "0"
}
}
},
"release_status" : "stable",
"version" : 3.75,
"x_serialization_backend" : "JSON::PP version 4.07"
}

View File

@ -0,0 +1 @@
{"pathname":"M/ML/MLEHMANN/common-sense-3.75.tar.gz","dist":"common-sense-3.75","provides":{"common::sense":{"file":"sense.pm.PL","version":"3.75"}},"version":"3.75","name":"common::sense","target":"common::sense"}

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,31 @@
=head1 NAME
JSON::XS::Boolean - dummy module providing JSON::XS::Boolean
=head1 SYNOPSIS
# do not "use" yourself
=head1 DESCRIPTION
This module exists only to provide overload resolution for Storable and
similar modules. It's only needed for compatibility with data serialised
(by other modules such as Storable) that was decoded by JSON::XS versions
before 3.0.
Since 3.0, JSON::PP::Boolean has replaced it. Support for
JSON::XS::Boolean will be removed in a future release.
=cut
use JSON::XS ();
1;
=head1 AUTHOR
Marc Lehmann <schmorp@schmorp.de>
http://home.schmorp.de/
=cut

View File

@ -0,0 +1 @@
/mnt/store/repo/pc/scripts/local/lib/perl5/Canary/Stability.pm

View File

@ -0,0 +1,4 @@
/mnt/store/repo/pc/scripts/local/bin/json_xs
/mnt/store/repo/pc/scripts/local/lib/perl5/x86_64-linux/JSON/XS.pm
/mnt/store/repo/pc/scripts/local/lib/perl5/x86_64-linux/JSON/XS/Boolean.pm
/mnt/store/repo/pc/scripts/local/lib/perl5/x86_64-linux/auto/JSON/XS/XS.so

Binary file not shown.

View File

@ -0,0 +1,2 @@
/mnt/store/repo/pc/scripts/local/lib/perl5/Types/Serialiser.pm
/mnt/store/repo/pc/scripts/local/lib/perl5/Types/Serialiser/Error.pm

View File

@ -0,0 +1,2 @@
/mnt/store/repo/pc/scripts/local/lib/perl5/x86_64-linux/common/sense.pm
/mnt/store/repo/pc/scripts/local/lib/perl5/x86_64-linux/common/sense.pod

View File

@ -0,0 +1,16 @@
package common::sense;
our $VERSION = 3.75;
# overload should be included
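# ${^WARNING_BITS} is the lexical warnings bitmask and $^H/%^H carry the
# strict/utf8/feature hint bits; poking them directly avoids loading the
# strict, warnings and feature pragmas, which is where the memory saving
# described in the POD comes from.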
sub import {
local $^W; # work around perl 5.16 spewing out warnings for next statement
# use warnings
${^WARNING_BITS} ^= ${^WARNING_BITS} ^ "\x0c\x3f\x33\x00\x03\xf0\x0f\xc0\xf0\xfc\x33\x00\x00\x00\x0c\x00\x00\x00\x00\x00";
# use strict, use utf8; use feature;
$^H |= 0x3c820fc0;
@^H{qw(feature___SUB__ feature_bareword_filehandles feature_evalbytes feature_fc feature_indirect feature_multidimensional feature_say feature_state feature_switch feature_unicode)} = (1) x 10;
}
1

View File

@ -0,0 +1,444 @@
=head1 NAME
common::sense - save a tree AND a kitten, use common::sense!
=head1 SYNOPSIS
use common::sense;
# Supposed to be mostly the same, with much lower memory usage, as:
# use utf8;
# use strict qw(vars subs);
# use feature qw(say state switch);
# use feature qw(unicode_strings unicode_eval current_sub fc evalbytes);
# no feature qw(array_base);
# no warnings;
# use warnings qw(FATAL closed threads internal debugging pack
# prototype inplace io pipe unpack malloc glob
# digit printf layer reserved taint closure semicolon);
# no warnings qw(exec newline unopened);
=head1 DESCRIPTION
“Nothing is more fairly distributed than common sense: no one thinks
he needs more of it than he already has.”
René Descartes
This module implements some sane defaults for Perl programs, as defined by
two typical (or not so typical - use your common sense) specimens of Perl
coders. In fact, after working out details on which warnings and strict
modes to enable and make fatal, we found that we (and our code written so
far, and others) fully agree on every option, even though we never used
warnings before, so it seems this module indeed reflects a "common" sense
among some long-time Perl coders.
The basic philosophy behind the choices made in common::sense can be
summarised as: "enforcing strict policies to catch as many bugs as
possible, while at the same time, not limiting the expressive power
available to the programmer".
Two typical examples of how this philosophy is applied in practice are the
handling of uninitialised and malloc warnings:
=over 4
=item I<uninitialised>
C<undef> is a well-defined feature of perl, and enabling warnings for
using it rarely catches any bugs, but considerably limits you in what you
can do, so uninitialised warnings are disabled.
=item I<malloc>
Freeing something twice on the C level is a serious bug, usually causing
memory corruption. It often leads to side effects much later in the
program and there are no advantages to not reporting this, so malloc
warnings are fatal by default.
=back
Unfortunately, there is no fine-grained warning control in perl, so often
whole groups of useful warnings had to be excluded because of a single
useless warning (for example, perl puts an arbitrary limit on the length
of text you can match with some regexes before emitting a warning, making
the whole C<regexp> category useless).
What follows is a more thorough discussion of what this module does,
and why it does it, and what the advantages (and disadvantages) of this
approach are.
=head1 RATIONALE
=over 4
=item use utf8
While it's not common sense to write your programs in UTF-8, it's quickly
becoming the most common encoding, is the designated future default
encoding for perl sources, and the most convenient encoding available
(you can do really nice quoting tricks...). Experience has shown that our
programs were either all pure ascii or utf-8, both of which will stay the
same.
There are few drawbacks to enabling UTF-8 source code by default (mainly
some speed hits due to bugs in older versions of perl), so this module
enables UTF-8 source code encoding by default.
=item use strict qw(subs vars)
Using C<use strict> is definitely common sense, but C<use strict
'refs'> definitely overshoots its usefulness. After almost two
decades of Perl hacking, we decided that it does more harm than being
useful. Specifically, constructs like these:
@{ $var->[0] }
Must be written like this (or similarly), when C<use strict 'refs'> is in
scope, and C<$var> can legally be C<undef>:
@{ $var->[0] || [] }
This is annoying, and doesn't shield against obvious mistakes such as
using C<"">, so one would even have to write (at least for the time
being):
@{ defined $var->[0] ? $var->[0] : [] }
... which nobody with a bit of common sense would consider
writing: clear code is clearly something else.
Curiously enough, sometimes perl is not so strict, as this works even with
C<use strict> in scope:
for (@{ $var->[0] }) { ...
If that isn't hypocrisy! And all that from a mere program!
=item use feature qw(say state given ...)
We found it annoying that we always have to enable extra features. If
something breaks because it didn't anticipate future changes, so be
it. 5.10 broke almost all our XS modules and nobody cared either (or at
least I know of nobody who really complained about gratuitous changes -
as opposed to bugs).
Few modules that are not actively maintained work with newer versions of
Perl, regardless of use feature or not, so a new major perl release means
changes to many modules - new keywords are just the tip of the iceberg.
If your code isn't alive, it's dead, Jim - be an active maintainer.
But nobody forces you to use those extra features in modules meant for
older versions of perl - common::sense of course works there as well.
There is also an important other mode where having additional features by
default is useful: commandline hacks and internal use scripts: See "much
reduced typing", below.
There is one notable exception: C<unicode_eval> is not enabled by
default. In our opinion, C<use feature> had one main effect - newer perl
versions don't value backwards compatibility and the ability to write
modules for multiple perl versions much, after all, you can use feature.
C<unicode_eval> doesn't add a new feature, it breaks an existing function.
=item no warnings, but a lot of new errors
Ah, the dreaded warnings. Even worse, the horribly dreaded C<-w>
switch: Even though we don't care if other people use warnings (and
certainly there are useful ones), a lot of warnings simply go against the
spirit of Perl.
Most prominently, the warnings related to C<undef>. There is nothing wrong
with C<undef>: it has well-defined semantics, it is useful, and spitting
out warnings you never asked for is just evil.
The result was that every one of our modules did C<no warnings> in the
past, to avoid somebody accidentally using and forcing his bad standards
on our code. Of course, this switched off all warnings, even the useful
ones. Not a good situation. Really, the C<-w> switch should enable
warnings for the main program only.
Funnily enough, L<perllexwarn> explicitly mentions C<-w> (and not in a
favourable way, calling it outright "wrong"), but standard utilities, such
as L<prove>, or MakeMaker when running C<make test>, still enable them
blindly.
For version 2 of common::sense, we finally sat down a few hours and went
through I<every single warning message>, identifying - according to
common sense - all the useful ones.
This resulted in the rather impressive list in the SYNOPSIS. When we
weren't sure, we didn't include the warning, so the list might grow in
the future (we might have made a mistake, too, so the list might shrink
as well).
Note the presence of C<FATAL> in the list: we do not think that the
conditions caught by these warnings are worthy of a warning, we I<insist>
that they are worthy of I<stopping> your program, I<instantly>. They are
I<bugs>!
Therefore we consider C<common::sense> to be much stricter than C<use
warnings>, which is good if you are into strict things (we are not,
actually, but these things tend to be subjective).
After deciding on the list, we ran the module against all of our code that
uses C<common::sense> (that is almost all of our code), and found only one
occurrence where one of them caused a problem: one of elmex's (unreleased)
modules contained:
$fmt =~ s/([^\s\[]*)\[( [^\]]* )\]/\x0$1\x1$2\x0/xgo;
We quickly agreed that indeed the code should be changed, even though it
happened to do the right thing when the warning was switched off.
=item much reduced typing
Especially with version 2.0 of common::sense, the amount of boilerplate
code you need to add to get I<this> policy is daunting. Nobody would write
this out in throwaway scripts, commandline hacks or in quick internal-use
scripts.
By using common::sense you get a defined set of policies (ours, but maybe
yours, too, if you accept them), and they are easy to apply to your
scripts: typing C<use common::sense;> is even shorter than C<use warnings;
use strict; use feature ...>.
And you can immediately use the features of your installed perl, which
is more difficult in code you release, but not usually an issue for
internal-use code (downgrades of your production perl should be rare,
right?).
=item mucho reduced memory usage
Just using all those pragmas mentioned in the SYNOPSIS together wastes
<blink>I<< B<776> kilobytes >></blink> of precious memory in my perl, for
I<every single perl process using our code>, which on our machines, is a
lot. In comparison, this module only uses I<< B<four> >> kilobytes (I even
had to write it out so it looks like more) of memory on the same platform.
The money/time/effort/electricity invested in these gigabytes (probably
petabytes globally!) of wasted memory could easily save 42 trees, and a
kitten!
Unfortunately, until everybody applies more common sense, there will still
often be modules that pull in the monster pragmas. But one can hope...
=back
=head1 THERE IS NO 'no common::sense'!!!! !!!! !!
This module doesn't offer an unimport. First of all, it wastes even more
memory, second, and more importantly, who with even a bit of common sense
would want no common sense?
=head1 STABILITY AND FUTURE VERSIONS
Future versions might change just about everything in this module. We
might test our modules and upload new ones working with newer versions of
this module, and leave you standing in the rain because we didn't tell
you. In fact, we did so when switching from 1.0 to 2.0, which enabled gobs
of warnings, and made them FATAL on top.
Maybe we will load some nifty modules that try to emulate C<say> or so
with perls older than 5.10 (this module, of course, should work with older
perl versions - supporting 5.8 for example is just common sense at this
time. Maybe not in the future, but of course you can trust our common
sense to be consistent with, uhm, our opinion).
=head1 WHAT OTHER PEOPLE HAD TO SAY ABOUT THIS MODULE
apeiron
"... wow"
"I hope common::sense is a joke."
crab
"i wonder how it would be if joerg schilling wrote perl modules."
Adam Kennedy
"Very interesting, efficient, and potentially something I'd use all the time."
[...]
"So no common::sense for me, alas."
H.Merijn Brand
"Just one more reason to drop JSON::XS from my distribution list"
Pista Palo
"Something in short supply these days..."
Steffen Schwigon
"This module is quite for sure *not* just a repetition of all the other
'use strict, use warnings'-approaches, and it's also not the opposite.
[...] And for its chosen middle-way it's also not the worst name ever.
And everything is documented."
BKB
"[Deleted - thanks to Steffen Schwigon for pointing out this review was
in error.]"
Somni
"the arrogance of the guy"
"I swear he tacked somenoe else's name onto the module
just so he could use the royal 'we' in the documentation"
Anonymous Monk
"You just gotta love this thing, its got META.json!!!"
dngor
"Heh. '"<elmex at ta-sa.org>"' The quotes are semantic
distancing from that e-mail address."
Jerad Pierce
"Awful name (not a proper pragma), and the SYNOPSIS doesn't tell you
anything either. Nor is it clear what features have to do with "common
sense" or discipline."
acme
"THERE IS NO 'no common::sense'!!!! !!!! !!"
apeiron (meta-comment about us commenting^Wquoting his comment)
"How about quoting this: get a clue, you fucktarded amoeba."
quanth
"common sense is beautiful, json::xs is fast, Anyevent, EV are fast and
furious. I love mlehmannware ;)"
apeiron
"... it's mlehmann's view of what common sense is. His view of common
sense is certainly uncommon, insofar as anyone with a clue disagrees
with him."
apeiron (another meta-comment)
"apeiron wonders if his little informant is here to steal more quotes"
ew73
"... I never got past the SYNOPSIS before calling it shit."
[...]
How come no one ever quotes me. :("
chip (not willing to explain his cryptic questions about links in Changes files)
"I'm willing to ask the question I've asked. I'm not willing to go
through the whole dance you apparently have choreographed. Either
answer the completely obvious question, or tell me to fuck off again."
=head1 FREQUENTLY ASKED QUESTIONS
Or frequently-come-up confusions.
=over 4
=item Is this module meant to be serious?
Yes, we would have put it under the C<Acme::> namespace otherwise.
=item But the manpage is written in a funny/stupid/... way?
This was meant to make it clear that our common sense is a subjective
thing and other people can use their own notions, taking the steam out
of anybody who might be offended (as some people are always offended no
matter what you do).
This was a failure.
But we hope the manpage still is somewhat entertaining even though it
explains boring rationale.
=item Why do you impose your conventions on my code?
For some reason people keep thinking that C<common::sense> imposes
process-wide limits, even though the SYNOPSIS makes it clear that it works
like other similar modules - i.e. only within the scope that C<use>s them.
So, no, we don't - nobody is forced to use this module, and using a module
that relies on common::sense does not impose anything on you.
=item Why do you think only your notion of common::sense is valid?
Well, we don't, and have clearly written this in the documentation to
every single release. We were just faster than anybody else w.r.t.
grabbing the namespace.
=item But everybody knows that you have to use strict and use warnings,
why do you disable them?
Well, we don't do this either - we selectively disagree with the
usefulness of some warnings over others. This module is aimed at
experienced Perl programmers, not people migrating from other languages
who might be surprised about stuff such as C<undef>. On the other hand,
this does not exclude the usefulness of this module for total newbies, due
to its strictness in enforcing policy, while at the same time not limiting
the expressive power of perl.
This module is considerably I<more> strict than the canonical C<use
strict; use warnings>, as it makes all its warnings fatal in nature, so
you can not get away with as many things as with the canonical approach.
This was not implemented in version 1.0 because of the daunting number
of warning categories and the difficulty in getting exactly the set of
warnings you wish (i.e. look at the SYNOPSIS in how complicated it is to
get a specific set of warnings - it is not reasonable to put this into
every module, the maintenance effort would be enormous).
=item But many modules C<use strict> or C<use warnings>, so the memory
savings do not apply?
I suddenly feel sad...
But yes, that's true. Fortunately C<common::sense> still uses only a
minuscule amount of RAM.
=item But it adds another dependency to your modules!
It's a fact, yeah. But it's trivial to install, most popular modules have
many more dependencies. And we consider dependencies a good thing - it
leads to better APIs, more thought about interworking of modules and so
on.
=item Why do you use JSON and not YAML for your META.yml?
This is not true - YAML supports a large subset of JSON, and this subset
is what META.yml is written in, so it would be correct to say "the
META.yml is written in a common subset of YAML and JSON".
The META.yml follows the YAML, JSON and META.yml specifications, and is
correctly parsed by CPAN, so if you have trouble with it, the problem is
likely on your side.
=item But! But!
Yeah, we know.
=back
=head1 AUTHOR
Marc Lehmann <schmorp@schmorp.de>
http://home.schmorp.de/
Robin Redeker, "<elmex at ta-sa.org>".
=cut

View File

@ -0,0 +1,88 @@
=head2 Fri Aug 25 18:32:35 2023: C<Module> L<Canary::Stability|Canary::Stability>
=over 4
=item *
C<installed into: /mnt/store/repo/pc/scripts/local/lib/perl5>
=item *
C<LINKTYPE: dynamic>
=item *
C<VERSION: 2013>
=item *
C<EXE_FILES: >
=back
=head2 Fri Aug 25 18:32:36 2023: C<Module> L<common::sense|common::sense>
=over 4
=item *
C<installed into: /mnt/store/repo/pc/scripts/local/lib/perl5>
=item *
C<LINKTYPE: dynamic>
=item *
C<VERSION: 3.75>
=item *
C<EXE_FILES: >
=back
=head2 Fri Aug 25 18:32:37 2023: C<Module> L<Types::Serialiser|Types::Serialiser>
=over 4
=item *
C<installed into: /mnt/store/repo/pc/scripts/local/lib/perl5>
=item *
C<LINKTYPE: dynamic>
=item *
C<VERSION: 1.01>
=item *
C<EXE_FILES: >
=back
=head2 Fri Aug 25 18:32:40 2023: C<Module> L<JSON::XS|JSON::XS>
=over 4
=item *
C<installed into: /mnt/store/repo/pc/scripts/local/lib/perl5>
=item *
C<LINKTYPE: dynamic>
=item *
C<VERSION: 4.03>
=item *
C<EXE_FILES: bin/json_xs>
=back

26055
scripts/project-conquer.json Normal file

File diff suppressed because it is too large Load Diff

210
scripts/restore_versions.pl Normal file
View File

@ -0,0 +1,210 @@
use strict;
use warnings;
use Path::Tiny;
use JSON::XS; # JSON parser module
use DDP;
use v5.36;
use DateTime::Format::ISO8601;
use Try::Catch;
use Getopt::Long;
use Image::MetaData::JPEG;
use File::stat;
GetOptions(
"pull-versions" => \my $get_versions,
"restore" => \my $do_restores,
"verbose" => \my $verbose,
"print" => \my $print,
"copy" => \my $copy,
"overwrite" => \my $overwrite,
"folder=s" => \my $folder_name,
"update-ts" => \my $update_ts,
"since" => \my $since,
"match" => \my $match,
) or die("Error in command line arguments\n");
$match = '.*' unless $match;
my %restores;
my %stats = (
exists => 0,
restored => 0,
failed => 0,
skipped => 0,
selected => 0,
);
my %folders = (
'koku_downloads' => {
'id' => 'm7ko5-adoga',
'path' => '/mnt/nasty/koku_downloads',
},
'koku_documents' => {
'id' => 'krvxc-ajsfv',
'path' => '/mnt/nasty/Koku',
},
'koku_desktop' => {
'id' => 'vhimk-o3gwx',
'path' => '/mnt/nasty/koku_desktop',
},
'shared_pictures' => {
'id' => 'p6jqq-nouth',
'path' => '/mnt/nasty/Media/Pictures'
},
'koku_music' => {
'id' => 'mdhgr-gd7rv',
'path' => '/mnt/nasty/Media/Music/Natalia'
},
'project-conquer' => {
'id' => 'qypth-g6xkb',
'path' => '/mnt/nasty/project-conquer'
},
'highpoint' => {
'id' => 'jekig-3yzvw',
'path' => '/mnt/nasty/highpoint'
},
);
my $folder_id = $folders{$folder_name}{'id'};
my $target_dir = $folders{$folder_name}{'path'};
my $source_dir = $folders{$folder_name}{'path'} . "/.stversions";
my $version_data = "$folder_name.json";
sub get_versions() {
my $curl = qq{curl -k -X GET -H "X-API-KEY: oemTvSg94cShmEymeuct66GRsyutLTGg" https://dri.tz:8090/rest/folder/versions?folder=$folder_id > $folder_name.json};
system($curl);
say $curl;
}
sub get_json() {
my $json_file = path($version_data);
my $decoded_json;
if (-e $json_file) {
my $json_data = $json_file->slurp;
$decoded_json = decode_json($json_data);
}
else {
die "JSON file not found at $json_file";
}
return $decoded_json;
}
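# The decoded structure is a hash keyed by file path, each value an array
# of version records whose ISO-8601 'versionTime' field is used below.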
sub generate_restores($path, $version_date, $version_time, $restore_path) {
if ($restores{$path}) {
say "Path exists, checking for latest verson" if $verbose;
# compare date and time together so a later same-day version also wins
if ("$version_date$version_time" gt "$restores{$path}{'version_date'}$restores{$path}{'version_time'}") {
say "Later version found" if $verbose;
$restores{$path} = {
'version_date' => $version_date,
'version_time' => $version_time,
'restore_path' => $restore_path,
};
}
else {
say "Keeping current version" if $verbose;
}
}
else {
say "Inserting new path" if $verbose;
$restores{$path} = {
'version_date' => $version_date,
'version_time' => $version_time,
'restore_path' => $restore_path,
};
}
}
sub restore_versions() {
for my $path (sort keys %restores) {
my $source = path("$source_dir/" . $restores{$path}{'restore_path'});
my $dest = path("$target_dir/$path");
say "Source: $source" if $verbose;
say "Dest: $dest" if $verbose;
my $res;
unless (-d $dest->parent) {
$dest->parent->mkdir;
}
if (! -f $dest || $overwrite) {
try {
$res = 'restored';
# Path::Tiny's $source->copy($dest) doesn't preserve timestamps, so shell out to cp -p;
# system() returns 0 on success and doesn't die on failure, so raise explicitly for catch
if ($copy) {
system('cp', '-p', $source, $dest) == 0
or die "cp -p failed: $?";
}
say "$dest successfully restored" if $verbose;
++$stats{'restored'};
}
catch {
$res = 'failed ';
say "$source -> $dest failed to restore" if $verbose;
++$stats{'failed'};
}
}
else {
$res = 'exists ';
++$stats{'exists'};
}
if ($update_ts) {
update_timestamp($path);
say "updated timestamp" if $verbose;
}
my $processed = $stats{'restored'} + $stats{'exists'} + $stats{'failed'};
my $selected = $stats{'selected'};
say sprintf('%.2f', $processed * 100 / $selected) . "% $processed/$selected $res\t$dest" if $print;
}
}
sub do_restores() {
my $decoded_json = get_json();
foreach my $path (sort keys %$decoded_json) {
# Perform your operations on each item
# For example, print the item
for my $version (@{$decoded_json->{$path}}) {
my $ts = DateTime::Format::ISO8601->parse_datetime($version->{'versionTime'});
# DateTime objects can't be string-compared to a plain scalar; compare ISO-8601 text
if (!defined $since or $ts->iso8601 ge $since) {
my $version_date = $ts->ymd('');
my $version_time = $ts->hms('');
my $fullpath = path($path);
# capture into lexicals directly: $1/$2 keep stale values when a match fails
my ($base, $ext) = $fullpath =~ /(.*)(\.\w{2,4})$/;
unless ($base && $ext) {
say $path;
++$stats{'skipped'};
next;
}
++$stats{'selected'};
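# Syncthing stores versioned copies under .stversions as
# "name~YYYYMMDD-HHMMSS.ext"; rebuild that name to locate the copy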
my $restore_path = path("$base~${version_date}-${version_time}${ext}");
generate_restores($path, $version_date, $version_time, $restore_path);
say "$path: $restore_path" if $verbose;
}
}
}
restore_versions();
p %stats;
}
sub update_timestamp($path) {
# return unless $path =~ /\.jpe?g$/i;
try {
my $source = path("$source_dir/" . $restores{$path}{'restore_path'});
my $dest = path("$target_dir/$path");
my $stat = stat($source);
utime($stat->atime, $stat->mtime, $dest);
}
catch {
say "unable to update timestamp";
}
}
get_versions() if $get_versions;
do_restores() if $do_restores;

420735
scripts/shared_pictures.json Normal file

File diff suppressed because it is too large Load Diff