Mobile StatCounter
<div id="mobile_browser-ww-monthly-201710-201810" width="600" height="400" style="width:600px; height:400px;"><!-- You may change the value of width and height above to resize the chart --></div><p>Source: <a href="http://gs.statcounter.com/browser-market-share/mobile/worldwide/#monthly-201710-201810">StatCounter Global Stats - Browser Market Share</a></p><script type="text/javascript" src="http://www.statcounter.com/fusioncharts.js"></script><script type="text/javascript" src="http://gs.statcounter.com/chart.php?mobile_browser-ww-monthly-201710-201810&chartWidth=600"></script>
CONJURES
# create the two ntp applications:
$ juju deploy cs:ntp ntp-service
# ntp-service will use the default pools configuration
$ juju deploy cs:ntp ntp-client
$ juju add-relation ntp-client:master ntp-service   # ntp-client will use ntp-service as its upstream stratum
# Deploy them to the cloud nodes:
$ juju add-relation infra-node ntp-service    # deploys ntp-service to the existing infra-node service
$ juju add-relation compute-node ntp-client   # deploys ntp-client to the existing compute-node service

# Create a single ntp service:
$ juju deploy --channel=candidate cs:ntp
# the ntp service still uses the default pools configuration
$ juju config ntp auto_peers=true
# Deploy to existing nodes:
$ juju add-relation infra-node ntp
$ juju add-relation compute-node ntp
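To confirm that the applications deployed and the relations formed, the generic Juju inspection commands below are enough; this is a minimal sketch using standard juju subcommands, not anything specific to the ntp charm:
$ juju status --relations   # shows units, machines, and the ntp relations
$ juju config ntp           # shows the charm's current configuration, including auto_peers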
kubernetes with conjure - up :
sudo snap install conjure-up --classic
# re-login may be required at this point if you have just installed the snap utility
conjure-up kubernetes

# macOS:
brew install conjure-up
conjure-up kubernetes

juju config kubernetes-master allow-privileged=true
juju config kubernetes-worker allow-privileged=true
juju ssh kubernetes-master/0 -- 'sudo snap set kube-apiserver authorization-mode=RBAC'
sleep 120
juju ssh kubernetes-master/0 -- '/snap/bin/kubectl create clusterrolebinding root-cluster-admin-binding --clusterrole=cluster-admin --user=admin && /snap/bin/kubectl create clusterrolebinding kubelet-node-binding --clusterrole=system:node --user=kubelet'
RUN
export KUBECONFIG=<path-to-kubeconfig>
cat ~/bin/kubectl.conjure<tab>
Add the export line to ~/.bash_profile or an equivalent shell startup script, then run kubectl commands against the cluster, e.g. kubectl get nodes.
Node.js
const fs = require('fs');
fs.createReadStream('SourceFile.txt').pipe(fs.createWriteStream('DestinationFile.txt'));

const fs = require('fs-extra');
// Async with promises:
fs.copy('SourceFile.txt', 'DestinationFile.txt')
  .then(() => console.log('success!'))
  .catch(err => console.error(err));

const fs = require('fs-extra');
try {
  fs.copySync('SourceFile.txt', 'DestinationFile.txt');
  console.log('success!');
} catch (err) {
  console.error(err);
}

const fs = require('fs');
fs.copyFile('SourceFile.txt', 'DestinationFile.txt', (err) => {
  if (err) throw err;
  console.log('SourceFile.txt was copied to DestinationFile.txt');
});

const fs = require('fs');
fs.copyFileSync('SourceFile.txt', 'DestinationFile.txt');
LocationListener
maven { url 'https://maven.google.com' }
class MyLocationListener {
    public MyLocationListener(Context context, Callback callback) {
        // ...
    }
    void start() {
        // connect to system location service
    }
    void stop() {
        // disconnect from system location service
    }
}
class MyActivity extends AppCompatActivity {
    private MyLocationListener myLocationListener;
    @Override
    public void onCreate(...) {
        myLocationListener = new MyLocationListener(this, (location) -> {
            // update UI
        });
    }
    @Override
    public void onStart() {
        super.onStart();
        myLocationListener.start();
        // manage other components that need to respond
        // to the activity lifecycle
    }
    @Override
    public void onStop() {
        super.onStop();
        myLocationListener.stop();
        // manage other components that need to respond
        // to the activity lifecycle
    }
}
maven { url 'https://maven.google.com' }
class MyActivity extends AppCompatActivity {
    private MyLocationListener myLocationListener;
    public void onCreate(...) {
        myLocationListener = new MyLocationListener(this, location -> {
            // update UI
        });
    }
    @Override
    public void onStart() {
        super.onStart();
        Util.checkUserStatus(result -> {
            // what if this callback is invoked AFTER the activity is stopped?
            if (result) {
                myLocationListener.start();
            }
        });
    }
    @Override
    public void onStop() {
        super.onStop();
        myLocationListener.stop();
    }
}
maven { url 'https://maven.google.com' }
class TestObserver implements DefaultLifecycleObserver {
    @Override
    public void onCreate(LifecycleOwner owner) {
        // your code
    }
}

Prefer DefaultLifecycleObserver. Alternatively, annotated methods can observe Lifecycle.Event values directly:

class TestObserver implements LifecycleObserver {
    @OnLifecycleEvent(Lifecycle.Event.ON_STOP)
    void onStopped() {}
}

class TestObserver implements LifecycleObserver {
    @OnLifecycleEvent(Lifecycle.Event.ON_CREATE)
    void onCreated(LifecycleOwner source) {}
    @OnLifecycleEvent(Lifecycle.Event.ON_ANY)
    void onAny(LifecycleOwner source, Lifecycle.Event event) {}
}
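Registering the observer is a separate step not shown above; a minimal sketch, assuming an AppCompatActivity as the LifecycleOwner (the ObserverActivity class name is illustrative):

class ObserverActivity extends AppCompatActivity {
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        // getLifecycle() returns the activity's Lifecycle; the observer
        // will now receive ON_CREATE, ON_STOP, etc. callbacks
        getLifecycle().addObserver(new TestObserver());
    }
}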
Bluemix help
bluemix help
Show help for a specific built-in command or namespace: bluemix help [COMMAND | NAMESPACE]
Display general help for IBM Cloud CLI: bluemix help
Display help for the info command: bluemix help info

bluemix api
Set or view the IBM Cloud API endpoint. bluemix api [API_ENDPOINT] [--unset] [--skip-ssl-validation]
Set the API endpoint to api.chinabluemix.net: bluemix api api.chinabluemix.net or bluemix api https://api.chinabluemix.net --skip-ssl-validation
View the current API endpoint: bluemix api
Unset the API endpoint: bluemix api --unset

bluemix config
Write default values to the configuration file. bluemix config --http-timeout TIMEOUT_IN_SECONDS | --trace (true | false | path/to/file) | --color (true | false) | --locale (LOCALE | CLEAR) | --check-version (true | false)
Set the HTTP request timeout to 30 seconds: bluemix config --http-timeout 30
Enable trace output for HTTP requests: bluemix config --trace true
Trace HTTP requests to the file /home/usera/my_trace: bluemix config --trace /home/usera/my_trace
Disable color output: bluemix config --color false
Set the locale to zh_Hans: bluemix config --locale zh_Hans
Clear the locale setting: bluemix config --locale CLEAR

bluemix info
View basic information for the IBM Cloud API endpoints, such as endpoints for login and exchanging access tokens: bluemix info

bluemix cf
Invoke the embedded CF CLI: bluemix [--quiet] cf COMMAND...
--quiet turns off the message "Invoking cf command...". List services without the message: bluemix -q cf services

bluemix login
Log in a user: bluemix login [-a API_ENDPOINT] [--sso] [-u USERNAME] [-p PASSWORD] [--apikey KEY | @KEY_FILE] [-c ACCOUNT_ID] [-o ORG] [-s SPACE]
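A typical sequence to target an endpoint and log in, using the commands documented above (the US South endpoint, user, org, and space names are illustrative values, not from this document):

bluemix api https://api.ng.bluemix.net
bluemix login -u user@example.com -o my-org -s dev   # prompts for the password interactively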
Spaces Peering
heroku spaces:peering:info
$ heroku spaces:peering:info spaces-peering-example
=== spaces-peering-example Peering Info
AWS Account ID:    847227832372
AWS Region:        us-east-1
AWS VPC ID:        vpc-e291cc85
AWS VPC CIDR:      10.0.0.0/16
Dyno CIDRs:        10.0.128.0/20, 10.0.144.0/20
Unavailable CIDRs: 10.1.0.0/16
AWS CLI command to create the VPC peering connection between your VPC and the space's VPC:
$ aws ec2 create-vpc-peering-connection --vpc-id vpc-1a2b3c4d --peer-vpc-id vpc-11122233
VpcPeeringConnection response:
{
    "VpcPeeringConnection": {
        "Status": {
            "Message": "Initiating Request to 847227832372",
            "Code": "initiating-request"
        },
        "Tags": [],
        "RequesterVpcInfo": {
            "OwnerId": "YOUR_AWS_ID",
            "VpcId": "vpc-YOUR_VPC_ID",
            "CidrBlock": "10.100.0.0/16"
        },
        "VpcPeeringConnectionId": "pcx-111aaa111",
        "ExpirationTime": "2016-09-26T22:57:33.000Z",
        "AccepterVpcInfo": {
            "OwnerId": "847227832372",
            "VpcId": "vpc-e291cc85"
        }
    }
}
The new peering connection shows a status of pending-acceptance:
$ heroku spaces:peering spaces-peering-example
=== spaces-peering-example Peerings
PCX ID         Type            CIDR Block     Status              VPC ID           AWS Account ID  Expires
pcx-111aaa111  unknown         10.100.0.0/16  pending-acceptance  vpc-YOUR_VPC_ID  YOUR_AWS_ID     2016-09-26T22:57:33Z
******         heroku-managed  10.1.0.0/16    active              ******           ******
AWS CLI commands to look up your route tables and create a route to the dyno CIDR over the peering connection:
$ aws ec2 describe-route-tables
$ aws ec2 create-route --route-table-id rtb-your-table-id --destination-cidr-block 10.0.128.0/20 --vpc-peering-connection-id pcx-111aaa111
Allow port 8000 from each of the dyno IP ranges (security group inbound rules):
Type             Protocol  Port Range  Source
Custom TCP Rule  TCP (6)   8000        10.0.144.0/20
Custom TCP Rule  TCP (6)   8000        10.0.128.0/20
Kubernetes
$ kubectl get --all-namespaces services
Default Kubernetes services
Deployment Objects
apiVersion: apps/v1beta2 # for versions before 1.8.0 use apps/v1beta1
kind: Deployment
metadata:
  name: nginx-deployment
spec:
  selector:
    matchLabels:
      app: nginx
  replicas: 2 # tells deployment to run 2 pods matching the template
  template: # create pods using pod definition in this template
    metadata:
      # unlike pod-nginx.yaml, the name is not included in the metadata because a
      # unique name is generated from the deployment name
      labels:
        app: nginx
    spec:
      containers:
      - name: nginx
        image: nginx:1.7.9
        ports:
        - containerPort: 80
deployment-update.yaml
apiVersion: apps/v1beta2 # for versions before 1.8.0 use apps/v1beta1
kind: Deployment
metadata:
  name: nginx-deployment
spec:
  selector:
    matchLabels:
      app: nginx
  replicas: 2
  template:
    metadata:
      labels:
        app: nginx
    spec:
      containers:
      - name: nginx
        image: nginx:1.8 # Update the version of nginx from 1.7.9 to 1.8
        ports:
        - containerPort: 80
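To roll out the updated manifest and watch it converge, standard kubectl commands are enough; a minimal sketch, assuming the manifest above is saved as deployment-update.yaml:

kubectl apply -f deployment-update.yaml               # push the new pod template
kubectl rollout status deployment/nginx-deployment    # wait for the rolling update to finish
kubectl get pods -l app=nginx                         # replacement pods should be running nginx:1.8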
Environment
def create(self, env_dir):
    """
    Create a virtualized Python environment in a directory.

    env_dir is the target directory to create an environment in.
    """
    env_dir = os.path.abspath(env_dir)
    context = self.ensure_directories(env_dir)
    self.create_configuration(context)
    self.setup_python(context)
    self.setup_scripts(context)
    self.post_setup(context)
Build System
class venv.EnvBuilder(system_site_packages=False, clear=False, symlinks=False, upgrade=False, with_pip=False, prompt=None)
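A minimal sketch of driving EnvBuilder directly (the target path is illustrative):

import venv

# create a fresh environment with pip installed, replacing any existing one
builder = venv.EnvBuilder(with_pip=True, clear=True)
builder.create('/tmp/example-env')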
Run Linux on Windows
PS C:\mathi> .\docker.exe run -it ubuntu
Unable to find image 'ubuntu:latest' locally
latest: Pulling from library/ubuntu
d5c6f90da05d: Pull complete
1300883d87d5: Pull complete
c220aa3cfc1b: Pull complete
2e9398f099dc: Pull complete
dc27a084064f: Pull complete
Digest: sha256:34471448724419596ca4e890496d375801de21b0e67b81a77fd6155ce001edad
Status: Downloaded newer image for ubuntu:latest
root@c656f87bde2c:/#
PowerShell
Import-Module 'C:\Program Files\Microsoft Azure Active Directory Connect\AdPrep\AdSyncPrep.psm1'
Initialize-ADSyncDeviceWriteback {Optional: -DomainName [NAME] Optional: -AdConnectorAccount [ACCOUNT]}

PS C:\> Import-Module 'C:\Program Files\Microsoft Azure Active Directory Connect\AdPrep\AdSyncPrep.psm1'
PS C:\> Initialize-ADSyncDeviceWriteback

cmdlet Initialize-ADSyncDeviceWriteback at command pipeline position 1
Supply values for the following parameters:
DomainName: Messtone
AdConnectorAccount: MSOL_47fda606b4b6

Initializing your Active Directory forest and domain for device object write-back from Azure AD.
Configuration Complete
PS C:\>
App Ranking
<a href="https://www.getapp.com/customer-management-software/point-of-sale/#getrank" target="_blank"><img src="//assets.getapp.com/getrank/2017/4/point-of-sale.png" width="1200" border="0" alt="Top Point of Sale (POS) Software 2016 GetApp" /></a>
https://www.ebates.com/flash-sale?ref_id=Robert49741&eeld=33680
PythonParser
/* Parser-tokenizer link implementation */

#include "pgenheaders.h"
#include "tokenizer.h"
#include "node.h"
#include "grammar.h"
#include "parser.h"
#include "parsetok.h"
#include "errcode.h"
#include "graminit.h"

/* Forward */
static node *parsetok(struct tok_state *, grammar *, int, perrdetail *, int *);
static int initerr(perrdetail *err_ret, PyObject *filename);

/* Parse input coming from a string.
   Return error code, print some errors. */
node *
PyParser_ParseString(const char *s, grammar *g, int start, perrdetail *err_ret)
{
    return PyParser_ParseStringFlagsFilename(s, NULL, g, start, err_ret, 0);
}
JavaScript tracking codes :
<script type="text/javascript">
  _etmc.push(["setOrgId", "MID"]);
  _etmc.push(["setUserInfo", {"email": "INSERT_EMAIL_UNIQUE_ID"}]);
  _etmc.push(["trackPageView"]);
</script>
Contact Attributes
<script type="text/javascript">
  _etmc.push(["setOrgId", "MID"]);
  _etmc.push(["setUserInfo", {
    "email": "INSERT_EMAIL_UNIQUE_ID",
    "details": {
      "Gender": "Male",
      "AgeGroup": "Adult"
    }
  }]);
  _etmc.push(["trackPageView"]);
</script>
Future Point
SELECT
PREDICTION(<Field to Predict>,
<Date Field>,
<Start Date for Prediction>,
<Prediction Frequency>,
<Prediction Data Points> [, <Prediction Model>])

SELECT
PREDICTION(<Field to Predict>,
<Date Field>,
<Start Date for Prediction>,
<Prediction Frequency>,
<Prediction Data Points>)
Build Bazel
choco apikey -k <your key here> -s https://chocolatey.org/
Compile.sh
pushd scripts/packages/chocolatey
  ./build.ps1 -version 0.3.2 -mode local
popd
Build the package (with -mode local), then run a webserver (python -m SimpleHTTPServer in scripts/packages/chocolatey is convenient and starts one on http://localhost:8000).
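To verify the locally built package installs cleanly before continuing, a short sketch using standard choco flags (the local URL mirrors the webserver step above; the fallback source is chocolatey.org for dependencies):

choco install bazel --version 0.3.2 --source "http://localhost:8000;https://chocolatey.org/api/v2/"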
choco uninstall bazel
# should remove bazel from the system
tools/parameters.json
./build.ps1 -version <version> -isRelease
./test.ps1 -version <version>
# if test.ps1 passes
choco push bazel.x.y.z.nupkg --source https://chocolatey.org/
Microsoft C++
/en/us/videos/introduction-to-big dl-apache-spark-part1?cid=em-elq-308779&utm_source=elq&utm_medium=email&utm_campaign=30879&elq_cid=1867908&elqTrackId=1edaa98b56a349559ae2da4df3fbabb&elq=00c4d18b9a0248b385712abba2ede268&elqaid=30879&elqat=1&elqCampaignld=16974
<iframe src='//players.brightcove.net/740838651001/default_default/index.html?videoId=5401303100001' allowfullscreen frameborder=0></iframe>
Add File
CodeBuilder (Code > Generate > All Code) in the UIR editor window.
#include <cvirte.h>    /* Needed if linking in external compiler; harmless otherwise */
#include <userint.h>
#include "onoff.h"
int InitUIForDLL(void);
void DiscardUIObjectsForDLL(void);
static int panelHandle;
int __stdcall DLLMain(HINSTANCE hinstDLL, DWORD fdwReason, LPVOID lpReserved)
{
    switch (fdwReason)
    {
        case DLL_PROCESS_ATTACH:
            if (InitCVIRTE(hinstDLL, 0, 0) == 0)   /* Needed if linking in external compiler; harmless otherwise */
                return 0;   /* out of memory */
            break;
        case DLL_PROCESS_DETACH:
            DiscardUIObjectsForDLL();   /* Discard the panels loaded in InitUIForDLL */
            CloseCVIRTE();              /* Needed if linking in external compiler; harmless otherwise */
            break;
    }
    return 1;
}
int __stdcall DLLEntryPoint(HINSTANCE hinstDLL, DWORD fdwReason, LPVOID lpvReserved)
{
    /* Included for compatibility with Borland */
    return DLLMain(hinstDLL, fdwReason, lpvReserved);
}
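The bodies of InitUIForDLL and DiscardUIObjectsForDLL are not shown above; a minimal sketch of what they typically do in a LabWindows/CVI DLL (the PANEL constant and the onoff.uir file name are assumptions based on the include of "onoff.h"):

int InitUIForDLL(void)
{
    /* Load and display the panel defined in the .uir file */
    if ((panelHandle = LoadPanel(0, "onoff.uir", PANEL)) < 0)
        return -1;
    DisplayPanel(panelHandle);
    return 0;
}

void DiscardUIObjectsForDLL(void)
{
    /* Free the panel loaded in InitUIForDLL */
    if (panelHandle > 0)
        DiscardPanel(panelHandle);
}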
Glob.Glob
Hitachivantara video
https://www.hitachivantara.com/en-us/news-resources/resources.html#vid=5578902540001
<iframe src='//players.brightcove.net/3971130171001/H1AuzxZlx_default/index.html?videoId=5578902540001' allowfullscreen frameborder=0></iframe>
>>> import glob
>>> glob.glob('./[0-9].*')
['./1.gif', './2.txt']
>>> glob.glob('*.gif')
['1.gif', 'card.gif']
>>> glob.glob('?.gif')
['1.gif']
>>> glob.glob('**/*.txt', recursive=True)
['2.txt', 'sub/3.txt']
>>> glob.glob('./**/', recursive=True)
['./', './sub/']
Files starting with a dot are not matched by default. For example, consider a directory containing card.gif and .card.gif:
>>> import glob
>>> glob.glob('*.gif')
['card.gif']
>>> glob.glob('.c*')
['.card.gif']
Vertica
Member Function Documentation
void Vertica::VerticaBlock::addCol(char *arg, int colstride, const VerticaType &dt, const std::string fieldName = "")

Adds the location for reading a particular argument.
Parameters:
  arg        The base location to find data.
  colstride  The stride between data instances.
  dt         The type of input.
  fieldName  The name of the field.

void Vertica::PartitionWriter::copyFromInput(size_t dstIdx, PartitionReader &input_reader, size_t srcIdx)

Copies a column from the input reader to the output writer. The data types and sizes of the source and destination columns must match exactly.
Parameters:
  dstIdx        The destination column index (in the output writer).
  input_reader  The input reader from which to copy a column.
  srcIdx        The source column index (in the input reader).

template<class T>
const T *Vertica::VerticaBlock::getColPtr(size_t idx) const

Returns a pointer to the idx'th argument, cast appropriately.
Example:
  const vint *a = arg_reader->getColPtr<vint>(0);

size_t Vertica::VerticaBlock::getNumCols() const

Returns the number of columns held by this block.

size_t Vertica::VerticaBlock::getNumRows() const

Returns the number of rows held by this block.

const SizedColumnTypes &Vertica::VerticaBlock::getTypeMetaData() const

Returns information about the types and numbers of arguments. Referenced by Vertica::PartitionWriter::copyFromInput().

SizedColumnTypes &Vertica::VerticaBlock::getTypeMetaData()

Returns information about the types and numbers of arguments.

virtual bool Vertica::AnalyticPartitionWriter::getWriteableBlock()

Gets a writeable block of data and positions the cursor at the beginning. Reimplemented from Vertica::PartitionWriter.

void Vertica::PartitionWriter::setInt(size_t idx, vint r)

Setter method: sets the idx'th argument to the integer value r.

void Vertica::PartitionWriter::setNull(size_t idx)

Sets the idx'th argument to null.
Parameters:
  idx  The column number in the row to set to null.
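A minimal sketch, not taken from the Vertica documentation, of how these reader and writer accessors are typically combined inside a user-defined transform's processPartition; the doubling of column 0 is purely illustrative:

// Hypothetical transform body; reads integer column 0 and writes a derived value.
virtual void processPartition(ServerInterface &srvInterface,
                              PartitionReader &input_reader,
                              PartitionWriter &output_writer)
{
    do {
        const vint value = input_reader.getIntRef(0);  // read column 0 of the current row
        if (value == vint_null) {
            output_writer.setNull(0);                  // propagate NULL to the output
        } else {
            output_writer.setInt(0, value * 2);        // write a derived value
        }
        output_writer.next();                          // advance the output cursor
    } while (input_reader.next());                     // advance the input cursor
}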
X_Train
# -*- coding: utf-8 -*-
from __future__ import division, print_function, absolute_import

import tensorflow as tf
from six.moves import urllib
import tarfile
import tflearn
from tflearn.data_utils import shuffle, to_categorical
from tflearn.layers.core import input_data, fully_connected
from tflearn.layers.conv import conv_2d, max_pool_2d
from tflearn.layers.estimator import regression
from tflearn.data_preprocessing import ImagePreprocessing
from tflearn.data_augmentation import ImageAugmentation
from tensorflow.python.lib.io import file_io
import os
import sys
import numpy as np
import pickle
import argparse
import scipy

FLAGS = None
def load_data(dirname, one_hot=False):
    X_train = []
    Y_train = []
    for i in range(1, 6):
        fpath = os.path.join(dirname, 'data_batch_' + str(i))
        data, labels = load_batch(fpath)
        if i == 1:
            X_train = data
            Y_train = labels
        else:
            X_train = np.concatenate([X_train, data], axis=0)
            Y_train = np.concatenate([Y_train, labels], axis=0)

    fpath = os.path.join(dirname, 'test_batch')
    X_test, Y_test = load_batch(fpath)

    X_train = np.dstack((X_train[:, :1024], X_train[:, 1024:2048],
                         X_train[:, 2048:])) / 255.
    X_train = np.reshape(X_train, [-1, 32, 32, 3])
    X_test = np.dstack((X_test[:, :1024], X_test[:, 1024:2048],
                        X_test[:, 2048:])) / 255.
    X_test = np.reshape(X_test, [-1, 32, 32, 3])

    if one_hot:
        Y_train = to_categorical(Y_train, 10)
        Y_test = to_categorical(Y_test, 10)

    return (X_train, Y_train), (X_test, Y_test)
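A hypothetical usage of the loader above, assuming the CIFAR-10 python batches were extracted to ./cifar-10-batches-py and that a load_batch helper (not shown here) unpickles a single batch file:

(X, Y), (X_test, Y_test) = load_data('cifar-10-batches-py', one_hot=True)
X, Y = shuffle(X, Y)   # shuffle the training data before feeding it to tflearn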
ERLANG
Build
$ ./rebar3 as public compile
Develop
run
$ INSTANCE_NAME=`hostname` \
  LOGPLEX_CONFIG_REDIS_URL="redis://localhost:6379" \
  LOGPLEX_REDGRID_REDIS_URL="redis://localhost:6379" \
  LOCAL_IP="127.0.0.1" \
  LOGPLEX_COOKIE=123 \
  LOGPLEX_AUTH_KEY=123 \
  erl -name logplex@`hostname` -pa ebin -env ERL_LIBS deps -s logplex_app -setcookie ${LOGPLEX_COOKIE} -config sys
Docker
docker-compose build          # RUN ONCE
docker-compose run compile    # RUN every time source files change
docker-compose up logplex     # RUN logplex post-compilation
docker exec -it logplex_logplex_1 bash -c "TERM=xterm bin/connect"
DATA SETUP
1> logplex_cred:store(logplex_cred:grant('full_api', logplex_cred:grant('any_channel', logplex_cred:rename(<<"local-Test">>, logplex_cred:new(<<"local">>, <<"password">>))))).

$ curl http://local:password@localhost:8001/healthcheck
$ curl -d '{"tokens": ["app"]}' http://local:password@localhost:8001/channels
{"channel_id":1,"tokens":{"app":"t.feff49f1-4d55-4c9e-aee1-2d2b10e69b42"}}
Post a log message:
$ curl -v \
  -H "Content-Type: application/logplex-1" \
  -H "Logplex-Msg-Count: 1" \
  -d "116 <134>1 2012-12-10T03:00:48.123456Z erlang t.feff49f1-4d55-4c9e-aee1-2d2b10e69b42 console.1 - Logsplat test message 1" \
  http://local:password@localhost:8601/logs
Docker-sessions
$ curl -d '{"channel_id": "1"}' http://local:password@localhost:8001/v2/sessions
{"url": "/sessions/9d53bf70-7964-4429-a589-aaa4df86fead"}
fetch
$ curl http://local:password@localhost:8001/sessions/9d53bf70-7964-4429-a589-aaa4df86fead
2012-12-10T03:00:48Z+00:00 app[console.1]: test message 1
License Apache 2.0
( ("Google Analytics")
*/
public class AnalyticsApplication extends Application {
    private static GoogleAnalytics sAnalytics;
    private static Tracker sTracker;

    @Override
    public void onCreate() {
        super.onCreate();
        sAnalytics = GoogleAnalytics.getInstance(this);
    }

    /**
     * Gets the default {@link Tracker} for this {@link Application}.
     * @return tracker
     */
    synchronized public Tracker getDefaultTracker() {
        // To enable debug logging use: adb shell setprop log.tag.GAv4 DEBUG
        if (sTracker == null) {
            sTracker = sAnalytics.newTracker(R.xml.global_tracker);
        }
        return sTracker;
    }
}
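A short usage sketch of the class above from an activity (the screen name "Home" is illustrative): obtain the shared Tracker and send a screen view hit.

Tracker tracker = ((AnalyticsApplication) getApplication()).getDefaultTracker();
tracker.setScreenName("Home");                              // name reported in GA screen reports
tracker.send(new HitBuilders.ScreenViewBuilder().build());  // dispatch the screen view hit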
Multiple Apps
// Initialize the default app
var defaultApp = admin.initializeApp(defaultAppConfig);
console.log(defaultApp.name);  // "[DEFAULT]"

// Retrieve services via the defaultApp variable...
var defaultAuth = defaultApp.auth();
var defaultDatabase = defaultApp.database();

// ...or use the equivalent shorthand notation
defaultAuth = admin.auth();
defaultDatabase = admin.database();
Multiple apps, each with their own configuration:

// Initialize the default app
admin.initializeApp(defaultAppConfig);

// Initialize another app with a different config
var otherApp = admin.initializeApp(otherAppConfig, "other");

console.log(admin.app().name);  // "[DEFAULT]"
console.log(otherApp.name);     // "other"

// Use the shorthand notation to retrieve the default app's services
var defaultAuth = admin.auth();
var defaultDatabase = admin.database();

// Use the otherApp variable to retrieve the other app's services
var otherAuth = otherApp.auth();
var otherDatabase = otherApp.database();
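A brief sketch of actually using the two database handles retrieved above (the paths and values are hypothetical):

// write to the default app's Realtime Database
defaultDatabase.ref('status/default').set({ online: true });

// read from the other app's database
otherDatabase.ref('status/other').once('value').then(function(snapshot) {
  console.log(snapshot.val());
});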
Node.js
$ npm install firebase-admin --save

var admin = require("firebase-admin");

import * as admin from "firebase-admin";
Initialize SDK Snippet :
var admin = require("firebase-admin");
var serviceAccount = require("path/to/serviceAccountKey.json");

admin.initializeApp({
  credential: admin.credential.cert(serviceAccount),
  databaseURL: "https://<DATABASE_NAME>.firebaseio.com"
});
Node.js :
admin.initializeApp({
  credential: admin.credential.cert({
    projectId: "<PROJECT_ID>",
    clientEmail: "foo@<PROJECT_ID>.iam.gserviceaccount.com",
    privateKey: "-----BEGIN PRIVATE KEY-----\n-----END PRIVATE KEY-----\n"
  }),
  databaseURL: "https://<DATABASE_NAME>.firebaseio.com"
});
Services fetch:
admin.initializeApp({
  credential: admin.credential.applicationDefault(),
  databaseURL: "https://<DATABASE_NAME>.firebaseio.com"
});

// In Cloud Functions:
admin.initializeApp(functions.config().firebase);
var refreshToken; // Get refresh token from the OAuth2 flow

admin.initializeApp({
  credential: admin.credential.refreshToken(refreshToken),
  databaseURL: "https://<DATABASE_NAME>.firebaseio.com"
});