Tuesday, June 8, 2021

Hello Swift async and await for Swift 5.5

(1) This is to test the new async and await feature of the Swift 5.5 development snapshot on Linux. First, download and install the required packages on Ubuntu 20.04.
Shell script for setting up Swift 5.5
sudo apt-get install -y binutils git gnupg2 libc6-dev libcurl4 libedit2 libgcc-9-dev libpython2.7 libsqlite3-0 libstdc++-9-dev libxml2 libz3-dev pkg-config tzdata zlib1g-dev
cd ${HOME}
wget https://swift.org/builds/swift-5.5-branch/ubuntu2004/swift-5.5-DEVELOPMENT-SNAPSHOT-2021-06-02-a/swift-5.5-DEVELOPMENT-SNAPSHOT-2021-06-02-a-ubuntu20.04.tar.gz
tar xzvf swift-5.5-DEVELOPMENT-SNAPSHOT-2021-06-02-a-ubuntu20.04.tar.gz
export PATH=${HOME}/swift-5.5-DEVELOPMENT-SNAPSHOT-2021-06-02-a-ubuntu20.04/usr/bin:$PATH
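After exporting the PATH, a quick check that the snapshot toolchain is the one being picked up:

swift --version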


(2) Create a Swift package for an executable
Shell script testing async and await
cd ${HOME}
mkdir -p ${HOME}/AsyncSwift
cd ${HOME}/AsyncSwift
swift package init --type executable


(3) Amend ${HOME}/AsyncSwift/Package.swift to add the Swift settings flags (-parse-as-library is needed for the @main entry point)
Package.swift
// swift-tools-version:5.5
// The swift-tools-version declares the minimum version of Swift required to build this package.
import PackageDescription

let package = Package(
    name: "AsyncSwift",
    products: [
        .executable(name: "AsyncSwift", targets: ["AsyncSwift"])
    ],
    dependencies: [
        // none for now..
    ],
    targets: [
        .executableTarget(name: "AsyncSwift",
            swiftSettings: [
                .unsafeFlags([
                    "-parse-as-library",
                    "-Xfrontend", "-disable-availability-checking",
                    "-Xfrontend", "-enable-experimental-concurrency",
                ])
            ]
        ),
        .testTarget(name: "AsyncSwiftTests", dependencies: ["AsyncSwift"]),
    ]
)


(4) Edit ${HOME}/AsyncSwift/Sources/AsyncSwift/main.swift
Sources/AsyncSwift/main.swift
import Foundation

func calculateFirstNumber() async -> Int {
    print("First number is now being calculated...")
    return await withUnsafeContinuation { c in
        DispatchQueue.main.asyncAfter(deadline: .now() + 2) {
            print("First number is now ready.")
            c.resume(returning: 42)
        }
    }
}

func calculateSecondNumber() async -> Int {
    print("Second number is now being calculated...")
    return await withUnsafeContinuation { c in
        DispatchQueue.main.asyncAfter(deadline: .now() + 1) {
            print("Second number is now ready.")
            c.resume(returning: 6)
        }
    }
}

func calculateThirdNumber() async -> Int {
    print("Third number is now being calculated...")
    return await withUnsafeContinuation { c in
        DispatchQueue.main.asyncAfter(deadline: .now() + 3) {
            print("Third number is now ready.")
            c.resume(returning: 69)
        }
    }
}

func trace(task: Int) async {
    // sleep for a random number of seconds in the [0, task] range
    print("Task \(task) started")
    sleep(UInt32.random(in: 0...UInt32(task)))
    print("Task \(task) completed")
}

@main
struct MyProgram {
    static func main() async {
        print("Hello, swift async!\n")
        print("\nSerial queue, asynchronous execution\n")
        for i in 5...10 {
            print("Submitting task \(i)")
            async {
                await trace(task: i)
            }
        }
        async let x = calculateFirstNumber()
        let y = await calculateSecondNumber()
        let z = await calculateThirdNumber()
        await print(x + y + z)
        sleep(2)
        await print("Program ended")
    }
}

/*
[6/6] Build complete!
Hello, swift async!

First number is now being calculated...
First number is now ready.
Second number is now being calculated...
Second number is now ready.
Third number is now being calculated...
Third number is now ready.
117
*/
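In main() above, only the first calculation is started concurrently with async let; the second and third are awaited one after another. As a minimal sketch (using the same functions as above), all three could be run concurrently and summed like this:

func calculateAllNumbers() async -> Int {
    // all three calculations start immediately and run concurrently
    async let x = calculateFirstNumber()
    async let y = calculateSecondNumber()
    async let z = calculateThirdNumber()
    // suspend until all three results are ready
    return await x + y + z
}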


(5) Build and run the executable
Shell script for building and running
cd ${HOME}/AsyncSwift
swift package clean
swift build
swift run


Monday, May 17, 2021

How to compile QuantLib-Python for Raspberry Pi 4B arm32 and arm64

Raspberry Pi ships with gcc-8 and Python 3.7 on its 32-bit / 64-bit Buster images, and compiling QuantLib-Python on this machine can run out of memory. Cross compiling in Docker may come with a different, incompatible Python version. The trick for compiling on the Raspberry Pi itself is to set up swap (say 2GB, on a 4GB RAM model) and to turn off the debug -g flag when compiling the Python package; a sketch of the swap setup follows.
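For reference, a minimal sketch of setting the swap size on Raspberry Pi OS, assuming the stock dphys-swapfile service (CONF_SWAPSIZE is in MB):

# set swap to 2GB in the dphys-swapfile config
sudo sed -i 's/^CONF_SWAPSIZE=.*/CONF_SWAPSIZE=2048/' /etc/dphys-swapfile
sudo dphys-swapfile setup
sudo dphys-swapfile swapon
free -mh   # verify the swap size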
Shell script for building arm32 version
# install necessary packages for building
sudo apt update
sudo apt install -y build-essential wget libbz2-dev libboost-test1.67.0 libboost-test-dev
# Get QuantLib-1.22 and build the static library
cd ${HOME}
wget https://github.com/lballabio/QuantLib/releases/download/QuantLib-v1.22/QuantLib-1.22.tar.gz
tar xzf QuantLib-1.22.tar.gz
cd QuantLib-1.22/
./configure --prefix=/usr --disable-shared CXXFLAGS=-O3
make -j 4 && make install
sudo ldconfig
# Set up and enable swap, and check it is at least 2GB
sudo dphys-swapfile setup
sudo dphys-swapfile swapon
free -mh
sudo apt install -y python3 python3-pip python-dev libgomp1
# Get QuantLib-SWIG-1.22 and configure it
cd ${HOME}
wget --no-check-certificate https://github.com/lballabio/QuantLib-SWIG/releases/download/QuantLib-SWIG-v1.22/QuantLib-SWIG-1.22.tar.gz
tar xfz QuantLib-SWIG-1.22.tar.gz
cd QuantLib-SWIG-1.22/
./configure CXXFLAGS="-O2 --param ggc-min-expand=1 --param ggc-min-heapsize=32768 -Wno-deprecated-declarations -Wno-misleading-indentation" PYTHON=/usr/bin/python3
# Compile it manually and remove the -g flag
# (CXX="echo gcc" makes setup.py print the compile commands instead of running them)
cd Python/
mkdir -p build/temp.linux-armv7l-3.7/QuantLib
export CXX="echo gcc"; python3 setup.py bdist_wheel
g++ -fwrapv -O2 -Wall -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -fPIC -DNDEBUG -I/usr/include/python3.7m -I/usr/include -c QuantLib/quantlib_wrap.cpp -o build/temp.linux-armv7l-3.7/QuantLib/quantlib_wrap.o -Wno-unused --param ggc-min-expand=1 --param ggc-min-heapsize=32768 -Wno-deprecated-declarations -Wno-misleading-indentation
mkdir -p build/lib.linux-armv7l-3.7/QuantLib/
g++ -shared -Wl,-z,relro -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 build/temp.linux-armv7l-3.7/QuantLib/quantlib_wrap.o -lQuantLib -o build/lib.linux-armv7l-3.7/QuantLib/_QuantLib.cpython-37m-arm-linux-gnueabihf.so
# create the wheel file
python3 setup.py bdist_wheel
# Upgrade pip and install the wheel file
/usr/bin/python3 -m pip install --upgrade pip
pip3 install dist/QuantLib-1.22-cp37-cp37m-linux_armv7l.whl
# Or alternatively install as a site-package
sudo python3 setup.py install
# Test examples after installation
pip3 install pandas
python3 examples/bonds.py


Compiling for Raspberry Pi arm64 is very similar, but the -fPIC flag has to be added when building the QuantLib static library.
Shell script for building arm64 version
# install necessary packages for building
sudo apt update
sudo apt install -y build-essential wget libbz2-dev
sudo apt install -y libboost-test1.67.0 libboost-test-dev
cd ${HOME}
wget https://github.com/lballabio/QuantLib/releases/download/QuantLib-v1.22/QuantLib-1.22.tar.gz
tar xzf QuantLib-1.22.tar.gz
cd QuantLib-1.22/
# enable the -fPIC flag when building the static library
./configure --prefix=/usr --disable-shared CXXFLAGS="-O3 -fPIC"
make -j 4 && make install
sudo ldconfig
# If the Raspberry Pi has 8GB RAM, there is no need to set up and enable swap
sudo apt install -y python3 python3-pip python-dev libgomp1
# Get QuantLib-SWIG-1.22 and configure it
cd ${HOME}
wget https://github.com/lballabio/QuantLib-SWIG/releases/download/QuantLib-SWIG-v1.22/QuantLib-SWIG-1.22.tar.gz
tar xzf QuantLib-SWIG-1.22.tar.gz
cd QuantLib-SWIG-1.22/
./configure CXXFLAGS="--param ggc-min-expand=1 --param ggc-min-heapsize=32768 -fPIC -Wno-deprecated-declarations -Wno-misleading-indentation" PYTHON=/usr/bin/python3
# Compile it manually and remove the -g flag
cd Python/
mkdir -p build/temp.linux-aarch64-3.7/QuantLib/
g++ -fwrapv -O2 -Wall -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -fPIC -I/usr/include/python3.7m -I/usr/include -c QuantLib/quantlib_wrap.cpp -o build/temp.linux-aarch64-3.7/QuantLib/quantlib_wrap.o -Wno-unused --param ggc-min-expand=1 --param ggc-min-heapsize=32768 -fno-strict-aliasing -Wno-unused -Wno-uninitialized -Wno-sign-compare -Wno-write-strings -Wno-deprecated-declarations -Wno-misleading-indentation
mkdir -p build/lib.linux-aarch64-3.7/QuantLib/
g++ -shared -Wl,-z,relro -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 build/temp.linux-aarch64-3.7/QuantLib/quantlib_wrap.o -lQuantLib -o build/lib.linux-aarch64-3.7/QuantLib/_QuantLib.cpython-37m-aarch64-linux-gnu.so
# create the wheel file
python3 setup.py bdist_wheel
# Upgrade pip and install the wheel file
/usr/bin/python3 -m pip install --upgrade pip
pip3 install dist/QuantLib-1.22-cp37-cp37m-linux_aarch64.whl
# Or alternatively install as a site-package
sudo python3 setup.py install
# Test examples after installation
pip3 install pandas
python3 examples/bonds.py


File Download QuantLib-1.22-cp37-cp37m-linux_armv7l.whl https://mega.nz/file/mtJSxZTT#fzDDHw0AIqz-2LIspBGNZLoyW4_MT9qjft_b-ITTA8w

File Download QuantLib-1.22-cp37-cp37m-linux_aarch64.whl https://mega.nz/file/WlAEXJCZ#UKFnlTrfQfRNzFW-OJbXHLFIHwzCw_189HvMa_xU4Oo

Wednesday, May 12, 2021

How to install a docker client and connect to a Docker Desktop engine on another macOS or Windows machine.

Use Docker Desktop on Windows 10 / macOS as the engine, and connect from a docker client on Linux/macOS/Android.
Why use a docker client? Because you don't want to, or cannot, install the docker engine in the client environment and just want to connect to a docker engine on the local LAN.
(1) For a Windows 10 host, after installing Docker Desktop
# Edit C:\ProgramData\Docker\config\daemon.json and add (see the sketch after this step)
"hosts" : ["tcp://0.0.0.0:2375"],
# Change port forwarding to the docker WSL backend; run this in PowerShell in admin mode
netsh interface portproxy add v4tov4 listenport=2375 listenaddress=192.168.64.1 connectaddress=127.0.0.1 connectport=2375
# Change Windows Defender Firewall and add an incoming rule to enable port 2375
# Refer to this for setting openssh server authorized_keys with proper file permissions:
# https://superuser.com/questions/1445976/windows-ssh-server-refuses-key-based-authentication-from-client
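For clarity, a minimal daemon.json might then look like the sketch below (keep any other keys your installation already has). Note that tcp://0.0.0.0:2375 is unauthenticated and unencrypted, so only expose it on a trusted LAN.

{
  "hosts" : ["tcp://0.0.0.0:2375"]
}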

(2) For a macOS host, after installing Docker Desktop
# Run ssh-keygen on the client and ssh-copy-id to the host, e.g. remote host username with IP address 192.168.64.1
ssh-keygen -t rsa
ssh-copy-id username@192.168.64.1
# edit sshd_config
sudo vi /private/etc/ssh/sshd_config
# and add
PermitUserEnvironment PATH,LANG
# edit ~/.docker/daemon.json and add
"hosts" : ["tcp://0.0.0.0:2375"],
# add to ~/.ssh/environment on the host
PATH=$PATH:/usr/local/bin
# restart sshd by toggling macOS System Preferences -> Sharing -> Remote Login

(3) To install the docker client for Linux and connect using tcp
cd ~/
wget https://download.docker.com/linux/static/stable/x86_64/docker-20.10.6.tgz
tar -xzvf docker-20.10.6.tgz
cd docker
./docker -H tcp://<remote host ip address>:2375 images


(3.1) To install the docker client in the Termux app on Android and connect using tcp
wget https://download.docker.com/linux/static/stable/aarch64/docker-20.10.6.tgz

tar xzvf docker-20.10.6.tgz
mv docker/docker /data/data/com.termux/files/usr/bin/
docker -H tcp://<remote host ip address>:2375 images


(4) To install the docker client for macOS and connect using ssh
cd ~/
wget https://download.docker.com/mac/static/stable/x86_64/docker-20.10.6.tgz
#or curl -OL https://download.docker.com/mac/static/stable/x86_64/docker-20.10.6.tgz
#or curl -OL https://download.docker.com/mac/static/stable/aarch64/docker-20.10.6.tgz
tar xzvf docker-20.10.6.tgz
xattr -rc docker
cd docker
sudo mkdir -p /usr/local/bin
sudo mv * /usr/local/bin/
docker -H ssh://username@<remote host ip address> images


(5) Or simply add the corresponding variables to ~/.bashrc
unset DOCKER_HOST
# for tcp connection to Windows 10 host
export DOCKER_HOST=tcp://192.168.64.1:2375

# for ssh connection to macOS host using SSH
export DOCKER_HOST=ssh://user@192.168.64.1
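With DOCKER_HOST set, docker commands in that shell go to the remote engine; a quick check:

docker version
docker images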


Sunday, May 2, 2021

How to install openssh server in WSL2

(1) This is the guide to install an openssh server and connect to the WSL2 shell when logging in via ssh from other machines on the LAN network.

https://www.hanselman.com/blog/the-easy-way-how-to-ssh-into-bash-and-wsl2-on-windows-10-from-an-external-machine



(2) However, if you still cannot connect from other machines on the LAN network after setting all the firewall rules, do this in PowerShell with admin rights.

Set-ExecutionPolicy Unrestricted -Force



(3) If you want to set up a jupyter-notebook server in WSL2 and connect from other machines on the LAN network, follow this guide.
https://medium.com/swlh/how-to-set-up-a-jupiter-notebook-server-and-access-it-from-a-local-or-remote-network-on-windows-d335c5ba490d

(4) The important setup steps are to open the Windows firewall rule and to run the script that port forwards to the WSL2 VM, as in the download below (a rough sketch of what such a script does follows it).

wget https://gist.githubusercontent.com/david-littlefield/f45999c069e0b6b68bdae829d8616727/raw/80a60968f1bdda598eaf275bf1300bb3451d45ab/jupyter_notebook_port_wsl2.ps1
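The gist automates roughly the following idea; a hedged PowerShell sketch (run as admin; the port number and the exact script in the gist may differ):

# find the current WSL2 VM address
$wslIp = (wsl hostname -I).Trim().Split()[0]
# forward the Windows port to the WSL2 VM (here port 8888 for jupyter-notebook)
netsh interface portproxy add v4tov4 listenport=8888 listenaddress=0.0.0.0 connectport=8888 connectaddress=$wslIp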


(5) It is advisable to log in from the external internet to local LAN machines via a VPN server only, rather than exposing or forwarding the local ports directly via the router.

(6) For setting up openssh server authorized_keys with proper file permissions, refer to https://superuser.com/questions/1445976/windows-ssh-server-refuses-key-based-authentication-from-client

Saturday, May 1, 2021

How to use the Chrome browser to scrape websites using JavaScript

(1) This is to demo how to scrape a website using the Chrome browser and save the JSON text results to the local drive.

(2) First, launch the desktop version of the Chrome browser and go to this website "http://aastocks.com/en/stocks/market/calendar.aspx?type=5"

(3) Install "JQuery Inject" as Chrome Extension and enable it in current browser session.

(4) Open Chrome Developer Tools (Ctrl-Shift-I), select the Console tab, and enter the following code in 3 steps.
Chrome Browser console code : Step 1
// Step 1
// define an array for the scraped objects; it should be stored as a global variable
var scrapeResults = [];

Console Code : Step 2
// Step 2
// scrape the page
$("table.CalendarResultTable > tbody > .crtRow").each((index, element) => {
  const tds = $(element).find("td");
  if (index === 0) { previousDate = $(tds[0]).text(); }
  if ($(tds[0]).text().trim() !== '') { previousDate = $(tds[0]).text(); }
  const date = previousDate;
  const namecell = $(tds[1]).find("a");
  const name = $(tds[1]).text().replace('\n','').split(/[0-9]+.HK/)[0].trim();
  const stockcode = $(namecell).text();
  const stockurl = $(namecell).attr("href");
  const dividend = $(tds[2]).text().trim().split('D:')[1];
  const dividenddate = $(tds[3]).text().trim().split(/Ex-Date: | Payable: | Book Close: /);
  const exdate = dividenddate[1];
  const payable = dividenddate[2];
  const bookclose = dividenddate[3];
  const scrapeResult = { date, name, stockcode, stockurl, dividend, exdate, payable, bookclose };
  //console.log(scrapeResult);
  if (!scrapeResults.find(({stockcode}) => stockcode === scrapeResult.stockcode)) {
    scrapeResults.push(scrapeResult);
  }
});
// copy to clipboard
copy(scrapeResults);
If there is more than one page, click to go to the next page and repeat Step 2 to scrape again. After finishing with all the pages, run the Step 3 code to download to the local drive. scrapeResults must be stored as a global variable for this to work.

Console Code : Step 3
// Step 3
// define a download function using the web API
function download(content, fileName, contentType) {
  var a = document.createElement("a");
  var file = new Blob([content], {type: contentType});
  a.href = URL.createObjectURL(file);
  a.download = fileName;
  a.click();
}
// download json to a local folder
var jsonData = JSON.stringify(scrapeResults);
var currentdate = new Date();
download(jsonData, currentdate.getFullYear()
  + ('0'+(currentdate.getMonth()+1)).slice(-2)
  + ('0'+currentdate.getDate()).slice(-2) + '_'
  + ('0'+currentdate.getHours()).slice(-2)
  + ('0'+currentdate.getMinutes()).slice(-2)
  + ('0'+currentdate.getSeconds()).slice(-2)
  + '_stockjson.txt', 'text/plain');


(5) The same jQuery scraping code above can be used in a nodejs script for automation. Just add the "request request-promise cheerio" packages to the project; a minimal sketch follows.
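A minimal sketch, assuming the same page and selectors as the Step 2 console code above (cheerio provides the jQuery-like $ on the server):

// npm install request request-promise cheerio
const request = require("request-promise");
const cheerio = require("cheerio");

async function scrape() {
  const html = await request.get("http://aastocks.com/en/stocks/market/calendar.aspx?type=5");
  const $ = cheerio.load(html);  // jQuery-like API over the fetched HTML
  $("table.CalendarResultTable > tbody > .crtRow").each((index, element) => {
    const tds = $(element).find("td");
    // the same per-row extraction as the Step 2 console code goes here
    console.log($(tds[1]).text().trim());
  });
}

scrape();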

(6) For nodejs, the save function should be
nodejs script
var fs = require('fs');
fs.writeFile("json.txt", jsonData, function(err) {
  if (err) { console.log(err); }
});


(7) For browser console code without jQuery Inject, or if you don't want to import the jQuery library, use the querySelectorAll() function and Object.values to convert to objects, as in the demo below.
console code
// Go to http://www.aastocks.com/en/stocks/market/calendar.aspx?type=1, then open the browser
// (Chrome, Firefox, Safari) developer tools (Cmd + Opt + I on mac, or Ctrl + Shift + I on win)
// and enter the following console code to run.
var scrapeResults = '';
document.querySelectorAll('tr.crtRow').forEach(function(item) {
  const first = Object.values(item.querySelectorAll('.first'))[0];
  if (typeof first !== 'undefined' && first !== null) {
    console.log(first.textContent ?? first.textContent.trim());
    scrapeResults = scrapeResults.concat(first.textContent ?? first.textContent.trim(), ' ');
  }
  const second = Object.values(item.querySelectorAll('td.second'))[0];
  if (typeof second !== 'undefined' && second !== null) {
    console.log(second.textContent ?? second.textContent.trim());
    scrapeResults = scrapeResults.concat(second.textContent ?? second.textContent.trim(), ' ');
  }
  const third = Object.values(item.querySelectorAll('td.minw4'))[0];
  if (typeof third !== 'undefined' && third !== null) {
    console.log(third.textContent ?? third.textContent.trim());
    scrapeResults = scrapeResults.concat(third.textContent ?? third.textContent.trim(), ' ');
  }
  const last = Object.values(item.querySelectorAll('td.last.minw1'))[0];
  if (typeof last !== 'undefined' && last !== null) {
    console.log(last.textContent ?? last.textContent.trim());
    scrapeResults = scrapeResults.concat(last.textContent ?? last.textContent.trim(), '\n');
  }
});
// copy results to clipboard
copy(scrapeResults);


(7.1) Browser console code using Array.from and the map function to return JSON data.
console code
// Go to http://www.aastocks.com/en/stocks/market/calendar.aspx?type=1, then open the browser
// (Chrome, Firefox, Safari) developer tools (Cmd + Opt + I on mac, or Ctrl + Shift + I on win)
// and enter the following console code to run.
var data = Array.from(
  document.querySelectorAll('.crtRow')
).map(
  row => Array.from(row.children).map(node => node.textContent.trim())
).map(
  (row) => row[0].length === 0 ? [...row.slice(1)] : row
).map(
  (row, idx, arr) => {
    if (row.length === 1) return null;
    const getLastMatch = (idx, arr) => arr[idx].length === 4 ? arr[idx] : getLastMatch(idx - 1, arr);
    const match = getLastMatch(idx, arr);
    const isSameDate = row.length === 3;
    console.log(''.concat(match[0], ' ', row[1 - isSameDate * 1], ' ', row[2 - isSameDate * 1], ' ', row[3 - isSameDate * 1], '\n'));
    return {
      date: match[0],
      stock: row[1 - isSameDate * 1],
      code: row[1 - isSameDate * 1].slice(-8).slice(0, 5),
      industry: row[2 - isSameDate * 1],
      period: row[3 - isSameDate * 1 - 3]
    }
  }
).filter(Boolean);
console.log(data);
copy(data);


(8) Another example of nodejs scraping code, as in the demo below.
ronaldo.js
// need to install: npm install request request-promise jsdom
// run with: node ronaldo.js
const request = require("request-promise");
const url = "https://www.transfermarkt.com/cristiano-ronaldo/alletore/spieler/8198/plus/1";
const headers = {
  'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/90.0.4430.93 Safari/537.36'
};
const jsdom = require("jsdom");
const { JSDOM } = jsdom;
const options = { url: url, timeout: 300000, headers: headers };

async function scrape() {
  try {
    const htmlResult = await request.get(options);
    const dom = new JSDOM(htmlResult);
    const { document } = dom.window;
    var data = Array.from(
      document.querySelectorAll('.responsive-table table tbody tr')
    ).map(
      row => Array.from(row.children).map(node => node.textContent.trim())
    ).map(
      (row) => row.length === 15 ? [...row.slice(0, 5), ...row.slice(6)] : row
    ).map(
      (row, idx, goals) => {
        if (row.length === 1) return null;
        const getLastMatch = (idx, goals) => goals[idx].length === 14 ? goals[idx] : getLastMatch(idx - 1, goals);
        const match = getLastMatch(idx, goals);
        const isSameMatch = row.length === 14;
        return {
          competition: match[1],
          matchday: match[2],
          date: match[3],
          venue: match[4],
          opponent: match[7],
          result: match[8],
          position: match[9],
          minute: row[1 + isSameMatch * 9],
          atScore: row[2 + isSameMatch * 9],
          goalType: row[3 + isSameMatch * 9],
          assist: row[4 + isSameMatch * 9],
        }
      }
    ).filter(Boolean)                                        // filter null
     .filter(x => (new Date(x.date)).getFullYear() >= 2021); // filter year
    console.log(data);
  } catch (err) {
    console.error(err);
  }
}

scrape();

Saturday, April 24, 2021

Google Colab - keras-learn and Logistic Regression example

(1) Following the previous post, this demos the keras sample from how-to-install-tensorflow-with-gpu.html.
Please note that Google only allows one active session for the free service. If you need a faster GPU, more RAM, and more sessions, please consider subscribing to Colab Pro.

keraslearn.ipynb
# Step 1 mount google drive if data is from google drive
import os
from google.colab import drive
drive.mount('/content/drive')

# Step 2 if using tensorflow GPU
#%tensorflow_version 2.x
#import tensorflow as tf
#print('TensorFlow: {}'.format(tf.__version__))
#tf.test.gpu_device_name()

# Step 3
from keras.models import Sequential
from keras.layers import Dense
import numpy
import time
# fix random seed for reproducibility
numpy.random.seed(7)

# Step 4
# download pima indians dataset to google drive
!curl -L https://tinyurl.com/tensorflowwin | grep -A768 pima-indians-diabetes.data.nbsp | sed '1d' > 'drive/MyDrive/Colab Notebooks/pima-indians-diabetes.data'
# or download to local data directory
!mkdir -p ./data
!curl -L https://tinyurl.com/tensorflowwin | grep -A768 pima-indians-diabetes.data.nbsp | sed '1d' > './data/pima-indians-diabetes.data'

# Step 5 load dataset from google drive
dataset = numpy.loadtxt("drive/MyDrive/Colab Notebooks/pima-indians-diabetes.data", delimiter=",")
# or load data from local data directory
dataset = numpy.loadtxt("./data/pima-indians-diabetes.data", delimiter=",")

# Step 6
# split into input (X) and output (Y) variables
X = dataset[:,0:8]
Y = dataset[:,8]

# Step 7
# create model
model = Sequential()
model.add(Dense(12, input_dim=8, activation='relu'))
model.add(Dense(1, activation='sigmoid'))

# Step 8
# Compile model
model.compile(loss='binary_crossentropy', optimizer='adam', metrics=['accuracy'])

# Step 9
start_time = time.time()
# Fit the model
model.fit(X, Y, batch_size=10, epochs=1500)  # parameters for keras 1.2.2
# evaluate the model
scores = model.evaluate(X, Y)
print("\n%s: %.2f%%" % (model.metrics_names[1], scores[1]*100))
print("\nTraining took %.2f seconds\n" % (time.time()-start_time))


(2) For a large training data set, consider zipping it and uploading to Google Drive. Mount the Google Drive, then unzip it in the local session, e.g.
!mkdir -p ./data
!unzip -o './drive/MyDrive/Colab Notebooks/mydata.zip' -d ./data/


(3) To stop the running cell in Google Colab, use Ctrl-M I.

(4) How to quickly run an ipynb example from GitHub?
4.1) Go to https://colab.research.google.com/, log in with gmail, choose the GitHub tab, and enter a search such as "clareyan/From-Linear-to-Logistic-Regression-Explained-Step-by-Step"
4.2) In the Step 2 cell box, change the importing of the dataset to
df = pd.read_csv('https://raw.githubusercontent.com/clareyan/From-Linear-to-Logistic-Regression-Explained-Step-by-Step/master/Social_Network_Ads.csv')
4.3) Then choose menu -> Runtime -> Run All. After that, use menu -> File -> Save a copy in Drive.

Friday, April 23, 2021

How to set up Google Colab and start linear regression with tensorflow.

(1) You only need the Chrome browser, a Google gmail account, and a Google Drive account to start cloud tensorflow computing. It is free to use and learn.
(2) Go to https://colab.research.google.com/
(3) Create a new notebook, rename it, and then Copy to Drive
(4) Type the following into the notebook and run it step by step (press Alt-Enter to run after each step)
LinearRegression.ipynb
#Step 1
# mount Google Drive; will ask for an authorization code
import numpy as np
import os
from google.colab import drive
drive.mount('/content/drive')

#Step 2
# choose the notebook settings to use GPU, via Menu -> Edit -> Notebook Settings
%tensorflow_version 2.x
import tensorflow as tf
# will show GPU if successful
tf.test.gpu_device_name()

#Step 3
# load data
import pandas as pd
# either download linear_data.csv and upload to google drive, or download it directly via the shell command below
!curl -L https://tinyurl.com/lineardatacsv | grep -A200 START_OF_LINEAR_DATA.CSV | sed '1d' | sed -n "/END_OF_LINEAR_DATA.CSV/q;p" | sed 's/&gt;/\>/g;s/&lt;/\</g' > 'drive/MyDrive/Colab Notebooks/linear_data.csv'
df = pd.read_csv('drive/MyDrive/Colab Notebooks/linear_data.csv')
df.head()

#Step 4
# split into independent and dependent
X = df[['X']].values
y = df[['Y']].values
X.shape, y.shape

#Step 5
# visualize data
import matplotlib.pyplot as plt
%matplotlib inline
plt.scatter(X, y)
plt.xlabel('independent')
plt.ylabel('dependent')
plt.show()

# Use a Text box to enter:
# Linear Regression $ \hat y = a + b \cdot X $

#Step 6
# Linear Regression
# define regression model
class regression():
    def __init__(self):
        self.a = tf.Variable(initial_value=0, dtype=tf.float32)
        self.b = tf.Variable(initial_value=0, dtype=tf.float32)
    def __call__(self, X):
        x = tf.convert_to_tensor(X, dtype=tf.float32)
        y_est = tf.add(self.a, tf.multiply(self.b, x))
        return y_est
model = regression()

# Use a Text box to enter:
# loss = sum of squared errors $ sse = \sum (y_t - y_p)^2 $

# Step 7
# define loss function
def loss_func(y_true, y_pred):
    # both values are tensors
    sse = tf.reduce_sum(tf.square(tf.subtract(y_true, y_pred)))
    return sse

# Use a Text box to enter:
# Gradient Descent $ a = a - \frac{\partial\,sse}{\partial a} \cdot LR $, $ b = b - \frac{\partial\,sse}{\partial b} \cdot LR $

# Step 8
# define train function
def train(model, inputs, outputs, learning_rate):
    # convert outputs into a tensor
    y_true = tf.convert_to_tensor(outputs, dtype=tf.float32)
    # GradientTape records operations so gradients can be computed
    with tf.GradientTape() as g:
        y_pred = model(inputs)
        current_loss = loss_func(y_true, y_pred)
    da, db = g.gradient(current_loss, [model.a, model.b])
    # update the values
    model.a.assign_sub(da * learning_rate)
    model.b.assign_sub(db * learning_rate)

# Step 9
def plot_scatter(x, y):
    plt.scatter(x, y)             # scatter
    plt.plot(x, model(x), 'r--')  # line
plot_scatter(X, y)

# Step 10
# model fitting
model = regression()
a_values = []
b_values = []
cost_values = []
# epochs, number of steps
epochs = 100
# learning rate
learning_rate = 0.0001
for epoch in range(epochs):
    a_values.append(model.a)
    b_values.append(model.b)
    # prediction values and error
    y_pred = model(X)
    cost_value = loss_func(y, y_pred)
    cost_values.append(cost_value)
    # training
    train(model, X, y, learning_rate)
    # visualize the scatter
    plot_scatter(X, y)
    plt.show()
    # print the values
    print('Epoch: %d, Loss: %0.2f, a: %0.2f, b: %0.2f' % (epoch, cost_value, a_values[-1], b_values[-1]))

# Step 11
plt.plot(cost_values)


(5) linear_data.csv, download and upload to Google Drive
linear_data.csv
X,Y
4,2
4,10
7,4
7,22
8,16
9,10
10,18
10,26
10,34
11,17
11,28
12,14
12,20
12,24
12,28
13,25
13,34
13,24
13,46
14,26
14,36
14,60
14,80
15,20
15,26
15,54
16,32
16,40
17,32
17,40
17,50
18,42
18,56
18,76
18,84
19,36
19,45
19,68
20,32
20,48
20,52