https://github.com/brentthorne/posterdown
https://github.com/rstudio/pagedown
https://github.com/GerkeLab/betterposter
| library("scatterplot3d") | |
| library("MASS") | |
| path <- "/Users/jakewestfall/Desktop/" | |
| # simulate data from gaussian copula | |
| covmat <- matrix(.9, nrow=3, ncol=3) | |
| diag(covmat) <- 1 | |
| dat <- pnorm(mvrnorm(n=3000, mu=c(0,0,0), Sigma=covmat)) | |
| # pairs(dat) |
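For comparison outside R, the same Gaussian-copula draw in Python; a minimal numpy/scipy sketch, with the seed and variable names my own:

```python
import numpy as np
from scipy.stats import norm

rng = np.random.default_rng(1)  # arbitrary seed
cov = np.full((3, 3), 0.9)      # pairwise correlation 0.9
np.fill_diagonal(cov, 1.0)
z = rng.multivariate_normal(np.zeros(3), cov, size=3000)
u = norm.cdf(z)                 # Uniform(0,1) margins, Gaussian dependence
```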
```r
### Contour-enhanced funnel plots using metafor ###
library("metafor")

# Example data: Molloy et al. (2014), correlations between
# conscientiousness and medication adherence
dat <- dat.molloy2014

# Random-effects model on the r-to-z transformed correlations
res <- rma(measure="ZCOR", ri=ri, ni=ni, data=dat)

# Contour-enhanced funnel plot: shade the 90/95/99% significance regions
funnel(res, level=c(90, 95, 99), shade=c("white", "gray55", "gray75"), refline=0)
```
```python
from keras import backend as K
from keras.layers import TimeDistributed, Dense, LSTM


class AttentionLSTM(LSTM):
    """LSTM with an attention mechanism.

    This is an LSTM that incorporates an attention mechanism into its
    hidden states. The context vector computed from the attended vector
    is fed into the model's internal states, closely following the model
    of Xu et al. (2016, Sec. 3.1.2), using the soft attention model of
    Bahdanau et al. (2014).
    """
```
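To make the mechanism in the docstring concrete, here is a minimal numpy sketch of Bahdanau-style additive soft attention; the parameters `W_s`, `W_q`, and `v` are hypothetical stand-ins for the layer's trainable weights, not attributes of the class above:

```python
import numpy as np

def soft_attention(states, query, W_s, W_q, v):
    """states: (T, d) attended vectors; query: (d,) current hidden state."""
    # Alignment scores: one scalar per timestep
    scores = np.tanh(states @ W_s + query @ W_q) @ v  # shape (T,)
    # Softmax over timesteps gives the attention weights
    weights = np.exp(scores - scores.max())
    weights /= weights.sum()
    # Context vector: attention-weighted sum of the attended states
    return weights @ states, weights
```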
```python
import base64

import requests
from tqdm import tqdm

master_json_url = 'https://178skyfiregce-a.akamaihd.net/exp=1474107106~acl=%2F142089577%2F%2A~hmac=0d9becc441fc5385462d53bf59cf019c0184690862f49b414e9a2f1c5bafbe0d/142089577/video/426274424,426274425,426274423,426274422/master.json?base64_init=1'

# Base URL that the stream paths are relative to: everything up to the last
# '/' before the trailing '/master.json?base64_init=1' (26 characters)
base_url = master_json_url[:master_json_url.rfind('/', 0, -26) + 1]

resp = requests.get(master_json_url)
content = resp.json()
```
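The otherwise-unused `base64` and `tqdm` imports point at the rest of the script. A sketch of that continuation, assuming the usual master.json layout (a `video` list of streams carrying `height`, `base_url`, `init_segment`, and `segments` fields; the output filename is arbitrary):

```python
# Pick the highest-resolution video stream
video = max(content['video'], key=lambda s: s['height'])
video_base_url = base_url + content['base_url'] + video['base_url']

with open('video.mp4', 'wb') as fh:
    # The init segment is base64-encoded because of base64_init=1 in the URL
    fh.write(base64.b64decode(video['init_segment']))
    # Fetch and append the media segments in order
    for segment in tqdm(video['segments']):
        fh.write(requests.get(video_base_url + segment['url']).content)
```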