# NOTE(review): rm(list = ls()) in a script is discouraged — it silently
# clears the user's entire workspace as a side effect; prefer restarting R.
rm(list=ls())

# 1 RSiena tutorial ----

#install.packages("RSiena")
library(RSiena)
# Built-in RSiena example data: 50 pupils observed at three waves
# (s501-s503 = friendship nominations; s50a = alcohol use; s50s = smoking).
friend.data.w1 <- s501
friend.data.w2 <- s502
friend.data.w3 <- s503
drink <- s50a
smoke <- s50s
# dependent variable: 50 x 50 x 3 array of friendship networks
friendship <- sienaDependent(array(c(friend.data.w1, friend.data.w2, friend.data.w3), dim = c(50, 50,
    3)))
# smoking at wave 1 as a constant covariate; alcohol as a changing covariate
smoke1 <- coCovar(smoke[, 1])
alcohol <- varCovar(drink)
mydata <- sienaDataCreate(friendship, smoke1, alcohol)
# and request
mydata
# to see what you have produced.
myalgorithm <- sienaAlgorithmCreate(projname = NULL)  # NULL: no output file on disk
myeff <- getEffects(mydata)
# structural effects: transitive triplets and three-cycles
myeff <- includeEffects(myeff, transTrip, cycle3)
# alcohol-based selection: ego, alter and ego x alter effects
myeff <- includeEffects(myeff, egoX, altX, egoXaltX, interaction1 = "alcohol")
# smoking-based selection: similarity effect
myeff <- includeEffects(myeff, simX, interaction1 = "smoke1")
myeff
ans <- siena07(myalgorithm, data = mydata, effects = myeff)
summary(ans)
# score-type test: fix inPopSqrt at 0 and test it instead of estimating it
myeff <- setEffect(myeff, inPopSqrt, fix = TRUE, test = TRUE, initialValue = 0)
# prevAns: reuse the previous run's estimates as starting values
ans <- siena07(myalgorithm, data = mydata, effects = myeff, prevAns=ans)
summary(ans)

# 2 Twitter manual ----

# density: observed relations divided by possible relations
fdensity <- function(x) {
    # x: adjacency matrix of the nomination network (0/1 entries; RSiena
    # codes structural zeros as 10). Returns the proportion of realised
    # ties among all valid (off-diagonal, non-missing) dyads.
    diag(x) <- NA  # self-nominations are not valid dyads
    # take care of RSiena structural zeros, set as missing.
    x[x == 10] <- NA
    sum(x == 1, na.rm = TRUE) / sum(x == 1 | x == 0, na.rm = TRUE)
}

# calculate intragroup density
fdensityintra <- function(x, A) {
    # x: adjacency matrix (0/1; 10 = structural zero).
    # A: dyad matrix, TRUE/1 when both nodes share the same attribute.
    # Returns density computed over intragroup dyads only (A == 1).
    diag(x) <- NA
    x[x == 10] <- NA
    diag(A) <- NA
    sum(x == 1 & A == 1, na.rm = TRUE) / sum((x == 1 | x == 0) & A == 1, na.rm = TRUE)
}

# calculate intergroup density
# (original header said "intragroup" — copy-paste error; this function
# conditions on A != 1, i.e. dyads whose nodes DIFFER on the attribute)
fdensityinter <- function(x, A) {
    # x: adjacency matrix (0/1; 10 = structural zero).
    # A: dyad matrix, TRUE/1 when both nodes share the same attribute.
    # Returns density computed over intergroup dyads only (A != 1).
    diag(x) <- NA
    x[x == 10] <- NA
    diag(A) <- NA
    sum(x == 1 & A != 1, na.rm = TRUE) / sum((x == 1 | x == 0) & A != 1, na.rm = TRUE)
}

# construct dyadcharacteristic whether nodes are similar/homogenous
fhomomat <- function(x) {
    # x is a vector of node-covariate values; returns a logical
    # length(x) x length(x) matrix whose cell [i, j] is TRUE iff
    # x[i] == x[j], i.e. both nodes of the dyad share the same value.
    v <- as.vector(x)  # drop names/attributes for a plain matrix result
    outer(v, v, `==`)
}

# a function to calculate all valid dyads.
fndyads <- function(x) {
    # x: adjacency matrix; counts off-diagonal cells holding a valid 0/1
    # observation (NAs and structural zeros coded 10 are excluded).
    diag(x) <- NA
    x[x == 10] <- NA
    sum(x == 1 | x == 0, na.rm = TRUE)
}

# a function to calculate all valid intragroupdyads.
fndyads2 <- function(x, A) {
    # x: adjacency matrix (0/1; 10 = structural zero).
    # A: dyad matrix, TRUE/1 when both nodes share the same attribute.
    # Counts valid off-diagonal 0/1 dyads within groups (A == 1).
    diag(x) <- NA
    x[x == 10] <- NA
    diag(A) <- NA
    sum((x == 1 | x == 0) & A == 1, na.rm = TRUE)
}


fscolnet <- function(network, ccovar) {
    # Coleman's homophily index at the network level (Bojanowski & Corten,
    # 2014, Social Networks 39:14-32): compares the observed number of
    # intragroup ties w with the number expected under random mixing wexp.
    # network: adjacency matrix (0/1; 10 = structural zero; NA = missing)
    # ccovar:  categorical node attribute defining the groups
    # Returns a scalar: positive when there are more intragroup ties than
    # expected, negative when there are fewer.
    
    # dyad matrix: TRUE when both nodes share the same attribute value
    fhomomat <- function(x) {
        xmat <- matrix(x, nrow = length(x), ncol = length(x))
        xmat == t(xmat)
    }
    
    # observed number of intragroup ties over valid dyads only
    fsumintra <- function(x, A) {
        diag(x) <- NA
        x[x == 10] <- NA
        diag(A) <- NA
        sum(x == 1 & A == 1, na.rm = TRUE)
    }
    
    # expectation w* = sum_g sum_i ni * ((ng - 1) / (N - 1))
    network[network == 10] <- NA
    ni <- rowSums(network, na.rm = TRUE)  # outdegree per node
    # ng: size of each node's own group — single vectorised table lookup
    # instead of growing a vector element-by-element in a loop
    tab <- table(ccovar)
    ng <- as.numeric(tab[as.character(ccovar)])
    N <- length(ccovar)
    wexp <- sum(ni * ((ng - 1)/(N - 1)), na.rm = TRUE)
    
    # wgg1 how many intragroup ties
    w <- fsumintra(network, fhomomat(ccovar))
    
    # NOTE(review): when wexp == 0 the w < wexp branch divides by zero —
    # assumed not to occur for these data; confirm for degenerate inputs.
    if (w >= wexp) {
        (w - wexp)/(sum(ni, na.rm = TRUE) - wexp)
    } else {
        (w - wexp)/wexp
    }
}
getwd()
# Load the prepared Twitter data (a list of three objects, see str() below).
load("/Users/anuschka/Documents/labjournal/data/twitter_20190919.RData")  #change to your working directory
str(twitter_20190919, 1)
# presumably: key file with politician info, RSiena data object, seating
# data — confirm against the data documentation
keyf <- twitter_20190919[[1]]
mydata <- twitter_20190919[[2]]
seats <- twitter_20190919[[3]]
# retrieve nominationdata from rsiena object
# (fnet = friendship, atmnet = @-mentions, rtnet = retweets; each an
# n x n x 3 array of waves)
fnet <- mydata$depvars$fnet
atmnet <- mydata$depvars$atmnet
rtnet <- mydata$depvars$rtnet

# retrieve node-attributes from rsiena object
# (Dutch names: vrouw = female, partij = party, ethminz = ethnic
# minority, lft = age)
vrouw <- mydata$cCovars$vrouw
partij <- mydata$cCovars$partij
ethminz <- mydata$cCovars$ethminz
lft <- mydata$cCovars$lft

# undo RSiena's mean-centering: add the stored mean back to each covariate
# to recover the raw scores for the descriptives below
ethminz <- ethminz + attributes(ethminz)$mean
partij <- partij + attributes(partij)$mean
vrouw <- vrouw + attributes(vrouw)$mean
lft <- lft + attributes(lft)$mean

# construct matrices for similarity for each dimension (dyad characteristics)
vrouwm <- fhomomat(vrouw)
partijm <- fhomomat(partij)
ethminzm <- fhomomat(ethminz)

# just for fun, make dyad characteristic indicating whether both nodes are ethnic minorities
xmat <- matrix(ethminz, nrow = length(ethminz), ncol = length(ethminz))
xmatt <- t(xmat)
minoritym <- xmat == 1 & xmatt == 1
# TRUE only when BOTH nodes of the dyad are coded 1 (ethnic minority)

# for age max 5 year difference / for descriptives
xmat <- matrix(lft, nrow = length(lft), ncol = length(lft))
xmatt <- t(xmat)
lftm <- (abs(xmat - xmatt) < 6)

# calculate all possible similar dyads, not the focus of this exercise.  fndyads2(fnet[,,1], vrouwm)
# fndyads2(fnet[,,3], vrouwm) fndyads2(fnet[,,1], partijm) fndyads2(fnet[,,3], partijm)
# fndyads2(fnet[,,1], ethminzm) fndyads2(fnet[,,3], ethminzm)

# make a big object to store all results: 10 descriptive statistics (rows)
# for 3 networks (friendship, atmentions, retweets) x 3 waves (columns)
desmat <- matrix(NA, nrow = 10, ncol = 9)

# the three dependent networks, in column-block order
networks <- list(fnet, atmnet, rtnet)

# one entry per row of desmat: the density function to apply and the dyad
# similarity matrix it conditions on (NULL = unconditional total density)
rowspecs <- list(
    list(f = fdensity,      A = NULL),      # total
    list(f = fdensityintra, A = vrouwm),    # same sex
    list(f = fdensityinter, A = vrouwm),    # different sex
    list(f = fdensityintra, A = partijm),   # same party
    list(f = fdensityinter, A = partijm),   # different party
    list(f = fdensityintra, A = ethminzm),  # same ethnicity
    list(f = fdensityinter, A = ethminzm),  # different ethnicity
    # NOTE(review): the original used fdensityinter here, which computes
    # density among dyads that are NOT both-minority and so contradicts
    # the row label "both minority"; fdensityintra matches the label.
    list(f = fdensityintra, A = minoritym), # both minority
    list(f = fdensityintra, A = lftm),      # same age (<6)
    list(f = fdensityinter, A = lftm)       # different age (>5)
)

# fill the table: columns 1-3 = fnet waves 1-3, 4-6 = atmnet, 7-9 = rtnet
for (n in seq_along(networks)) {
    for (wave in 1:3) {
        net <- networks[[n]][, , wave]
        col <- (n - 1) * 3 + wave
        for (r in seq_along(rowspecs)) {
            spec <- rowspecs[[r]]
            desmat[r, col] <- if (is.null(spec$A)) spec$f(net) else spec$f(net, spec$A)
        }
    }
}

colnames(desmat) <- c("friends w1", "friends w2", "friends w3", "atmentions w1", "atmentions w2", "atmentions w3", 
    "retweets w1", "retweets w2", "retweets w3")
rownames(desmat) <- c("total", "same sex", "different sex", "same party", "different party", "same ethnicity", 
    "different ethnicity", "both minority", "same age (<6)", "different age (>5)")
desmat
myeff <- getEffects(mydata)
myeff
# 'name' selects the dependent network the effect applies to (here rtnet).
# NOTE(review): every `myeff_m1 <- myeff` below resets the effects object,
# so each includeEffects call discards the previous one; only the last
# specification (transTrip + transTies) is actually estimated by siena07.
# The pasted outputs further down (same vrouw / same lft / afstand x
# reciprocity) suggest those runs used different interactive specifications
# — re-running this script as-is will not reproduce them; verify.
myeff_m1 <- myeff
myeff_m1 <- includeEffects(myeff_m1, sameX, interaction1 = "partij", name = "rtnet")

#To answer the second research question, I would like to include different effects. To study the segregation of sex, I think I can still use the sameX effect (to see whether individuals of the same sex are more inclined to retweet one another). 
myeff_m1 <- myeff
myeff_m1 <- includeEffects(myeff_m1, sameX, interaction1 = "vrouw", name = "rtnet")
#To answer the second subquestion of the second RQ, we have to see if there is segregation based on age. I believe that during the class we talked about that you just want the normal variable of age (lft), but I don't totally understand why it's not possible to take the variable same age. Let's try lft first. 
myeff_m1 <- myeff
myeff_m1 <- includeEffects(myeff_m1, sameX, interaction1 = "lft", name = "rtnet")
#Does afstand just work like this?
myeff_m1 <- myeff
myeff_m1 <- includeEffects(myeff_m1, sameX, interaction1 = "afstand", name = "rtnet")
#No it doesn't so I checked the effect in the effectsdocumentation. Here I see that sameX cannot be used for the variable afstand. I'm not sure which of the other effects is useful, I checked them in the Rsiena manual. Maybe incoming shared WWX, but on the other hand I'm not sure if that really tells something about segregation. Let's try reciprocity and then if there's no reciprocity that's segregation (not really but ok)
myeff_m1 <- myeff
myeff_m1 <- includeEffects(myeff_m1, XRecip, interaction1 = "afstand", name = "rtnet")

# final specification: transitivity effects on the retweet network
myeff_m1 <- myeff
myeff_m1 <- includeEffects(myeff_m1, transTrip, transTies, name = "rtnet")
# I used a seed so you will probably see the same results
myalgorithm <- sienaAlgorithmCreate(projname = "test", seed = 345654)
# to speed things up a bit, I am using more cores.
ansM1 <- siena07(myalgorithm, data = mydata, effects = myeff_m1, useCluster = TRUE, nbrNodes = 2, initC = TRUE, 
    batch = TRUE)
# re-estimate three times, each run starting from the previous estimates
# (prevAns), to improve convergence
ansM1b <- siena07(myalgorithm, data = mydata, prevAns = ansM1, effects = myeff_m1, useCluster = TRUE, 
    nbrNodes = 2, initC = TRUE, batch = TRUE)
ansM1c <- siena07(myalgorithm, data = mydata, prevAns = ansM1b, effects = myeff_m1, useCluster = TRUE, 
    nbrNodes = 2, initC = TRUE, batch = TRUE)
ansM1d <- siena07(myalgorithm, data = mydata, prevAns = ansM1c, effects = myeff_m1, useCluster = TRUE, 
    nbrNodes = 2, initC = TRUE, batch = TRUE)
# save each run so results can be inspected without re-estimating
save(ansM1, file = "ansM1a.RData")
save(ansM1b, file = "ansM1b.RData")
save(ansM1c, file = "ansM1c.RData")
save(ansM1d, file="ansM1d.RData")
summary(ansM1)
#> Estimates, standard errors and convergence t-ratios
#> 
#>                                            Estimate   Standard   Convergence 
#>                                                         Error      t-ratio   
#>    1. rate constant fnet rate (period 1)    3.7025  ( 0.1597   )    0.0357   
#>    2. rate constant fnet rate (period 2)    1.9617  ( 0.1209   )   -0.0394   
#>    3. eval fnet: outdegree (density)       -0.6469  ( 0.0832   )    0.0611   
#>    4. eval fnet: reciprocity                0.8738  ( 0.0933   )   -0.0087   
#>    5. rate constant atmnet rate (period 1) 25.7940  ( 1.9433   )    0.0479   
#>    6. rate constant atmnet rate (period 2)  9.6375  ( 0.5609   )   -0.0363   
#>    7. eval atmnet: outdegree (density)     -2.3469  ( 0.0306   )   -0.0745   
#>    8. eval atmnet: reciprocity              1.7050  ( 0.0679   )   -0.0068   
#>    9. rate constant rtnet rate (period 1)   9.8803  ( 0.4642   )    0.0368   
#>   10. rate constant rtnet rate (period 2)   8.8679  ( 0.4907   )    0.0069   
#>   11. eval rtnet: outdegree (density)      -2.1481  ( 0.0383   )    0.0565   
#>   12. eval rtnet: reciprocity               1.7617  ( 0.0677   )    0.0693   
#>   13. eval rtnet: same vrouw               -0.0053  ( 0.0479   )    0.0758   
#> 
#> Overall maximum convergence ratio:    0.1886 
#> 
#> 
#> Total of 2494 iteration steps.
#> 
#> Covariance matrix of estimates (correlations below diagonal)
#> 
#>        0.026        0.001        0.000        0.001        0.014       -0.002        0.000        0.000        0.005       -0.002        0.000        0.000        0.000
#>        0.072        0.015        0.000        0.000        0.004       -0.003        0.000       -0.001        0.005       -0.002        0.000        0.000        0.000
#>       -0.011       -0.018        0.007       -0.003       -0.009        0.007        0.000        0.000        0.002       -0.002        0.000        0.000        0.000
#>        0.043        0.012       -0.398        0.009        0.021        0.002        0.000        0.000       -0.003        0.003        0.000        0.000        0.000
#>        0.046        0.018       -0.057        0.114        3.777       -0.134        0.019        0.020       -0.090        0.069       -0.011        0.018        0.010
#>       -0.018       -0.038        0.147        0.036       -0.123        0.315        0.000        0.000       -0.006        0.008        0.000        0.001        0.000
#>        0.061        0.060        0.038        0.124        0.314        0.005        0.001       -0.001        0.000       -0.001        0.000        0.000        0.000
#>       -0.045       -0.074       -0.029       -0.043        0.153        0.001       -0.474        0.005       -0.002        0.001        0.000        0.000        0.000
#>        0.071        0.084        0.046       -0.070       -0.100       -0.022       -0.018       -0.068        0.215       -0.023        0.000        0.001        0.001
#>       -0.024       -0.029       -0.042        0.069        0.072        0.028       -0.038        0.039       -0.099        0.241       -0.001        0.002        0.000
#>        0.034        0.016        0.038       -0.008       -0.148       -0.002        0.051       -0.103       -0.024       -0.039        0.001       -0.001       -0.001
#>       -0.011       -0.005       -0.023       -0.029        0.137        0.028       -0.047        0.002        0.033        0.060       -0.362        0.005        0.000
#>        0.025        0.027       -0.007        0.006        0.105       -0.009       -0.028        0.052        0.066        0.007       -0.686        0.111        0.002
#> 
#> Derivative matrix of expected statistics X by parameters:
#> 
#>      147.068        0.000      147.068       80.339       -9.614        0.000        2.660        4.027        5.224        0.000       -7.700       -0.720       -4.456
#>        0.000      136.075       26.814        9.317        0.000        1.654       -3.338       -0.203        0.000        1.329       -8.634       -3.628       -8.557
#>        7.944        2.444      223.672      157.729        1.575       -5.525       -6.637        3.345       -1.090       10.867       -1.099       -2.692       -4.298
#>       -9.111       -4.532       69.433      324.649        4.971      -10.379       -8.389        5.973       10.768       -2.642       -7.224       -4.728       -5.551
#>       -0.774        0.000       -0.774       -2.111        9.675        0.000      -10.690       -6.356        0.485        0.000        0.322       -1.527       -0.797
#>        0.000        0.744       -4.899       -3.694        0.000       37.463      -14.015       -6.784        0.000       -0.023       -0.275        0.189        0.168
#>        1.904      -30.789      -36.032      -79.930      217.567      163.212     1293.928      582.230      -15.236        9.605       28.318       55.585       35.254
#>        8.951        4.859        7.445       -5.530       -4.936       -0.014      335.429      414.904       22.024        2.639       28.854       17.507        6.540
#>       -3.888        0.000       -3.888        0.123        4.701        0.000        3.261        2.967       50.878        0.000      -12.638       -8.288       -8.170
#>        0.000        0.983        0.661       -2.082        0.000        1.110        3.678        0.981        0.000       43.015       -9.656       -6.170       -4.157
#>      -54.242      -19.149      -54.952      -45.825       12.993      -18.686      -15.470       17.704       97.337      178.564     1305.721      504.070      709.266
#>        1.970       -5.476       -3.632      -11.686        3.487       -4.020       13.956       12.646      -36.347      -10.687      289.674      419.988      146.919
#>      -35.639      -15.114      -38.982      -28.483        8.240       -4.596       29.300       20.538       34.695      100.331      692.127      245.758      727.670
#> 
#> Covariance matrix of X (correlations below diagonal):
#> 
#>      543.039       21.571      557.499      301.214      -33.152        0.794       26.020       15.467       18.185       -8.478      -17.680        6.594       -9.906
#>        0.057      268.069       73.485       21.783       -0.754       -1.092      -16.214      -10.072       32.020        4.967      -19.569      -14.018      -15.080
#>        0.824        0.155      842.106      498.782      -29.602       -1.591       32.188       16.713       35.880        9.622       -8.467      -13.465      -11.349
#>        0.422        0.043        0.561      939.412      -25.220        7.817       15.788        2.032       41.081       -1.388      -51.635      -59.587      -30.041
#>       -0.065       -0.002       -0.047       -0.038      473.637       14.358       79.919      -36.650        6.324       13.342        8.625        0.983        4.440
#>        0.002       -0.003       -0.003        0.012        0.031      463.176        0.591      -40.726      -21.149       19.981      -14.604       13.900        0.428
#>        0.035       -0.031        0.035        0.016        0.116        0.001     1001.633      482.314       36.089        1.539      -19.287      -38.926        8.611
#>        0.029       -0.027        0.025        0.003       -0.073       -0.082        0.657      538.782       39.987       -0.475      -13.830      -30.417       -6.263
#>        0.033        0.082        0.052        0.056        0.012       -0.041        0.048        0.072      569.093      -27.751      -35.811     -100.069      -21.007
#>       -0.017        0.014        0.015       -0.002        0.029        0.043        0.002       -0.001       -0.054      461.029       42.505      -39.025       37.047
#>       -0.022       -0.035       -0.009       -0.049        0.012       -0.020       -0.018       -0.017       -0.044        0.058     1171.588      564.844      645.549
#>        0.011       -0.033       -0.018       -0.075        0.002        0.025       -0.047       -0.050       -0.161       -0.070        0.633      680.652      310.823
#>       -0.017       -0.037       -0.016       -0.039        0.008        0.001        0.011       -0.011       -0.035        0.069        0.750        0.474      633.061
# second estimation run (started from ansM1's estimates via prevAns)
summary(ansM1b)
#> Estimates, standard errors and convergence t-ratios
#> 
#>                                            Estimate   Standard   Convergence 
#>                                                         Error      t-ratio   
#>    1. rate constant fnet rate (period 1)    3.7029  ( 0.1585   )    0.0540   
#>    2. rate constant fnet rate (period 2)    1.9681  ( 0.1212   )    0.0116   
#>    3. eval fnet: outdegree (density)       -0.6500  ( 0.0813   )    0.0518   
#>    4. eval fnet: reciprocity                0.8738  ( 0.0876   )   -0.0051   
#>    5. rate constant atmnet rate (period 1) 25.7601  ( 1.6459   )    0.0126   
#>    6. rate constant atmnet rate (period 2)  9.6437  ( 0.5321   )    0.0203   
#>    7. eval atmnet: outdegree (density)     -2.3447  ( 0.0299   )   -0.0553   
#>    8. eval atmnet: reciprocity              1.7003  ( 0.0701   )   -0.0063   
#>    9. rate constant rtnet rate (period 1)   9.8955  ( 0.4964   )    0.0516   
#>   10. rate constant rtnet rate (period 2)   8.8459  ( 0.4346   )    0.0499   
#>   11. eval rtnet: outdegree (density)      -2.1509  ( 0.0282   )    0.0390   
#>   12. eval rtnet: reciprocity               1.7617  ( 0.0695   )   -0.0035   
#>   13. eval rtnet: same lft                 -0.0085  ( 0.1328   )   -0.0104   
#> 
#> Overall maximum convergence ratio:    0.1374 
#> 
#> 
#> Total of 2317 iteration steps.
#> 
#> Covariance matrix of estimates (correlations below diagonal)
#> 
#>        0.025        0.000       -0.001        0.000       -0.013        0.000        0.000       -0.001       -0.003        0.002        0.000        0.000        0.001
#>       -0.003        0.015        0.001        0.000        0.003        0.004        0.000        0.000        0.000       -0.003        0.000        0.000        0.000
#>       -0.075        0.052        0.007       -0.003        0.010       -0.005        0.000        0.000        0.000       -0.001        0.000        0.000        0.000
#>       -0.005       -0.016       -0.357        0.008       -0.017        0.000        0.000        0.000        0.003        0.002        0.000        0.000       -0.001
#>       -0.050        0.013        0.071       -0.117        2.709       -0.153        0.006        0.028       -0.020       -0.013        0.001       -0.002       -0.022
#>        0.000        0.058       -0.105        0.010       -0.175        0.283        0.000       -0.001       -0.004       -0.004       -0.001        0.001        0.007
#>        0.037       -0.015       -0.005       -0.076        0.130        0.000        0.001       -0.001        0.000        0.000        0.000        0.000        0.000
#>       -0.098        0.019        0.016        0.045        0.246       -0.039       -0.498        0.005        0.001        0.000        0.000        0.000       -0.001
#>       -0.042        0.000       -0.010        0.059       -0.024       -0.015        0.033        0.016        0.246       -0.006        0.001        0.003        0.002
#>        0.027       -0.058       -0.032        0.040       -0.018       -0.016       -0.013        0.012       -0.030        0.189       -0.001        0.001        0.002
#>       -0.022       -0.022       -0.016       -0.002        0.030       -0.051       -0.050       -0.035        0.045       -0.068        0.001       -0.001        0.000
#>        0.044        0.008        0.028        0.039       -0.013        0.017        0.059        0.034        0.087        0.035       -0.453        0.005        0.000
#>        0.032       -0.031       -0.035       -0.044       -0.102        0.093        0.014       -0.068        0.027        0.042       -0.069       -0.035        0.018
#> 
#> Derivative matrix of expected statistics X by parameters:
#> 
#>      150.116        0.000      150.116       91.979       -1.053        0.000        5.713        4.942        4.915        0.000       -5.025       -7.543       -0.389
#>        0.000      135.597       17.896       11.251        0.000        1.815        7.116        1.617        0.000        8.528       -1.616       -0.846        0.896
#>       10.124       -9.224      225.543      153.271       -3.358        5.632       -5.974       -2.904        0.498       -5.866       -4.452      -12.147        1.960
#>        6.339        1.619       79.299      342.095        1.342        1.783        2.013       -2.384        4.347       -8.948        7.250       -6.697        3.485
#>        0.217        0.000        0.217        2.348       12.892        0.000      -10.445       -6.741        0.048        0.000       -0.113        0.379        0.325
#>        0.000       -2.149        3.217        1.926        0.000       40.360      -12.757       -6.244        0.000       -2.642        4.594        2.378       -0.694
#>       14.360       -0.095       13.143       63.176      234.860      153.303     1293.019      608.863      -68.841       -9.777       49.364       -3.789        3.087
#>       27.763       -0.908       23.747        4.911      -18.209       -0.110      341.843      416.070      -10.391        9.170       22.488       -1.038        2.493
#>        2.437        0.000        2.437       -0.736        2.161        0.000       -1.869       -2.470       46.319        0.000      -17.428      -10.949       -0.685
#>        0.000        1.979        1.623        0.310        0.000        5.981       -0.451       -2.486        0.000       50.634       -5.413       -3.758       -0.522
#>      -14.421        4.352      -24.191       -6.354      -30.492        2.480        3.670       38.038      107.701      140.822     1372.864      604.811       34.686
#>      -11.525        1.230      -21.914      -13.324       -7.629      -11.031      -14.407       -5.819      -39.579      -12.265      334.461      450.751        8.939
#>       -2.567        2.933        2.226        7.220       -1.194        0.828        0.735        4.304       -1.718        6.548       39.485       16.641       45.684
#> 
#> Covariance matrix of X (correlations below diagonal):
#> 
#>      555.845        1.950      537.333      332.788       -6.804       -3.500       31.165       14.942       18.248       11.058      -27.062      -17.675       -0.199
#>        0.005      266.998       40.151       29.633        3.949        0.542        7.070        3.674       -0.313       14.991      -12.061       -2.638        0.788
#>        0.811        0.087      790.350      507.460      -12.670        4.795        7.792        5.658       19.086       10.748      -47.046      -34.156        1.751
#>        0.458        0.059        0.586      949.029       15.371       -2.705       30.698       14.219       26.619        4.990        5.935       -4.091        3.786
#>       -0.012        0.010       -0.019        0.022      535.198      -31.328       63.140      -27.218       -6.246       -8.924      -21.592       -0.855        0.816
#>       -0.007        0.002        0.008       -0.004       -0.061      485.227       11.137      -45.416      -18.008       13.765       20.476        9.244        2.308
#>        0.040        0.013        0.008        0.030        0.083        0.015     1075.542      530.346      -37.125      -11.314      -27.476        6.911        0.377
#>        0.027        0.009        0.008        0.019       -0.050       -0.087        0.683      561.055      -19.040       -7.207       16.898       15.625        1.070
#>        0.033       -0.001        0.029        0.037       -0.012       -0.035       -0.048       -0.034      549.119        3.117      -34.853      -99.089       -1.642
#>        0.021        0.041        0.017        0.007       -0.017        0.028       -0.016       -0.014        0.006      495.080       21.291      -44.192        5.892
#>       -0.033       -0.021       -0.048        0.005       -0.027        0.027       -0.024        0.020       -0.042        0.027     1229.548      654.334       41.096
#>       -0.027       -0.006       -0.044       -0.005       -0.001        0.015        0.008        0.024       -0.152       -0.071        0.671      773.468       16.495
#>       -0.001        0.008        0.010        0.021        0.006        0.018        0.002        0.008       -0.012        0.044        0.196        0.099       35.594
# third estimation run (started from ansM1b's estimates via prevAns)
summary(ansM1c)
#> Estimates, standard errors and convergence t-ratios
#> 
#>                                            Estimate   Standard   Convergence 
#>                                                         Error      t-ratio   
#>    1. rate constant fnet rate (period 1)    3.7056  ( 0.1618   )    0.0704   
#>    2. rate constant fnet rate (period 2)    1.9693  ( 0.1169   )    0.0015   
#>    3. eval fnet: outdegree (density)       -0.6498  ( 0.0875   )    0.1001   
#>    4. eval fnet: reciprocity                0.8761  ( 0.0952   )    0.0577   
#>    5. rate constant atmnet rate (period 1) 25.7303  ( 2.1120   )    0.0595   
#>    6. rate constant atmnet rate (period 2)  9.6375  ( 0.6349   )   -0.0284   
#>    7. eval atmnet: outdegree (density)     -2.3448  ( 0.0300   )    0.0472   
#>    8. eval atmnet: reciprocity              1.7007  ( 0.0739   )    0.0538   
#>    9. rate constant rtnet rate (period 1)  10.3726  ( 0.5379   )    0.0871   
#>   10. rate constant rtnet rate (period 2)   9.2336  ( 0.5150   )    0.0279   
#>   11. eval rtnet: outdegree (density)      -2.1440  ( 0.0281   )    0.0235   
#>   12. eval rtnet: reciprocity               1.0249  ( 0.1500   )   -0.0119   
#>   13. eval rtnet: afstand x reciprocity    -0.0999  ( 0.0163   )    0.0223   
#> 
#> Overall maximum convergence ratio:    0.1644 
#> 
#> 
#> Total of 2494 iteration steps.
#> 
#> Covariance matrix of estimates (correlations below diagonal)
#> 
#>        0.026        0.001        0.000        0.000       -0.036        0.000        0.000       -0.001       -0.001       -0.001        0.000        0.001        0.000
#>        0.066        0.014       -0.001        0.000       -0.005       -0.002        0.000        0.000        0.002        0.001        0.000        0.001        0.000
#>       -0.016       -0.054        0.008       -0.003       -0.015       -0.008        0.000        0.000       -0.002       -0.005        0.000        0.000        0.000
#>        0.003       -0.026       -0.304        0.009        0.065       -0.006        0.000        0.001        0.006        0.000        0.000        0.000        0.000
#>       -0.104       -0.021       -0.080        0.322        4.460       -0.282        0.013        0.046        0.169        0.043       -0.001       -0.015       -0.003
#>        0.001       -0.021       -0.149       -0.097       -0.210        0.403        0.001       -0.003       -0.026        0.006        0.001       -0.001        0.000
#>       -0.086       -0.057        0.012        0.072        0.206        0.042        0.001       -0.001       -0.001        0.000        0.000        0.000        0.000
#>       -0.059        0.023       -0.049        0.112        0.295       -0.070       -0.494        0.005        0.007        0.000        0.000        0.000        0.000
#>       -0.012        0.033       -0.045        0.122        0.149       -0.075       -0.040        0.164        0.289       -0.019        0.001        0.001        0.000
#>       -0.015        0.012       -0.106        0.005        0.039        0.019       -0.020       -0.008       -0.068        0.265        0.001        0.003        0.000
#>        0.014       -0.032       -0.011        0.027       -0.013        0.079        0.008       -0.070        0.067        0.091        0.001       -0.001        0.000
#>        0.034        0.031        0.036        0.006       -0.046       -0.006        0.015       -0.013        0.010        0.033       -0.222        0.022        0.002
#>        0.047        0.010        0.045        0.004       -0.101        0.041       -0.018       -0.037       -0.036       -0.024       -0.045        0.880        0.000
#> 
#> Derivative matrix of expected statistics X by parameters:
#> 
#>      144.470        0.000      144.470       83.286        2.341        0.000        7.377        7.069        2.028        0.000        1.827        1.609      -25.495
#>        0.000      145.145       32.526       15.128        0.000        8.427        5.418        1.267        0.000        9.474       -0.349       -5.563       48.416
#>        8.048       13.588      204.612      132.600        1.380       16.022       -9.298       -4.354        0.955        0.352       10.568        7.198     -102.057
#>       -4.539        5.009       63.105      316.504       -2.476        6.827        0.195        1.972      -14.451       -0.061       -3.740        5.697      -94.590
#>        0.930        0.000        0.930       -3.104        9.422        0.000      -11.059       -7.451       -0.911        0.000       -0.445       -0.311        5.975
#>        0.000        0.977        5.228        4.830        0.000       31.487      -16.166       -5.929        0.000       -0.569       -1.997        1.276      -16.881
#>       60.585       15.293       47.030      -13.969      239.030      168.447     1436.591      711.460       30.587       22.931      -11.674      -93.763      760.712
#>       34.807        1.895       33.543        5.519      -23.055       -1.239      376.462      436.122      -25.714       11.595       33.029      -10.481      125.422
#>       -0.139        0.000       -0.139       -2.694        1.137        0.000       -2.493       -2.640       40.009        0.000      -14.453       -9.884       89.108
#>        0.000        0.204        3.509        3.212        0.000       -1.768        6.110        3.917        0.000       39.756      -13.313       -9.677       86.780
#>       -1.989       -8.585      -14.870      -61.245       16.241      -12.586       58.969       37.685      115.434      149.505     1277.164      465.523    -3710.883
#>        2.531       -8.782        5.233       -4.483       -2.043        4.189       -8.569       -6.601      -41.723      -25.786      261.957      395.219    -3245.204
#>      -37.206       52.657      -88.089      -44.796        8.715        4.592       48.013       40.744      408.108      237.334    -2206.344    -3242.372    32032.864
#> 
#> Covariance matrix of X (correlations below diagonal):
#> 
#>      534.590       23.520      543.904      320.731        9.467      -14.363       -3.505       22.158        8.268       -6.101       13.382       13.583     -104.140
#>        0.060      285.246       86.395       36.262      -10.001       15.128        0.460        3.552        7.901       17.951      -16.302      -11.752       95.641
#>        0.824        0.179      814.580      513.905       11.650       10.536       -6.936       18.401        3.523       -0.611       26.764       27.108     -230.267
#>        0.453        0.070        0.588      936.486       -3.468       10.009       -7.472       14.573      -22.456        3.949      -29.707       -5.312      -21.730
#>        0.018       -0.027        0.018       -0.005      499.326      -14.181       85.187      -39.764       17.376       19.266        6.616       -5.604       10.299
#>       -0.030        0.043        0.018        0.016       -0.031      425.790       20.526      -22.231       -6.150      -15.328       -2.766       -2.972       43.979
#>       -0.004        0.001       -0.007       -0.007        0.111        0.029     1184.271      597.356       -3.620       39.347       39.691      -15.946       85.779
#>        0.039        0.009        0.026        0.019       -0.072       -0.044        0.703      610.319      -15.926       27.964       19.371      -12.680      121.894
#>        0.016        0.022        0.006       -0.034        0.036       -0.014       -0.005       -0.030      470.653       -3.160       14.641      -97.120      941.082
#>       -0.012        0.050       -0.001        0.006        0.041       -0.035        0.054        0.053       -0.007      452.224       83.922      -37.552      391.211
#>        0.018       -0.029        0.029       -0.030        0.009       -0.004        0.035        0.024        0.021        0.120     1080.719      482.551    -4133.295
#>        0.023       -0.027        0.037       -0.007       -0.010       -0.006       -0.018       -0.020       -0.176       -0.069        0.577      646.063    -5382.071
#>       -0.020        0.025       -0.035       -0.003        0.002        0.009        0.011        0.022        0.189        0.080       -0.549       -0.924    52488.103
# Model M1d: multivariate SAOM (fnet, atmnet, rtnet) with transitive triplets
# and transitive ties added to the rtnet objective function.
# `ansM1d` is a sienaFit object estimated earlier in this script — TODO confirm spec.
summary(ansM1d)
#> Estimates, standard errors and convergence t-ratios
#> 
#>                                            Estimate   Standard   Convergence 
#>                                                         Error      t-ratio   
#>    1. rate constant fnet rate (period 1)    3.7040  ( 0.1662   )    0.0205   
#>    2. rate constant fnet rate (period 2)    1.9684  ( 0.1246   )   -0.0029   
#>    3. eval fnet: outdegree (density)       -0.6498  ( 0.0816   )    0.0129   
#>    4. eval fnet: reciprocity                0.8748  ( 0.0918   )   -0.0070   
#>    5. rate constant atmnet rate (period 1) 25.7112  ( 1.9733   )   -0.0044   
#>    6. rate constant atmnet rate (period 2)  9.6607  ( 0.5419   )    0.0394   
#>    7. eval atmnet: outdegree (density)     -2.3452  ( 0.0302   )   -0.0134   
#>    8. eval atmnet: reciprocity              1.7019  ( 0.0706   )    0.0083   
#>    9. rate constant rtnet rate (period 1)  14.0703  ( 0.8359   )   -0.0769   
#>   10. rate constant rtnet rate (period 2)  14.3665  ( 0.9926   )   -0.0006   
#>   11. eval rtnet: outdegree (density)      -2.8855  ( 0.0565   )   -0.0722   
#>   12. eval rtnet: reciprocity               0.7335  ( 0.0802   )   -0.0364   
#>   13. eval rtnet: transitive triplets       0.1258  ( 0.0080   )   -0.0180   
#>   14. eval rtnet: transitive ties           1.1401  ( 0.0742   )   -0.0706   
#> 
#> Overall maximum convergence ratio:    0.1251 
#> 
#> 
#> Total of 2956 iteration steps.
#> 
#> Covariance matrix of estimates (correlations below diagonal)
#> 
#>        0.028        0.000       -0.001        0.000       -0.098        0.005        0.000        0.000       -0.006        0.001        0.000        0.000        0.000        0.000
#>        0.005        0.016       -0.001        0.000        0.006       -0.008        0.000        0.000        0.002       -0.019       -0.001        0.001        0.000        0.002
#>       -0.038       -0.068        0.007       -0.003       -0.006        0.001        0.000        0.000        0.002        0.003        0.000        0.000        0.000        0.000
#>       -0.004        0.020       -0.389        0.008       -0.005       -0.003        0.000       -0.001       -0.013        0.003        0.000        0.000        0.000        0.000
#>       -0.300        0.026       -0.039       -0.026        3.894       -0.186        0.011        0.030        0.070       -0.110       -0.004       -0.005        0.000        0.012
#>        0.060       -0.121        0.013       -0.059       -0.174        0.294        0.002       -0.002        0.011        0.021        0.004       -0.002        0.001       -0.006
#>       -0.059        0.044       -0.099        0.036        0.183        0.132        0.001       -0.001        0.000       -0.002        0.000        0.000        0.000        0.000
#>       -0.034       -0.039        0.038       -0.082        0.218       -0.041       -0.451        0.005        0.004       -0.004        0.000        0.000        0.000        0.000
#>       -0.041        0.016        0.029       -0.166        0.042        0.024       -0.016        0.063        0.699       -0.150        0.001        0.001        0.000        0.003
#>        0.004       -0.153        0.033        0.038       -0.056        0.039       -0.070       -0.058       -0.181        0.985        0.014       -0.003        0.001       -0.019
#>        0.002       -0.152        0.027       -0.017       -0.031        0.119       -0.114        0.011        0.014        0.247        0.003        0.000        0.000       -0.004
#>       -0.031        0.090       -0.009        0.014       -0.029       -0.053        0.042       -0.081        0.017       -0.039        0.014        0.006        0.000        0.000
#>        0.049       -0.145        0.036       -0.088        0.021        0.133       -0.063        0.074       -0.037        0.127        0.106       -0.552        0.000        0.000
#>       -0.034        0.169       -0.045        0.042        0.080       -0.142        0.116       -0.007        0.051       -0.261       -0.865       -0.068       -0.348        0.006
#> 
#> Derivative matrix of expected statistics X by parameters:
#> 
#>      147.630        0.000      147.630       88.732       -0.602        0.000       -1.461       -4.670        2.669        0.000       -8.696       -8.150      -95.961       -8.335
#>        0.000      135.002       34.368        9.812        0.000        3.628       -3.734        6.008        0.000       -2.209       -3.736       -1.738       33.991       -8.235
#>        7.411       11.548      225.935      150.124        0.933        5.738       17.627        8.148        2.537      -14.145       -5.894       -4.351      -29.486        0.430
#>       11.274       -3.115       86.316      327.207        0.305        2.679        1.718       -1.019        8.189       -4.172      -10.515       -0.817      113.055       -5.701
#>        3.886        0.000        3.886        2.715        9.800        0.000       -8.790       -5.614       -0.415        0.000       -5.610       -3.260      -43.154       -5.972
#>        0.000        2.846        0.816        1.723        0.000       36.201      -25.670       -9.126        0.000       -3.472       -3.045       -2.953      -50.172       -3.061
#>      -10.618       -8.540       37.130       22.775      257.354      121.782     1286.184      551.828       25.791       32.397       82.528       33.331      568.367       61.393
#>      -18.467       10.932      -11.978       -8.130        4.651       -5.834      316.761      389.963       14.855        1.042       10.827        8.209      -29.716       -2.739
#>        1.271        0.000        1.271        4.636       -1.102        0.000        0.020       -0.913       28.292        0.000      -10.993       -5.285       -8.332      -13.102
#>        0.000        2.208        0.028       -0.854        0.000        1.071        1.149        1.666        0.000       19.562       -8.378       -4.817      -43.013       -7.031
#>       -1.357        1.450      -14.224      -26.659       22.771       -9.891       53.522       27.017      256.937      234.967     2552.419     1141.957    12407.941     2437.127
#>        1.474      -13.450       -2.933       -9.498       -1.536        3.405        9.040       14.711        0.420       -8.073      696.120      725.940     6359.445      773.587
#>     -107.516        0.371     -120.248      -75.931      131.930      -63.260      140.307       82.588      159.547      -19.988    10140.304     8246.876   109375.937    11051.064
#>        1.304      -17.860       -9.619      -30.208       10.622       24.653       -3.484        8.001      193.146      130.794     2119.132     1072.890    11610.482     2292.185
#> 
#> Covariance matrix of X (correlations below diagonal):
#> 
#>      544.414        5.756      531.543      331.271       -1.369        0.764       -6.408      -24.959       12.660      -11.783      -41.245      -33.563     -313.369      -35.209
#>        0.015      266.044       72.605        9.577       -0.791        4.911       -8.746       14.878       -1.176       -3.559      -33.716      -23.246     -313.467      -34.813
#>        0.808        0.158      795.623      500.758      -10.798        8.913       14.199      -13.816       -2.907      -30.724      -92.402      -61.828     -647.959      -75.304
#>        0.470        0.019        0.588      912.203       -0.335        0.830        6.876      -42.830       -1.820      -10.611     -109.280      -94.595     -951.729      -98.832
#>       -0.003       -0.002       -0.017       -0.001      491.744       -1.755      116.285       -5.028       -0.721       -0.432        1.526      -12.364     -111.728        3.656
#>        0.002        0.015        0.016        0.001       -0.004      411.235      -32.766      -39.565        9.437      -15.494       36.363       15.990      312.488       42.232
#>       -0.008       -0.016        0.015        0.007        0.161       -0.050     1063.442      492.310        5.118       -0.777       14.575       14.818      -25.875       18.732
#>       -0.046        0.040       -0.021       -0.062       -0.010       -0.085        0.656      530.269       14.952       -5.071        6.844       12.018       58.783       -5.810
#>        0.021       -0.003       -0.004       -0.002       -0.001        0.018        0.006        0.026      645.114      -24.568      256.292        5.334      577.254      190.502
#>       -0.024       -0.010       -0.051       -0.016       -0.001       -0.036       -0.001       -0.010       -0.045      453.488      146.254      -56.020     -242.391       31.827
#>       -0.030       -0.035       -0.055       -0.061        0.001        0.030        0.008        0.005        0.171        0.116     3486.930     1968.728    22589.967     3493.685
#>       -0.034       -0.033       -0.051       -0.074       -0.013        0.019        0.011        0.012        0.005       -0.062        0.783     1813.727    19504.121     2121.364
#>       -0.026       -0.037       -0.044       -0.060       -0.010        0.030       -0.002        0.005        0.044       -0.022        0.734        0.879   271555.304    23858.852
#>       -0.024       -0.035       -0.043       -0.053        0.003        0.034        0.009       -0.004        0.122        0.024        0.960        0.808        0.743     3796.365

I think we can conclude that there is a weak negative effect of same gender on retweeting: individuals do not prefer to retweet someone of the same gender over someone of the other gender. We also have to check whether the same holds for age. In the second model, same age likewise shows a weak negative effect (-0.0085), so politicians do not prefer to retweet content posted by someone of the same age. In sum, the segregation by party affiliation in the retweet network does not appear to be a by-product of segregation along age or sex.


# Model M1b: multivariate SAOM with the "same lft" (same age) effect on rtnet,
# testing whether politicians preferentially retweet same-aged colleagues.
# `ansM1b` is a sienaFit object estimated earlier in this script — TODO confirm spec.
summary(ansM1b)
#> Estimates, standard errors and convergence t-ratios
#> 
#>                                            Estimate   Standard   Convergence 
#>                                                         Error      t-ratio   
#>    1. rate constant fnet rate (period 1)    3.7029  ( 0.1585   )    0.0540   
#>    2. rate constant fnet rate (period 2)    1.9681  ( 0.1212   )    0.0116   
#>    3. eval fnet: outdegree (density)       -0.6500  ( 0.0813   )    0.0518   
#>    4. eval fnet: reciprocity                0.8738  ( 0.0876   )   -0.0051   
#>    5. rate constant atmnet rate (period 1) 25.7601  ( 1.6459   )    0.0126   
#>    6. rate constant atmnet rate (period 2)  9.6437  ( 0.5321   )    0.0203   
#>    7. eval atmnet: outdegree (density)     -2.3447  ( 0.0299   )   -0.0553   
#>    8. eval atmnet: reciprocity              1.7003  ( 0.0701   )   -0.0063   
#>    9. rate constant rtnet rate (period 1)   9.8955  ( 0.4964   )    0.0516   
#>   10. rate constant rtnet rate (period 2)   8.8459  ( 0.4346   )    0.0499   
#>   11. eval rtnet: outdegree (density)      -2.1509  ( 0.0282   )    0.0390   
#>   12. eval rtnet: reciprocity               1.7617  ( 0.0695   )   -0.0035   
#>   13. eval rtnet: same lft                 -0.0085  ( 0.1328   )   -0.0104   
#> 
#> Overall maximum convergence ratio:    0.1374 
#> 
#> 
#> Total of 2317 iteration steps.
#> 
#> Covariance matrix of estimates (correlations below diagonal)
#> 
#>        0.025        0.000       -0.001        0.000       -0.013        0.000        0.000       -0.001       -0.003        0.002        0.000        0.000        0.001
#>       -0.003        0.015        0.001        0.000        0.003        0.004        0.000        0.000        0.000       -0.003        0.000        0.000        0.000
#>       -0.075        0.052        0.007       -0.003        0.010       -0.005        0.000        0.000        0.000       -0.001        0.000        0.000        0.000
#>       -0.005       -0.016       -0.357        0.008       -0.017        0.000        0.000        0.000        0.003        0.002        0.000        0.000       -0.001
#>       -0.050        0.013        0.071       -0.117        2.709       -0.153        0.006        0.028       -0.020       -0.013        0.001       -0.002       -0.022
#>        0.000        0.058       -0.105        0.010       -0.175        0.283        0.000       -0.001       -0.004       -0.004       -0.001        0.001        0.007
#>        0.037       -0.015       -0.005       -0.076        0.130        0.000        0.001       -0.001        0.000        0.000        0.000        0.000        0.000
#>       -0.098        0.019        0.016        0.045        0.246       -0.039       -0.498        0.005        0.001        0.000        0.000        0.000       -0.001
#>       -0.042        0.000       -0.010        0.059       -0.024       -0.015        0.033        0.016        0.246       -0.006        0.001        0.003        0.002
#>        0.027       -0.058       -0.032        0.040       -0.018       -0.016       -0.013        0.012       -0.030        0.189       -0.001        0.001        0.002
#>       -0.022       -0.022       -0.016       -0.002        0.030       -0.051       -0.050       -0.035        0.045       -0.068        0.001       -0.001        0.000
#>        0.044        0.008        0.028        0.039       -0.013        0.017        0.059        0.034        0.087        0.035       -0.453        0.005        0.000
#>        0.032       -0.031       -0.035       -0.044       -0.102        0.093        0.014       -0.068        0.027        0.042       -0.069       -0.035        0.018
#> 
#> Derivative matrix of expected statistics X by parameters:
#> 
#>      150.116        0.000      150.116       91.979       -1.053        0.000        5.713        4.942        4.915        0.000       -5.025       -7.543       -0.389
#>        0.000      135.597       17.896       11.251        0.000        1.815        7.116        1.617        0.000        8.528       -1.616       -0.846        0.896
#>       10.124       -9.224      225.543      153.271       -3.358        5.632       -5.974       -2.904        0.498       -5.866       -4.452      -12.147        1.960
#>        6.339        1.619       79.299      342.095        1.342        1.783        2.013       -2.384        4.347       -8.948        7.250       -6.697        3.485
#>        0.217        0.000        0.217        2.348       12.892        0.000      -10.445       -6.741        0.048        0.000       -0.113        0.379        0.325
#>        0.000       -2.149        3.217        1.926        0.000       40.360      -12.757       -6.244        0.000       -2.642        4.594        2.378       -0.694
#>       14.360       -0.095       13.143       63.176      234.860      153.303     1293.019      608.863      -68.841       -9.777       49.364       -3.789        3.087
#>       27.763       -0.908       23.747        4.911      -18.209       -0.110      341.843      416.070      -10.391        9.170       22.488       -1.038        2.493
#>        2.437        0.000        2.437       -0.736        2.161        0.000       -1.869       -2.470       46.319        0.000      -17.428      -10.949       -0.685
#>        0.000        1.979        1.623        0.310        0.000        5.981       -0.451       -2.486        0.000       50.634       -5.413       -3.758       -0.522
#>      -14.421        4.352      -24.191       -6.354      -30.492        2.480        3.670       38.038      107.701      140.822     1372.864      604.811       34.686
#>      -11.525        1.230      -21.914      -13.324       -7.629      -11.031      -14.407       -5.819      -39.579      -12.265      334.461      450.751        8.939
#>       -2.567        2.933        2.226        7.220       -1.194        0.828        0.735        4.304       -1.718        6.548       39.485       16.641       45.684
#> 
#> Covariance matrix of X (correlations below diagonal):
#> 
#>      555.845        1.950      537.333      332.788       -6.804       -3.500       31.165       14.942       18.248       11.058      -27.062      -17.675       -0.199
#>        0.005      266.998       40.151       29.633        3.949        0.542        7.070        3.674       -0.313       14.991      -12.061       -2.638        0.788
#>        0.811        0.087      790.350      507.460      -12.670        4.795        7.792        5.658       19.086       10.748      -47.046      -34.156        1.751
#>        0.458        0.059        0.586      949.029       15.371       -2.705       30.698       14.219       26.619        4.990        5.935       -4.091        3.786
#>       -0.012        0.010       -0.019        0.022      535.198      -31.328       63.140      -27.218       -6.246       -8.924      -21.592       -0.855        0.816
#>       -0.007        0.002        0.008       -0.004       -0.061      485.227       11.137      -45.416      -18.008       13.765       20.476        9.244        2.308
#>        0.040        0.013        0.008        0.030        0.083        0.015     1075.542      530.346      -37.125      -11.314      -27.476        6.911        0.377
#>        0.027        0.009        0.008        0.019       -0.050       -0.087        0.683      561.055      -19.040       -7.207       16.898       15.625        1.070
#>        0.033       -0.001        0.029        0.037       -0.012       -0.035       -0.048       -0.034      549.119        3.117      -34.853      -99.089       -1.642
#>        0.021        0.041        0.017        0.007       -0.017        0.028       -0.016       -0.014        0.006      495.080       21.291      -44.192        5.892
#>       -0.033       -0.021       -0.048        0.005       -0.027        0.027       -0.024        0.020       -0.042        0.027     1229.548      654.334       41.096
#>       -0.027       -0.006       -0.044       -0.005       -0.001        0.015        0.008        0.024       -0.152       -0.071        0.671      773.468       16.495
#>       -0.001        0.008        0.010        0.021        0.006        0.018        0.002        0.008       -0.012        0.044        0.196        0.099       35.594

Let’s continue to test the third RQ. To test propinquity, we need to know how close party members are seated to one another. This is captured by the variable seats: keyf contains the seating coordinates, but inspecting the mydata list shows that the pairwise distance between members has already been calculated as a dyadic covariate (afstand). This is what we will use to test propinquity.

# Inspect the seating data and the dyadic covariates attached to the siena data
# object; `afstand` (pairwise seating distance) is presumably stored in
# mydata$dycCovars — verify against where `mydata` is constructed.
summary(seats)
summary(mydata$dycCovars)


In the output we see a negative effect (-0.0999) of afstand x reciprocity. Since afstand measures seating distance, a negative interaction means that the reciprocity effect weakens as distance increases — in other words, reciprocal retweet ties are more likely between members who are seated close together, while the main effect of reciprocity remains positive. This would be consistent with propinquity, although I’m not entirely sure how afstand enters the model (e.g., whether it is centered), so this interpretation should be checked against the RSiena manual’s treatment of dyadic covariate interactions.

# Model M1c: multivariate SAOM with the `afstand x reciprocity` interaction on
# rtnet, testing propinquity in reciprocal retweeting.
# NOTE(review): the output below appears to duplicate the transcript printed
# earlier in this document — confirm the repeated call is intentional.
summary(ansM1c)
#> Estimates, standard errors and convergence t-ratios
#> 
#>                                            Estimate   Standard   Convergence 
#>                                                         Error      t-ratio   
#>    1. rate constant fnet rate (period 1)    3.7056  ( 0.1618   )    0.0704   
#>    2. rate constant fnet rate (period 2)    1.9693  ( 0.1169   )    0.0015   
#>    3. eval fnet: outdegree (density)       -0.6498  ( 0.0875   )    0.1001   
#>    4. eval fnet: reciprocity                0.8761  ( 0.0952   )    0.0577   
#>    5. rate constant atmnet rate (period 1) 25.7303  ( 2.1120   )    0.0595   
#>    6. rate constant atmnet rate (period 2)  9.6375  ( 0.6349   )   -0.0284   
#>    7. eval atmnet: outdegree (density)     -2.3448  ( 0.0300   )    0.0472   
#>    8. eval atmnet: reciprocity              1.7007  ( 0.0739   )    0.0538   
#>    9. rate constant rtnet rate (period 1)  10.3726  ( 0.5379   )    0.0871   
#>   10. rate constant rtnet rate (period 2)   9.2336  ( 0.5150   )    0.0279   
#>   11. eval rtnet: outdegree (density)      -2.1440  ( 0.0281   )    0.0235   
#>   12. eval rtnet: reciprocity               1.0249  ( 0.1500   )   -0.0119   
#>   13. eval rtnet: afstand x reciprocity    -0.0999  ( 0.0163   )    0.0223   
#> 
#> Overall maximum convergence ratio:    0.1644 
#> 
#> 
#> Total of 2494 iteration steps.
#> 
#> Covariance matrix of estimates (correlations below diagonal)
#> 
#>        0.026        0.001        0.000        0.000       -0.036        0.000        0.000       -0.001       -0.001       -0.001        0.000        0.001        0.000
#>        0.066        0.014       -0.001        0.000       -0.005       -0.002        0.000        0.000        0.002        0.001        0.000        0.001        0.000
#>       -0.016       -0.054        0.008       -0.003       -0.015       -0.008        0.000        0.000       -0.002       -0.005        0.000        0.000        0.000
#>        0.003       -0.026       -0.304        0.009        0.065       -0.006        0.000        0.001        0.006        0.000        0.000        0.000        0.000
#>       -0.104       -0.021       -0.080        0.322        4.460       -0.282        0.013        0.046        0.169        0.043       -0.001       -0.015       -0.003
#>        0.001       -0.021       -0.149       -0.097       -0.210        0.403        0.001       -0.003       -0.026        0.006        0.001       -0.001        0.000
#>       -0.086       -0.057        0.012        0.072        0.206        0.042        0.001       -0.001       -0.001        0.000        0.000        0.000        0.000
#>       -0.059        0.023       -0.049        0.112        0.295       -0.070       -0.494        0.005        0.007        0.000        0.000        0.000        0.000
#>       -0.012        0.033       -0.045        0.122        0.149       -0.075       -0.040        0.164        0.289       -0.019        0.001        0.001        0.000
#>       -0.015        0.012       -0.106        0.005        0.039        0.019       -0.020       -0.008       -0.068        0.265        0.001        0.003        0.000
#>        0.014       -0.032       -0.011        0.027       -0.013        0.079        0.008       -0.070        0.067        0.091        0.001       -0.001        0.000
#>        0.034        0.031        0.036        0.006       -0.046       -0.006        0.015       -0.013        0.010        0.033       -0.222        0.022        0.002
#>        0.047        0.010        0.045        0.004       -0.101        0.041       -0.018       -0.037       -0.036       -0.024       -0.045        0.880        0.000
#> 
#> Derivative matrix of expected statistics X by parameters:
#> 
#>      144.470        0.000      144.470       83.286        2.341        0.000        7.377        7.069        2.028        0.000        1.827        1.609      -25.495
#>        0.000      145.145       32.526       15.128        0.000        8.427        5.418        1.267        0.000        9.474       -0.349       -5.563       48.416
#>        8.048       13.588      204.612      132.600        1.380       16.022       -9.298       -4.354        0.955        0.352       10.568        7.198     -102.057
#>       -4.539        5.009       63.105      316.504       -2.476        6.827        0.195        1.972      -14.451       -0.061       -3.740        5.697      -94.590
#>        0.930        0.000        0.930       -3.104        9.422        0.000      -11.059       -7.451       -0.911        0.000       -0.445       -0.311        5.975
#>        0.000        0.977        5.228        4.830        0.000       31.487      -16.166       -5.929        0.000       -0.569       -1.997        1.276      -16.881
#>       60.585       15.293       47.030      -13.969      239.030      168.447     1436.591      711.460       30.587       22.931      -11.674      -93.763      760.712
#>       34.807        1.895       33.543        5.519      -23.055       -1.239      376.462      436.122      -25.714       11.595       33.029      -10.481      125.422
#>       -0.139        0.000       -0.139       -2.694        1.137        0.000       -2.493       -2.640       40.009        0.000      -14.453       -9.884       89.108
#>        0.000        0.204        3.509        3.212        0.000       -1.768        6.110        3.917        0.000       39.756      -13.313       -9.677       86.780
#>       -1.989       -8.585      -14.870      -61.245       16.241      -12.586       58.969       37.685      115.434      149.505     1277.164      465.523    -3710.883
#>        2.531       -8.782        5.233       -4.483       -2.043        4.189       -8.569       -6.601      -41.723      -25.786      261.957      395.219    -3245.204
#>      -37.206       52.657      -88.089      -44.796        8.715        4.592       48.013       40.744      408.108      237.334    -2206.344    -3242.372    32032.864
#> 
#> Covariance matrix of X (correlations below diagonal):
#> 
#>      534.590       23.520      543.904      320.731        9.467      -14.363       -3.505       22.158        8.268       -6.101       13.382       13.583     -104.140
#>        0.060      285.246       86.395       36.262      -10.001       15.128        0.460        3.552        7.901       17.951      -16.302      -11.752       95.641
#>        0.824        0.179      814.580      513.905       11.650       10.536       -6.936       18.401        3.523       -0.611       26.764       27.108     -230.267
#>        0.453        0.070        0.588      936.486       -3.468       10.009       -7.472       14.573      -22.456        3.949      -29.707       -5.312      -21.730
#>        0.018       -0.027        0.018       -0.005      499.326      -14.181       85.187      -39.764       17.376       19.266        6.616       -5.604       10.299
#>       -0.030        0.043        0.018        0.016       -0.031      425.790       20.526      -22.231       -6.150      -15.328       -2.766       -2.972       43.979
#>       -0.004        0.001       -0.007       -0.007        0.111        0.029     1184.271      597.356       -3.620       39.347       39.691      -15.946       85.779
#>        0.039        0.009        0.026        0.019       -0.072       -0.044        0.703      610.319      -15.926       27.964       19.371      -12.680      121.894
#>        0.016        0.022        0.006       -0.034        0.036       -0.014       -0.005       -0.030      470.653       -3.160       14.641      -97.120      941.082
#>       -0.012        0.050       -0.001        0.006        0.041       -0.035        0.054        0.053       -0.007      452.224       83.922      -37.552      391.211
#>        0.018       -0.029        0.029       -0.030        0.009       -0.004        0.035        0.024        0.021        0.120     1080.719      482.551    -4133.295
#>        0.023       -0.027        0.037       -0.007       -0.010       -0.006       -0.018       -0.020       -0.176       -0.069        0.577      646.063    -5382.071
#>       -0.020        0.025       -0.035       -0.003        0.002        0.009        0.011        0.022        0.189        0.080       -0.549       -0.924    52488.103
summary(ansM1d)
#> Estimates, standard errors and convergence t-ratios
#> 
#>                                            Estimate   Standard   Convergence 
#>                                                         Error      t-ratio   
#>    1. rate constant fnet rate (period 1)    3.7040  ( 0.1662   )    0.0205   
#>    2. rate constant fnet rate (period 2)    1.9684  ( 0.1246   )   -0.0029   
#>    3. eval fnet: outdegree (density)       -0.6498  ( 0.0816   )    0.0129   
#>    4. eval fnet: reciprocity                0.8748  ( 0.0918   )   -0.0070   
#>    5. rate constant atmnet rate (period 1) 25.7112  ( 1.9733   )   -0.0044   
#>    6. rate constant atmnet rate (period 2)  9.6607  ( 0.5419   )    0.0394   
#>    7. eval atmnet: outdegree (density)     -2.3452  ( 0.0302   )   -0.0134   
#>    8. eval atmnet: reciprocity              1.7019  ( 0.0706   )    0.0083   
#>    9. rate constant rtnet rate (period 1)  14.0703  ( 0.8359   )   -0.0769   
#>   10. rate constant rtnet rate (period 2)  14.3665  ( 0.9926   )   -0.0006   
#>   11. eval rtnet: outdegree (density)      -2.8855  ( 0.0565   )   -0.0722   
#>   12. eval rtnet: reciprocity               0.7335  ( 0.0802   )   -0.0364   
#>   13. eval rtnet: transitive triplets       0.1258  ( 0.0080   )   -0.0180   
#>   14. eval rtnet: transitive ties           1.1401  ( 0.0742   )   -0.0706   
#> 
#> Overall maximum convergence ratio:    0.1251 
#> 
#> 
#> Total of 2956 iteration steps.
#> 
#> Covariance matrix of estimates (correlations below diagonal)
#> 
#>        0.028        0.000       -0.001        0.000       -0.098        0.005        0.000        0.000       -0.006        0.001        0.000        0.000        0.000        0.000
#>        0.005        0.016       -0.001        0.000        0.006       -0.008        0.000        0.000        0.002       -0.019       -0.001        0.001        0.000        0.002
#>       -0.038       -0.068        0.007       -0.003       -0.006        0.001        0.000        0.000        0.002        0.003        0.000        0.000        0.000        0.000
#>       -0.004        0.020       -0.389        0.008       -0.005       -0.003        0.000       -0.001       -0.013        0.003        0.000        0.000        0.000        0.000
#>       -0.300        0.026       -0.039       -0.026        3.894       -0.186        0.011        0.030        0.070       -0.110       -0.004       -0.005        0.000        0.012
#>        0.060       -0.121        0.013       -0.059       -0.174        0.294        0.002       -0.002        0.011        0.021        0.004       -0.002        0.001       -0.006
#>       -0.059        0.044       -0.099        0.036        0.183        0.132        0.001       -0.001        0.000       -0.002        0.000        0.000        0.000        0.000
#>       -0.034       -0.039        0.038       -0.082        0.218       -0.041       -0.451        0.005        0.004       -0.004        0.000        0.000        0.000        0.000
#>       -0.041        0.016        0.029       -0.166        0.042        0.024       -0.016        0.063        0.699       -0.150        0.001        0.001        0.000        0.003
#>        0.004       -0.153        0.033        0.038       -0.056        0.039       -0.070       -0.058       -0.181        0.985        0.014       -0.003        0.001       -0.019
#>        0.002       -0.152        0.027       -0.017       -0.031        0.119       -0.114        0.011        0.014        0.247        0.003        0.000        0.000       -0.004
#>       -0.031        0.090       -0.009        0.014       -0.029       -0.053        0.042       -0.081        0.017       -0.039        0.014        0.006        0.000        0.000
#>        0.049       -0.145        0.036       -0.088        0.021        0.133       -0.063        0.074       -0.037        0.127        0.106       -0.552        0.000        0.000
#>       -0.034        0.169       -0.045        0.042        0.080       -0.142        0.116       -0.007        0.051       -0.261       -0.865       -0.068       -0.348        0.006
#> 
#> Derivative matrix of expected statistics X by parameters:
#> 
#>      147.630        0.000      147.630       88.732       -0.602        0.000       -1.461       -4.670        2.669        0.000       -8.696       -8.150      -95.961       -8.335
#>        0.000      135.002       34.368        9.812        0.000        3.628       -3.734        6.008        0.000       -2.209       -3.736       -1.738       33.991       -8.235
#>        7.411       11.548      225.935      150.124        0.933        5.738       17.627        8.148        2.537      -14.145       -5.894       -4.351      -29.486        0.430
#>       11.274       -3.115       86.316      327.207        0.305        2.679        1.718       -1.019        8.189       -4.172      -10.515       -0.817      113.055       -5.701
#>        3.886        0.000        3.886        2.715        9.800        0.000       -8.790       -5.614       -0.415        0.000       -5.610       -3.260      -43.154       -5.972
#>        0.000        2.846        0.816        1.723        0.000       36.201      -25.670       -9.126        0.000       -3.472       -3.045       -2.953      -50.172       -3.061
#>      -10.618       -8.540       37.130       22.775      257.354      121.782     1286.184      551.828       25.791       32.397       82.528       33.331      568.367       61.393
#>      -18.467       10.932      -11.978       -8.130        4.651       -5.834      316.761      389.963       14.855        1.042       10.827        8.209      -29.716       -2.739
#>        1.271        0.000        1.271        4.636       -1.102        0.000        0.020       -0.913       28.292        0.000      -10.993       -5.285       -8.332      -13.102
#>        0.000        2.208        0.028       -0.854        0.000        1.071        1.149        1.666        0.000       19.562       -8.378       -4.817      -43.013       -7.031
#>       -1.357        1.450      -14.224      -26.659       22.771       -9.891       53.522       27.017      256.937      234.967     2552.419     1141.957    12407.941     2437.127
#>        1.474      -13.450       -2.933       -9.498       -1.536        3.405        9.040       14.711        0.420       -8.073      696.120      725.940     6359.445      773.587
#>     -107.516        0.371     -120.248      -75.931      131.930      -63.260      140.307       82.588      159.547      -19.988    10140.304     8246.876   109375.937    11051.064
#>        1.304      -17.860       -9.619      -30.208       10.622       24.653       -3.484        8.001      193.146      130.794     2119.132     1072.890    11610.482     2292.185
#> 
#> Covariance matrix of X (correlations below diagonal):
#> 
#>      544.414        5.756      531.543      331.271       -1.369        0.764       -6.408      -24.959       12.660      -11.783      -41.245      -33.563     -313.369      -35.209
#>        0.015      266.044       72.605        9.577       -0.791        4.911       -8.746       14.878       -1.176       -3.559      -33.716      -23.246     -313.467      -34.813
#>        0.808        0.158      795.623      500.758      -10.798        8.913       14.199      -13.816       -2.907      -30.724      -92.402      -61.828     -647.959      -75.304
#>        0.470        0.019        0.588      912.203       -0.335        0.830        6.876      -42.830       -1.820      -10.611     -109.280      -94.595     -951.729      -98.832
#>       -0.003       -0.002       -0.017       -0.001      491.744       -1.755      116.285       -5.028       -0.721       -0.432        1.526      -12.364     -111.728        3.656
#>        0.002        0.015        0.016        0.001       -0.004      411.235      -32.766      -39.565        9.437      -15.494       36.363       15.990      312.488       42.232
#>       -0.008       -0.016        0.015        0.007        0.161       -0.050     1063.442      492.310        5.118       -0.777       14.575       14.818      -25.875       18.732
#>       -0.046        0.040       -0.021       -0.062       -0.010       -0.085        0.656      530.269       14.952       -5.071        6.844       12.018       58.783       -5.810
#>        0.021       -0.003       -0.004       -0.002       -0.001        0.018        0.006        0.026      645.114      -24.568      256.292        5.334      577.254      190.502
#>       -0.024       -0.010       -0.051       -0.016       -0.001       -0.036       -0.001       -0.010       -0.045      453.488      146.254      -56.020     -242.391       31.827
#>       -0.030       -0.035       -0.055       -0.061        0.001        0.030        0.008        0.005        0.171        0.116     3486.930     1968.728    22589.967     3493.685
#>       -0.034       -0.033       -0.051       -0.074       -0.013        0.019        0.011        0.012        0.005       -0.062        0.783     1813.727    19504.121     2121.364
#>       -0.026       -0.037       -0.044       -0.060       -0.010        0.030       -0.002        0.005        0.044       -0.022        0.734        0.879   271555.304    23858.852
#>       -0.024       -0.035       -0.043       -0.053        0.003        0.034        0.009       -0.004        0.122        0.024        0.960        0.808        0.743     3796.365


To give an answer to the last research question, I believe I do not need to estimate a new model. When we look at the outdegree, we see that this is negative. The only interpretation I wrote down from last week is that that means that individuals want outdegrees but not with everyone in the network. We also see that individuals prefer to have a reciprocal tie rather than a non-reciprocal tie. Politicians thus prefer to retweet someone who also retweets them, rather than retweeting individuals that have not retweeted them. When estimating a model with details about transitivity, we see that there is both a positive effect of transitive triplets as well as transitive ties. Does this then mean that 2 individuals that retweet me also retweet each other? When summarizing these effects, we see that politicians prefer to retweet individuals with whom they already have some kind of connection (through transitivity or to establish a reciprocal relation). Therefore, I think one could conclude that there are structural factors in the network that could link with segregation. However, it is still possible that these structural factors are mostly visible because there already is segregation based on party affiliation. (How can you distinguish what is cause and what is influence?)

---
title: "Tutorial R Siena"
author: "Anuschka Peelen"
date: "`r Sys.Date()`"
output: html_document
---

```{r warning=FALSE, globalsettings, echo=FALSE, results='hide'}
library(knitr)

# Global knitr settings for the whole document.
knitr::opts_chunk$set(echo = TRUE)
# Wrap source at 100 characters, tidy code, silence warnings/messages, prefix
# output with "#>", cache chunk results, and tag source/output with CSS classes.
opts_chunk$set(tidy.opts=list(width.cutoff=100),tidy=TRUE, warning = FALSE, message = FALSE,comment = "#>", cache=TRUE, class.source=c("test"), class.output=c("test2"))
options(width = 100)
# Allow rgl (3D) plots to be embedded in the knitted HTML.
rgl::setupKnitr()



colorize <- function(x, color) {sprintf("<span style='color: %s;'>%s</span>", color, x) }
```

```{r klippy, echo=FALSE, include=TRUE}
# Add copy-to-clipboard buttons to code chunks in the knitted HTML.
klippy::klippy(position = c('top', 'right'))
#klippy::klippy(color = 'darkred')
#klippy::klippy(tooltip_message = 'Click to copy', tooltip_success = 'Done')
```

```{r, eval=FALSE}
rm(list=ls())
```

# RSiena tutorial

```{r, eval=FALSE}
#install.packages("RSiena")
library(RSiena)
# The s50 example data shipped with RSiena: 50 pupils observed at 3 waves.
friend.data.w1 <- s501
friend.data.w2 <- s502
friend.data.w3 <- s503
drink <- s50a
smoke <- s50s
# Stack the three friendship waves into a 50x50x3 array and declare it the
# dependent (evolving) network.
friendship <- sienaDependent(array(c(friend.data.w1, friend.data.w2, friend.data.w3), dim = c(50, 50,
    3)))
smoke1 <- coCovar(smoke[, 1])  # smoking at wave 1 as a constant covariate
alcohol <- varCovar(drink)  # drinking as a time-varying covariate
mydata <- sienaDataCreate(friendship, smoke1, alcohol)
# and request
mydata
# to see what you have produced.
```


```{r, eval=FALSE}
myalgorithm <- sienaAlgorithmCreate(projname = NULL)
```

```{r, eval=FALSE}
# Start from the default effects (rate parameters, outdegree, reciprocity).
myeff <- getEffects(mydata)
# Structural effects: transitive triplets and 3-cycles.
myeff <- includeEffects(myeff, transTrip, cycle3)
# Alcohol: sender (ego), receiver (alter) and ego-alter interaction effects.
myeff <- includeEffects(myeff, egoX, altX, egoXaltX, interaction1 = "alcohol")
# Smoking similarity.
myeff <- includeEffects(myeff, simX, interaction1 = "smoke1")
myeff
```
```{r, eval=FALSE}
# Estimate the model and inspect convergence and estimates.
ans <- siena07(myalgorithm, data = mydata, effects = myeff)
summary(ans)
```


```{r, eval=FALSE}
# Score-type test of indegree popularity (sqrt): the effect is fixed at 0 and
# tested rather than estimated.
myeff <- setEffect(myeff, inPopSqrt, fix = TRUE, test = TRUE, initialValue = 0)
# prevAns re-uses the previous run's results as starting values.
ans <- siena07(myalgorithm, data = mydata, effects = myeff, prevAns=ans)
summary(ans)
```


# Twitter manual 
```{r, eval=FALSE}
# Density: observed relations divided by possible relations.
fdensity <- function(x) {
    # x: adjacency matrix of the nomination network (0/1; 10 = RSiena structural zero).
    # Returns the proportion of realised ties among all valid dyads.
    # Self-nominations are not meaningful, so exclude the diagonal.
    diag(x) <- NA
    # RSiena codes structural zeros as 10; treat them as missing.
    x[x == 10] <- NA
    # TRUE/FALSE spelled out (T/F are reassignable and unsafe).
    sum(x == 1, na.rm = TRUE) / sum(x == 1 | x == 0, na.rm = TRUE)
}

# Calculate intragroup density: density restricted to dyads whose nodes match on an attribute.
fdensityintra <- function(x, A) {
    # x: adjacency matrix (0/1; 10 = structural zero).
    # A: dyad matrix, 1/TRUE when both nodes of the dyad have the same attribute value.
    diag(x) <- NA
    x[x == 10] <- NA
    diag(A) <- NA
    # Ties within matching dyads over valid matching dyads.
    sum(x == 1 & A == 1, na.rm = TRUE) / sum((x == 1 | x == 0) & A == 1, na.rm = TRUE)
}

# Calculate intergroup density (the original header comment wrongly said "intragroup"):
# density restricted to dyads whose nodes differ on an attribute.
fdensityinter <- function(x, A) {
    # x: adjacency matrix (0/1; 10 = structural zero).
    # A: dyad matrix, 1/TRUE when both nodes of the dyad have the same attribute value;
    #    this function uses the non-matching (A != 1) dyads.
    diag(x) <- NA
    x[x == 10] <- NA
    diag(A) <- NA
    sum(x == 1 & A != 1, na.rm = TRUE) / sum((x == 1 | x == 0) & A != 1, na.rm = TRUE)
}

# Construct a dyad characteristic indicating whether nodes are similar/homogeneous.
fhomomat <- function(x) {
    # x is a vector of a node covariate; the result is a logical matrix where
    # cell [i, j] is TRUE when nodes i and j have the same covariate value.
    n <- length(x)
    by_row <- matrix(x, nrow = n, ncol = n)  # value of node i repeated along row i
    by_row == t(by_row)
}

# A function to calculate all valid dyads.
fndyads <- function(x) {
    # Counts off-diagonal cells that are an observed 0 or 1; structural zeros
    # (coded 10) and missings are excluded.
    diag(x) <- NA
    x[x == 10] <- NA
    sum(x == 1 | x == 0, na.rm = TRUE)
}

# A function to calculate all valid intragroup dyads.
fndyads2 <- function(x, A) {
    # A: dyad matrix, 1/TRUE when both nodes share the attribute value.
    # Counts valid (observed 0/1, off-diagonal) dyads whose nodes match.
    diag(x) <- NA
    x[x == 10] <- NA
    diag(A) <- NA
    sum((x == 1 | x == 0) & A == 1, na.rm = TRUE)
}


fscolnet <- function(network, ccovar) {
    # Calculate coleman on network level:
    # https://reader.elsevier.com/reader/sd/pii/S0378873314000239?token=A42F99FF6E2B750436DD2CB0DB7B1F41BDEC16052A45683C02644DAF88215A3379636B2AA197B65941D6373E9E2EE413
    
    fhomomat <- function(x) {
        xmat <- matrix(x, nrow = length(x), ncol = length(x))
        xmatt <- t(xmat)
        xhomo <- xmat == xmatt
        return(xhomo)
    }
    
    fsumintra <- function(x, A) {
        # A is matrix indicating whether nodes constituting dyad have same characteristics
        diag(x) <- NA
        x[x == 10] <- NA
        diag(A) <- NA
        sum(x == 1 & A == 1, na.rm = T)
    }
    
    # expecation w*=sum_g sum_i (ni((ng-1)/(N-1)))
    network[network == 10] <- NA
    ni <- rowSums(network, na.rm = T)
    ng <- NA
    for (i in 1:length(ccovar)) {
        ng[i] <- table(ccovar)[rownames(table(ccovar)) == ccovar[i]]
    }
    N <- length(ccovar)
    wexp <- sum(ni * ((ng - 1)/(N - 1)), na.rm = T)
    
    # wgg1 how many intragroup ties
    w <- fsumintra(network, fhomomat(ccovar))
    
    Scol_net <- ifelse(w >= wexp, (w - wexp)/(sum(ni, na.rm = T) - wexp), (w - wexp)/wexp)
    return(Scol_net)
}
```


```{r, eval=FALSE}
getwd()
# Load the prepared Twitter data object twitter_20190919 (unpacked in the next chunk).
load("/Users/anuschka/Documents/labjournal/data/twitter_20190919.RData")  #change to your working directory
```


```{r, eval=FALSE}
str(twitter_20190919, 1)
keyf <- twitter_20190919[[1]]  # presumably the key file with politicians' background data — confirm via str() above
mydata <- twitter_20190919[[2]]  # RSiena data object (depvars fnet/atmnet/rtnet, cCovars, dycCovars)
seats <- twitter_20190919[[3]]  # seating information — confirm via str() above
```

```{r, eval=FALSE}
# Retrieve nomination data from the rsiena object.
fnet <- mydata$depvars$fnet
atmnet <- mydata$depvars$atmnet
rtnet <- mydata$depvars$rtnet

# Retrieve node-attributes from the rsiena object.
vrouw <- mydata$cCovars$vrouw
partij <- mydata$cCovars$partij
ethminz <- mydata$cCovars$ethminz
lft <- mydata$cCovars$lft

# RSiena mean-centers covariates; add the stored mean back to recover raw scores.
ethminz <- ethminz + attributes(ethminz)$mean
partij <- partij + attributes(partij)$mean
vrouw <- vrouw + attributes(vrouw)$mean
lft <- lft + attributes(lft)$mean

# Construct matrices for similarity for each dimension (dyad characteristics).
vrouwm <- fhomomat(vrouw)
partijm <- fhomomat(partij)
ethminzm <- fhomomat(ethminz)

# Just for fun, make a dyad characteristic indicating whether both nodes are ethnic minorities.
xmat <- matrix(ethminz, nrow = length(ethminz), ncol = length(ethminz))
xmatt <- t(xmat)
minoritym <- xmat == 1 & xmatt == 1

# For age: dyads count as similar when less than 6 years apart / for descriptives.
xmat <- matrix(lft, nrow = length(lft), ncol = length(lft))
xmatt <- t(xmat)
lftm <- (abs(xmat - xmatt) < 6)

# calculate all possible similar dyads, not the focus of this exercise.  fndyads2(fnet[,,1], vrouwm)
# fndyads2(fnet[,,3], vrouwm) fndyads2(fnet[,,1], partijm) fndyads2(fnet[,,3], partijm)
# fndyads2(fnet[,,1], ethminzm) fndyads2(fnet[,,3], ethminzm)

# One big object to store all results: 10 descriptive rows x (3 networks x 3 waves).
desmat <- matrix(NA, nrow = 10, ncol = 9)

# Fill desmat with one loop instead of 90 near-identical assignments.
# Columns 1-3 = fnet waves, 4-6 = atmnet waves, 7-9 = rtnet waves.
networks <- list(fnet, atmnet, rtnet)
for (n in seq_along(networks)) {
    for (w in 1:3) {
        x <- networks[[n]][, , w]
        col <- (n - 1) * 3 + w
        desmat[1, col] <- fdensity(x)
        desmat[2, col] <- fdensityintra(x, vrouwm)
        desmat[3, col] <- fdensityinter(x, vrouwm)
        desmat[4, col] <- fdensityintra(x, partijm)
        desmat[5, col] <- fdensityinter(x, partijm)
        desmat[6, col] <- fdensityintra(x, ethminzm)
        desmat[7, col] <- fdensityinter(x, ethminzm)
        desmat[8, col] <- fdensityinter(x, minoritym)
        desmat[9, col] <- fdensityintra(x, lftm)
        desmat[10, col] <- fdensityinter(x, lftm)
    }
}

colnames(desmat) <- c("friends w1", "friends w2", "friends w3", "atmentions w1", "atmentions w2", "atmentions w3", 
    "retweets w1", "retweets w2", "retweets w3")
rownames(desmat) <- c("total", "same sex", "different sex", "same party", "different party", "same ethnicity", 
    "different ethnicity", "both minority", "same age (<6)", "different age (>5)")
desmat
```

```{r, eval=FALSE}

# Default effects object for the Twitter data (covers all dependent networks).
myeff <- getEffects(mydata)
myeff
```
```{r, eval=FALSE}
# 'name' selects the dependent network the effect applies to (here: the retweet network rtnet).
# NOTE(review): every `myeff_m1 <- myeff` below resets the effects object, so only the
# effects added after the LAST reset (transTrip, transTies) are present in the myeff_m1
# that is estimated later; the earlier specifications were apparently run one at a time.
myeff_m1 <- myeff
# Same-party effect: do politicians retweet fellow party members more?
myeff_m1 <- includeEffects(myeff_m1, sameX, interaction1 = "partij", name = "rtnet")

#To answer the second research question, I would like to include different effects. To study the segregation of sex, I think I can still use the sameX effect (to see whether individuals of the same sex are more inclined to retweet one another). 
myeff_m1 <- myeff
myeff_m1 <- includeEffects(myeff_m1, sameX, interaction1 = "vrouw", name = "rtnet")
#To answer the second subquestion of the second RQ, we have to see if there is segregation based on age. I believe that during the class we talked about that you just want the normal variable of age (lft), but I don't totally understand why it's not possible to take the variable same age. Let's try lft first. 
myeff_m1 <- myeff
myeff_m1 <- includeEffects(myeff_m1, sameX, interaction1 = "lft", name = "rtnet")
#Does afstand just work like this?
myeff_m1 <- myeff
myeff_m1 <- includeEffects(myeff_m1, sameX, interaction1 = "afstand", name = "rtnet")
#No it doesn't so I checked the effect in the effectsdocumentation. Here I see that sameX cannot be used for the variable afstand. I'm not sure which of the other effects is useful, I checked them in the Rsiena manual. Maybe incoming shared WWX, but on the other hand I'm not sure if that really tells something about segregation. Let's try reciprocity and then if there's no reciprocity that's segregation (not really but ok)
myeff_m1 <- myeff
myeff_m1 <- includeEffects(myeff_m1, XRecip, interaction1 = "afstand", name = "rtnet")

# Structural effects (transitivity) for the last research question.
myeff_m1 <- myeff
myeff_m1 <- includeEffects(myeff_m1, transTrip, transTies, name = "rtnet")

```
```{r, eval=FALSE}
# I used a seed so you will probably see the same results.
# projname = "test" writes RSiena's log to a project file of that name.
myalgorithm <- sienaAlgorithmCreate(projname = "test", seed = 345654)
```
```{r, eval=FALSE}
# to speed things up a bit, I am using more cores.
# Each subsequent call restarts estimation from the previous answer (prevAns)
# to improve convergence.
ansM1 <- siena07(myalgorithm, data = mydata, effects = myeff_m1, useCluster = TRUE, nbrNodes = 2, initC = TRUE, 
    batch = TRUE)
ansM1b <- siena07(myalgorithm, data = mydata, prevAns = ansM1, effects = myeff_m1, useCluster = TRUE, 
    nbrNodes = 2, initC = TRUE, batch = TRUE)
ansM1c <- siena07(myalgorithm, data = mydata, prevAns = ansM1b, effects = myeff_m1, useCluster = TRUE, 
    nbrNodes = 2, initC = TRUE, batch = TRUE)
ansM1d <- siena07(myalgorithm, data = mydata, prevAns = ansM1c, effects = myeff_m1, useCluster = TRUE, 
    nbrNodes = 2, initC = TRUE, batch = TRUE)
```


```{r, eval=FALSE}
# Save each estimation round so the (slow) runs need not be repeated.
# NOTE(review): object ansM1 is saved under the file name ansM1a.RData.
save(ansM1, file = "ansM1a.RData")
save(ansM1b, file = "ansM1b.RData")
save(ansM1c, file = "ansM1c.RData")
save(ansM1d, file="ansM1d.RData")
```


```{r, echo=FALSE}
# Load previously saved estimates.
# NOTE(review): the last two paths use ".Rdata" while the files were saved as
# ".RData" — fine on a case-insensitive file system (macOS default), but this
# would fail on a case-sensitive one; verify the actual file names.
load("/Users/anuschka/Documents/labjournal/data/ansM1a.RData")
load("/Users/anuschka/Documents/labjournal/data/ansM1b.RData")
load("/Users/anuschka/Documents/labjournal/data/ansM1c.Rdata")
load("/Users/anuschka/Documents/labjournal/data/ansM1d.Rdata")
```

```{r}
# Inspect all four estimation rounds.
summary(ansM1)
summary(ansM1b)
summary(ansM1c)
summary(ansM1d)

```

I think we can conclude that there is a weak negative effect of same gender on retweeting, which means that individuals do not particularly prefer to retweet someone of the same gender over someone of the other gender. Next we check whether the same holds for individuals of the same age. In the second model I ran, we see that same age also has a weak negative effect (-0.0085), so politicians do not prefer to retweet something tweeted by someone of the same age. In sum, the segregation along party affiliation in the retweet network does not seem to be the result of segregation along age and sex. 

<br>

```{r}
summary(ansM1b)
```

Let's continue to test the third RQ. 
I think that to test propinquity, we have to see how close to each other the party members are seated. This is captured by the variable seats. In keyf we see the seating coordinates, but when we inspect the mydata list, we see that the distance between members is already calculated as a dyadic covariate (afstand). This is what we want to use to test propinquity. 

```{r, eval=FALSE}
summary(seats)
# The distance variable (afstand) is stored among the dyadic covariates.
summary(mydata$dycCovars)
```

<br>

In the output we see a negative effect (-0.0999) of afstand x reciprocity. I think this means that individuals do not prefer to have a reciprocal relation with someone who is close to them, while the general effect of reciprocity is positive. However, I'm not sure how afstand enters the model here, and whether this effect treats individuals at a smaller distance as more similar. But I also don't know which variable could give a better answer to the research question. 

```{r}

summary(ansM1c)
summary(ansM1d)
```

<br>

To give an answer to the last research question, I believe I do not need to estimate a new model. When we look at the outdegree, we see that this effect is negative. The interpretation I wrote down last week is that this means that individuals do want outgoing ties, but not with everyone in the network. We also see that individuals prefer a reciprocal tie over a non-reciprocal tie. Politicians thus prefer to retweet someone who also retweets them, rather than retweeting individuals who have not retweeted them. When estimating a model with transitivity effects, we see a positive effect of both transitive triplets and transitive ties. Does this then mean that two individuals who retweet me also retweet each other? Summarizing these effects, we see that politicians prefer to retweet individuals with whom they already have some kind of connection (through transitivity or to establish a reciprocal relation). Therefore, I think one could conclude that there are structural factors in the network that could be linked with segregation. However, it is still possible that these structural factors are mostly visible because there already is segregation based on party affiliation. (How can you distinguish what is cause and what is effect?)










