diff --git a/404.html b/404.html index b7d16e7ab94dce2097726ce3c55dead1b1efc3ab..fa1a4c1192363e9ac3cc41371a5775b3cc3cf19c 100644 --- a/404.html +++ b/404.html @@ -38,6 +38,8 @@ + + @@ -54,6 +56,16 @@ + + + + @@ -71,7 +83,7 @@ torch - 0.0.3 + 0.1.0 @@ -144,6 +156,9 @@
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -157,7 +172,24 @@
  • - +
  • Reference
  • diff --git a/CONTRIBUTING.html b/CONTRIBUTING.html index 32e2bbed79bb308d8ba5213360f4d3797b08ba74..1fbbeaa6c425da78a3eb8cfb84ccdca2928e39b1 100644 --- a/CONTRIBUTING.html +++ b/CONTRIBUTING.html @@ -38,6 +38,8 @@ + + @@ -54,6 +56,16 @@ + + + + @@ -71,7 +83,7 @@ torch - 0.0.3 + 0.1.0 @@ -144,6 +156,9 @@
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -157,7 +172,24 @@
  • - +
  • Reference
  • diff --git a/LICENSE-text.html b/LICENSE-text.html index df7874b73c33e746100d584e0a60bad77de6758c..6df809a96108eaf48856105d0653b7aeb6625327 100644 --- a/LICENSE-text.html +++ b/LICENSE-text.html @@ -38,6 +38,8 @@ + + @@ -54,6 +56,16 @@ + + + + @@ -71,7 +83,7 @@ torch - 0.0.3 + 0.1.0 @@ -144,6 +156,9 @@
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -157,7 +172,24 @@
  • - +
  • Reference
  • diff --git a/LICENSE.html b/LICENSE.html index 4ab810144163105ae0a5e4039a54e6fc0bbcb99b..596f226f4372b7b91d49cf5482cc240df0bc81b6 100644 --- a/LICENSE.html +++ b/LICENSE.html @@ -38,6 +38,8 @@ + + @@ -54,6 +56,16 @@ + + + + @@ -71,7 +83,7 @@ torch - 0.0.3 + 0.1.0 @@ -144,6 +156,9 @@
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -157,7 +172,24 @@
  • - +
  • Reference
  • diff --git a/articles/examples/basic-autograd.html b/articles/examples/basic-autograd.html new file mode 100644 index 0000000000000000000000000000000000000000..a5b35f7778069d6a9c79cc94507ba2c06caba570 --- /dev/null +++ b/articles/examples/basic-autograd.html @@ -0,0 +1,230 @@ + + + + + + + +basic-autograd • torch + + + + + + + + + + + +
    +
    + + + + +
    +
    + + + + +
    +library(torch)
    +
+# creates example tensors. requires_grad = TRUE tells torch that
+# we are going to take derivatives with respect to x.
    +x <- torch_tensor(3, requires_grad = TRUE)
    +y <- torch_tensor(2)
    +
    +# executes the forward operation x^2
    +o <- x^2
    +
+# computes gradients for every tensor in the graph that is marked
+# with requires_grad = TRUE
    +o$backward()
    +
    +# get do/dx = 2 * x (at x = 3)
    +x$grad
    +
+## torch_tensor 
    +##  6
    +## [ CPUFloatType{1} ]
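+
+As a small extension of the example above, the same call pattern differentiates composite expressions (a sketch, not output from a real run):
+
+# sketch: gradient of x^2 + 2 * x at x = 3; expected value is 2 * 3 + 2 = 8
+x2 <- torch_tensor(3, requires_grad = TRUE)
+o2 <- x2^2 + 2 * x2
+o2$backward()
+x2$grad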
    +
    + + + +
    + + + + +
    + + + + + + diff --git a/articles/examples/basic-autograd_files/accessible-code-block-0.0.1/empty-anchor.js b/articles/examples/basic-autograd_files/accessible-code-block-0.0.1/empty-anchor.js new file mode 100644 index 0000000000000000000000000000000000000000..ca349fd6a570108bde9d7daace534cd651c5f042 --- /dev/null +++ b/articles/examples/basic-autograd_files/accessible-code-block-0.0.1/empty-anchor.js @@ -0,0 +1,15 @@ +// Hide empty tag within highlighted CodeBlock for screen reader accessibility (see https://github.com/jgm/pandoc/issues/6352#issuecomment-626106786) --> +// v0.0.1 +// Written by JooYoung Seo (jooyoung@psu.edu) and Atsushi Yasumoto on June 1st, 2020. + +document.addEventListener('DOMContentLoaded', function() { + const codeList = document.getElementsByClassName("sourceCode"); + for (var i = 0; i < codeList.length; i++) { + var linkList = codeList[i].getElementsByTagName('a'); + for (var j = 0; j < linkList.length; j++) { + if (linkList[j].innerHTML === "") { + linkList[j].setAttribute('aria-hidden', 'true'); + } + } + } +}); diff --git a/articles/examples/basic-nn-module.html b/articles/examples/basic-nn-module.html new file mode 100644 index 0000000000000000000000000000000000000000..afebc49aee8af27ebc7c32eee3e057b08f7d6556 --- /dev/null +++ b/articles/examples/basic-nn-module.html @@ -0,0 +1,286 @@ + + + + + + + +basic-nn-module • torch + + + + + + + + + + + +
    +
    + + + + +
    +
    + + + + +
    +library(torch)
    +
+# defines a custom nn_module. An nn_module holds parameters and
+# implements the forward computation of a layer or model.
    +dense <- nn_module(
+  classname = "dense",
+  # the initialize function runs whenever we instantiate the model
    +  initialize = function(in_features, out_features) {
    +    
    +    # just for you to see when this function is called
    +    cat("Calling initialize!") 
    +    
    +    # we use nn_parameter to indicate that those tensors are special
    +    # and should be treated as parameters by `nn_module`.
    +    self$w <- nn_parameter(torch_randn(in_features, out_features))
    +    self$b <- nn_parameter(torch_zeros(out_features))
    +    
    +  },
    +  # this function is called whenever we call our model on input.
    +  forward = function(x) {
    +    cat("Calling forward!")
    +    torch_mm(x, self$w) + self$b
    +  }
    +)
    +
    +model <- dense(3, 1)
    +
+## Calling initialize!
    +
    +# you can get all parameters 
    +model$parameters
    +
+## $w
    +## torch_tensor 
    +## 0.01 *
    +##  4.5107
    +##  -45.9509
    +##  -246.0987
    +## [ CPUFloatType{3,1} ]
    +## 
    +## $b
    +## torch_tensor 
    +##  0
    +## [ CPUFloatType{1} ]
    +
    +# or individually
    +model$w
    +
+## torch_tensor 
    +## 0.01 *
    +##  4.5107
    +##  -45.9509
    +##  -246.0987
    +## [ CPUFloatType{3,1} ]
    +
    +model$b
    +
+## torch_tensor 
    +##  0
    +## [ CPUFloatType{1} ]
    +
    +# create an input tensor
    +x <- torch_randn(10, 3)
    +y_pred <- model(x)
    +
+## Calling forward!
    +
    +y_pred
    +
+## torch_tensor 
    +## -0.5823
    +##  1.1249
    +##  0.7531
    +## -0.5148
    +##  0.1950
    +## -4.0449
    +##  2.2684
    +##  0.5924
    +##  3.6237
    +## -1.8567
    +## [ CPUFloatType{10,1} ]
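+
+Once the module produces predictions you can train it with the usual autograd workflow. A minimal sketch (assuming nnf_mse_loss as the loss; not part of the original example):
+
+# sketch: compute a loss against random targets and backpropagate;
+# gradients accumulate on the registered parameters w and b
+y <- torch_randn(10, 1)
+loss <- nnf_mse_loss(y_pred, y)
+loss$backward()
+model$w$grad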
    +
    + + + +
    + + + + +
    + + + + + + diff --git a/articles/examples/basic-nn-module_files/accessible-code-block-0.0.1/empty-anchor.js b/articles/examples/basic-nn-module_files/accessible-code-block-0.0.1/empty-anchor.js new file mode 100644 index 0000000000000000000000000000000000000000..ca349fd6a570108bde9d7daace534cd651c5f042 --- /dev/null +++ b/articles/examples/basic-nn-module_files/accessible-code-block-0.0.1/empty-anchor.js @@ -0,0 +1,15 @@ +// Hide empty tag within highlighted CodeBlock for screen reader accessibility (see https://github.com/jgm/pandoc/issues/6352#issuecomment-626106786) --> +// v0.0.1 +// Written by JooYoung Seo (jooyoung@psu.edu) and Atsushi Yasumoto on June 1st, 2020. + +document.addEventListener('DOMContentLoaded', function() { + const codeList = document.getElementsByClassName("sourceCode"); + for (var i = 0; i < codeList.length; i++) { + var linkList = codeList[i].getElementsByTagName('a'); + for (var j = 0; j < linkList.length; j++) { + if (linkList[j].innerHTML === "") { + linkList[j].setAttribute('aria-hidden', 'true'); + } + } + } +}); diff --git a/articles/examples/dataset.html b/articles/examples/dataset.html new file mode 100644 index 0000000000000000000000000000000000000000..022e151b029bf4302ab4f1b90507fac678176aae --- /dev/null +++ b/articles/examples/dataset.html @@ -0,0 +1,305 @@ + + + + + + + +dataset • torch + + + + + + + + + + + +
    +
    + + + + +
    +
    + + + + +
    +library(torch)
    +
    +# In deep learning models you don't usually have all your data in RAM
    +# because you are usually training using mini-batch gradient descent
+# thus only needing a mini-batch in RAM at a time.
    +
+# In torch we use the `dataset` abstraction to define the process of
+# loading data. Once you have defined your dataset you can use torch
+# dataloaders, which allow you to iterate over this dataset in batches.
    +
+# Note that datasets are optional in torch. They are just there as a
+# recommended way to load data.
    +
+# Below you will see an example of how to create a simple torch dataset
+# that pre-processes a data.frame into tensors so you can feed them to
+# a model.
    +
    +df_dataset <- dataset(
    +  "mydataset",
    +  
    +  # the input data to your dataset goes in the initialize function.
    +  # our dataset will take a dataframe and the name of the response
    +  # variable.
    +  initialize = function(df, response_variable) {
    +    self$df <- df[,-which(names(df) == response_variable)]
    +    self$response_variable <- df[[response_variable]]
    +  },
    +  
    +  # the .getitem method takes an index as input and returns the
    +  # corresponding item from the dataset.
    +  # the index could be anything. the dataframe could have many
    +  # rows for each index and the .getitem method would do some
    +  # kind of aggregation before returning the element.
+  # in our case the index will be a row of the data.frame.
    +  .getitem = function(index) {
    +    response <- torch_tensor(self$response_variable[index])
    +    x <- torch_tensor(as.numeric(self$df[index,]))
    +    
    +    # note that the dataloaders will automatically stack tensors
    +    # creating a new dimension
    +    list(x = x, y = response)
    +  },
    +  
    +  # It's optional, but helpful to define the .length method returning 
    +  # the number of elements in the dataset. This is needed if you want 
    +  # to shuffle your dataset.
    +  .length = function() {
    +    length(self$response_variable)
    +  }
    +  
    +)
    +
    +
    +# we can now initialize an instance of our dataset.
    +# for example
    +mtcars_dataset <- df_dataset(mtcars, "mpg")
    +
    +# now we can get an item with
    +mtcars_dataset$.getitem(1)
    +
+## $x
    +## torch_tensor 
    +##    6.0000
    +##  160.0000
    +##  110.0000
    +##    3.9000
    +##    2.6200
    +##   16.4600
    +##    0.0000
    +##    1.0000
    +##    4.0000
    +##    4.0000
    +## [ CPUFloatType{10} ]
    +## 
    +## $y
    +## torch_tensor 
    +##  21
    +## [ CPUFloatType{1} ]
    +
    +# Given a dataset you can create a dataloader with
    +dl <- dataloader(mtcars_dataset, batch_size = 15, shuffle = TRUE)
    +
+# we can then loop through the elements of the dataloader with
    +for(batch in enumerate(dl)) {
    +  cat("X size:  ")
    +  print(batch[[1]]$size())
    +  cat("Y size:  ")
    +  print(batch[[2]]$size())
    +}
    +
+## X size:  [1] 15 10
    +## Y size:  [1] 15  1
    +## X size:  [1] 15 10
    +## Y size:  [1] 15  1
    +## X size:  [1]  2 10
    +## Y size:  [1] 2 1
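+
+You can also draw batches one at a time instead of looping. A minimal sketch, assuming dataloader_make_iter() and dataloader_next() from torch:
+
+# sketch: manually step through the dataloader
+iter <- dataloader_make_iter(dl)
+batch <- dataloader_next(iter)
+batch$x$size()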
    +
    + + + +
    + + + + +
    + + + + + + diff --git a/articles/examples/dataset_files/accessible-code-block-0.0.1/empty-anchor.js b/articles/examples/dataset_files/accessible-code-block-0.0.1/empty-anchor.js new file mode 100644 index 0000000000000000000000000000000000000000..ca349fd6a570108bde9d7daace534cd651c5f042 --- /dev/null +++ b/articles/examples/dataset_files/accessible-code-block-0.0.1/empty-anchor.js @@ -0,0 +1,15 @@ +// Hide empty tag within highlighted CodeBlock for screen reader accessibility (see https://github.com/jgm/pandoc/issues/6352#issuecomment-626106786) --> +// v0.0.1 +// Written by JooYoung Seo (jooyoung@psu.edu) and Atsushi Yasumoto on June 1st, 2020. + +document.addEventListener('DOMContentLoaded', function() { + const codeList = document.getElementsByClassName("sourceCode"); + for (var i = 0; i < codeList.length; i++) { + var linkList = codeList[i].getElementsByTagName('a'); + for (var j = 0; j < linkList.length; j++) { + if (linkList[j].innerHTML === "") { + linkList[j].setAttribute('aria-hidden', 'true'); + } + } + } +}); diff --git a/articles/examples/index.html b/articles/examples/index.html new file mode 100644 index 0000000000000000000000000000000000000000..45a26b12702d996f6afe34b3e96174cbf164beef --- /dev/null +++ b/articles/examples/index.html @@ -0,0 +1,227 @@ + + + + + + + +Examples • torch + + + + + + + + + + + +
    +
    + + + + +
    +
    + + + + +

    Gallery of scripts demonstrating torch functionality.

    + + + + + + + + + + + + + + + +
    Examples
    basic-autograd
    basic-nn-module
    dataset
    +
    + + + +
    + + + + +
    + + + + + + diff --git a/articles/examples/index_files/accessible-code-block-0.0.1/empty-anchor.js b/articles/examples/index_files/accessible-code-block-0.0.1/empty-anchor.js new file mode 100644 index 0000000000000000000000000000000000000000..ca349fd6a570108bde9d7daace534cd651c5f042 --- /dev/null +++ b/articles/examples/index_files/accessible-code-block-0.0.1/empty-anchor.js @@ -0,0 +1,15 @@ +// Hide empty tag within highlighted CodeBlock for screen reader accessibility (see https://github.com/jgm/pandoc/issues/6352#issuecomment-626106786) --> +// v0.0.1 +// Written by JooYoung Seo (jooyoung@psu.edu) and Atsushi Yasumoto on June 1st, 2020. + +document.addEventListener('DOMContentLoaded', function() { + const codeList = document.getElementsByClassName("sourceCode"); + for (var i = 0; i < codeList.length; i++) { + var linkList = codeList[i].getElementsByTagName('a'); + for (var j = 0; j < linkList.length; j++) { + if (linkList[j].innerHTML === "") { + linkList[j].setAttribute('aria-hidden', 'true'); + } + } + } +}); diff --git a/articles/extending-autograd.html b/articles/extending-autograd.html index 05802b50daf77ef49b30610c97c9b02ac2cf7687..eef1bb3386eb1dd84201643738da83acb550f5f8 100644 --- a/articles/extending-autograd.html +++ b/articles/extending-autograd.html @@ -11,12 +11,19 @@ - + + +
    @@ -31,7 +38,7 @@ torch - 0.0.3 + 0.1.0
    @@ -104,6 +111,9 @@
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -117,7 +127,24 @@
  • - +
  • Reference
  • @@ -156,8 +183,7 @@
    -library(torch)
    -
    +library(torch)

Adding operations to autograd requires implementing a new autograd_function for each operation. Recall that autograd_functions are what autograd uses to compute the results and gradients, and encode the operation history. Every new function requires you to implement two methods:

    - +
  • Reference
  • @@ -159,8 +186,7 @@

Note: This is an R port of the official tutorial available here. All credit goes to Soumith Chintala.

    -library(torch)
    -
    +library(torch)

    Central to all neural networks in torch is the autograd functionality. Let’s first briefly visit this, and we will then go to training our first neural network.

    Autograd provides automatic differentiation for all operations on Tensors. It is a define-by-run framework, which means that your backprop is defined by how your code is run, and that every single iteration can be different.

    Let us see this in more simple terms with some examples.

    @@ -175,74 +201,67 @@

If you want to compute the derivatives, you can call $backward() on a Tensor. If the Tensor is a scalar (i.e. it holds a single element of data), you don’t need to specify any arguments to backward(); however, if it has more elements, you need to specify a gradient argument that is a tensor of matching shape.

    Create a tensor and set requires_grad=TRUE to track computation with it:

    -x <- torch_ones(2, 2, requires_grad = TRUE)
    -x
    +x <- torch_ones(2, 2, requires_grad = TRUE)
    +x
     #> torch_tensor 
     #>  1  1
     #>  1  1
    -#> [ CPUFloatType{2,2} ]
    -
    +#> [ CPUFloatType{2,2} ]

    Do a tensor operation:

    -y <- x + 2
    -y
    +y <- x + 2
    +y
     #> torch_tensor 
     #>  3  3
     #>  3  3
    -#> [ CPUFloatType{2,2} ]
    -
    +#> [ CPUFloatType{2,2} ]

    y was created as a result of an operation, so it has a grad_fn.

    -y$grad_fn
    -#> AddBackward1
    -
+y$grad_fn
+#> AddBackward1

    Do more operations on y

    -z <- y * y * 3
    -z
    +z <- y * y * 3
    +z
     #> torch_tensor 
     #>  27  27
     #>  27  27
     #> [ CPUFloatType{2,2} ]
    -out <- z$mean()
    -out
    +out <- z$mean()
    +out
     #> torch_tensor 
     #> 27
    -#> [ CPUFloatType{} ]
    -
    +#> [ CPUFloatType{} ]

$requires_grad_( ... ) changes an existing Tensor’s requires_grad flag in-place. The input flag defaults to TRUE if not given.

    -a <- torch_randn(2, 2)
    -a <- (a * 3) / (a - 1)
    -a$requires_grad
    +a <- torch_randn(2, 2)
    +a <- (a * 3) / (a - 1)
    +a$requires_grad
     #> [1] FALSE
    -a$requires_grad_(TRUE)
    +a$requires_grad_(TRUE)
     #> torch_tensor 
    -#> -0.4350  1.4882
    -#> -0.5849  9.3457
    +#>  1.8070  0.0621
    +#> -3.7943  1.4618
     #> [ CPUFloatType{2,2} ]
    -a$requires_grad
    +a$requires_grad
     #> [1] TRUE
    -b <- (a * a)$sum()
    -b$grad_fn
    -#> SumBackward0
    -
+b <- (a * a)$sum()
+b$grad_fn
+#> SumBackward0

    Gradients

Let’s backprop now. Because out contains a single scalar, out$backward() is equivalent to out$backward(torch_tensor(1)).

    -out$backward()
    -
    +out$backward()

    Print gradients d(out)/dx

    -x$grad
    +x$grad
     #> torch_tensor 
     #>  4.5000  4.5000
     #>  4.5000  4.5000
    -#> [ CPUFloatType{2,2} ]
    -
    +#> [ CPUFloatType{2,2} ]

You should have gotten a matrix filled with 4.5. Let’s call the out Tensor \(o\).

    We have that \(o = \frac{1}{4}\sum_i z_i\), \(z_i = 3(x_i+2)^2\) and \(z_i\bigr\rvert_{x_i=1} = 27\). Therefore, \(\frac{\partial o}{\partial x_i} = \frac{3}{2}(x_i+2)\), hence \(\frac{\partial o}{\partial x_i}\bigr\rvert_{x_i=1} = \frac{9}{2} = 4.5\).

    Mathematically, if you have a vector valued function \(\vec{y}=f(\vec{x})\), then the gradient of \(\vec{y}\) with respect to \(\vec{x}\) is a Jacobian matrix:

    @@ -273,50 +292,46 @@

    This characteristic of vector-Jacobian product makes it very convenient to feed external gradients into a model that has non-scalar output.

    Now let’s take a look at an example of vector-Jacobian product:

    -x <- torch_randn(3, requires_grad=TRUE)
    -y <- 100 * x
    -y
    +x <- torch_randn(3, requires_grad=TRUE)
    +y <- 100 * x
    +y
     #> torch_tensor 
    -#>  -50.4960
    -#>  -28.4113
    -#>  101.7135
    -#> [ CPUFloatType{3} ]
    -
+#>  -25.7096
+#>  -34.4775
+#>  124.1432
+#> [ CPUFloatType{3} ]

Now in this case y is no longer a scalar. autograd could not compute the full Jacobian directly, but if we just want the vector-Jacobian product, simply pass the vector to backward() as an argument:

    -v <- torch_tensor(c(0.1, 1.0, 0.0001))
    -y$backward(v)
    +v <- torch_tensor(c(0.1, 1.0, 0.0001))
    +y$backward(v)
     
    -x$grad
    +x$grad
     #> torch_tensor 
     #>  1.0000e+01
     #>  1.0000e+02
     #>  1.0000e-02
    -#> [ CPUFloatType{3} ]
    -
    +#> [ CPUFloatType{3} ]

You can also stop autograd from tracking history on Tensors with $requires_grad=TRUE either by wrapping the code block in with_no_grad() or by using $detach():

    -x$requires_grad
    +x$requires_grad
     #> [1] TRUE
    -(x ** 2)$requires_grad
    +(x ** 2)$requires_grad
     #> [1] TRUE
     
    -with_no_grad({
    -  print((x ** 2)$requires_grad)
    -})
    -#> [1] FALSE
    -
+with_no_grad({
+  print((x ** 2)$requires_grad)
+})
+#> [1] FALSE
    -x$requires_grad
    +x$requires_grad
     #> [1] TRUE
    -y <- x$detach()
    -y$requires_grad
    +y <- x$detach()
    +y$requires_grad
     #> [1] FALSE
    -x$eq(y)$all()
    +x$eq(y)$all()
     #> torch_tensor 
     #> 1
    -#> [ CPUBoolType{} ]
    -
    +#> [ CPUBoolType{} ]

    Read Later:

For more details, see help(autograd_function), vignette("using-autograd") and vignette("extending-autograd").

    diff --git a/articles/getting-started/control-flow-and-weight-sharing.html b/articles/getting-started/control-flow-and-weight-sharing.html index 89a34409145381dd4425ef518ac5bd79d7df9a32..ed3f69d963ad1be85cc77248b020babeb00d2d7d 100644 --- a/articles/getting-started/control-flow-and-weight-sharing.html +++ b/articles/getting-started/control-flow-and-weight-sharing.html @@ -11,12 +11,19 @@ - + + +
    @@ -31,7 +38,7 @@ torch - 0.0.3 + 0.1.0
    @@ -104,6 +111,9 @@
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -117,7 +127,24 @@
  • - +
  • Reference
  • @@ -159,21 +186,20 @@

Note: This is an R port of the official tutorial available here. All credit goes to Justin Johnson.

    -library(torch)
    -
    +library(torch)

    As an example of dynamic graphs and weight sharing, we implement a very strange model: a fully-connected ReLU network that on each forward pass chooses a random number between 1 and 4 and uses that many hidden layers, reusing the same weights multiple times to compute the innermost hidden layers.

    For this model we can use normal R flow control to implement the loop, and we can implement weight sharing among the innermost layers by simply reusing the same Module multiple times when defining the forward pass.

    We can easily implement this model using nn_module:

    -dynamic_net <- nn_module(
    +dynamic_net <- nn_module(
        "dynamic_net",
        # In the constructor we construct three nn_linear instances that we will use
        # in the forward pass.
    -   initialize = function(D_in, H, D_out) {
    -      self$input_linear <- nn_linear(D_in, H)
    -      self$middle_linear <- nn_linear(H, H)
    -      self$output_linear <- nn_linear(H, D_out)
    -   },
    +   initialize = function(D_in, H, D_out) {
    +      self$input_linear <- nn_linear(D_in, H)
    +      self$middle_linear <- nn_linear(H, H)
    +      self$output_linear <- nn_linear(H, D_out)
    +   },
   # For the forward pass of the model, we randomly choose either 1, 2, 3, or 4
        # and reuse the middle_linear Module that many times to compute hidden layer
        # representations.
    @@ -185,85 +211,84 @@
        # Here we also see that it is perfectly safe to reuse the same Module many
        # times when defining a computational graph. This is a big improvement from Lua
        # Torch, where each Module could be used only once.
    -   forward = function(x) {
    -      h_relu <- self$input_linear(x)$clamp(min = 0)
    -      for (i in seq_len(sample.int(4, size = 1))) {
    -         h_relu <- self$middle_linear(h_relu)$clamp(min=0)
    -      }
    -      y_pred <- self$output_linear(h_relu)
    -      y_pred
    -   }
    -)
    +   forward = function(x) {
    +      h_relu <- self$input_linear(x)$clamp(min = 0)
    +      for (i in seq_len(sample.int(4, size = 1))) {
    +         h_relu <- self$middle_linear(h_relu)$clamp(min=0)
    +      }
    +      y_pred <- self$output_linear(h_relu)
    +      y_pred
    +   }
    +)
     
     
    -if (cuda_is_available()) {
    -   device <- torch_device("cuda")
    -} else {
    -   device <- torch_device("cpu")
    -}
    +if (cuda_is_available()) {
    +   device <- torch_device("cuda")
    +} else {
    +   device <- torch_device("cpu")
    +}
        
     # N is batch size; D_in is input dimension;
     # H is hidden dimension; D_out is output dimension.
    -N <- 64
    -D_in <- 1000
    -H <- 100
    -D_out <- 10
    +N <- 64
    +D_in <- 1000
    +H <- 100
    +D_out <- 10
     
     # Create random input and output data
     # Setting requires_grad=FALSE (the default) indicates that we do not need to 
     # compute gradients with respect to these Tensors during the backward pass.
    -x <- torch_randn(N, D_in, device=device)
    -y <- torch_randn(N, D_out, device=device)
    +x <- torch_randn(N, D_in, device=device)
    +y <- torch_randn(N, D_out, device=device)
     
     # Construct our model by instantiating the class defined above
    -model <- dynamic_net(D_in, H, D_out)
    +model <- dynamic_net(D_in, H, D_out)
     
     # The nn package also contains definitions of popular loss functions; in this
     # case we will use Mean Squared Error (MSE) as our loss function.
    -loss_fn <- nnf_mse_loss
    +loss_fn <- nnf_mse_loss
     
     # Use the optim package to define an Optimizer that will update the weights of
 # the model for us. Here we will use SGD with momentum; the optim package
 # contains many other optimization algorithms. The first argument to the
 # optimizer constructor tells it which Tensors it should update.
    -learning_rate <- 1e-4
    -optimizer <- optim_sgd(model$parameters, lr=learning_rate, momentum = 0.9)
    +learning_rate <- 1e-4
    +optimizer <- optim_sgd(model$parameters, lr=learning_rate, momentum = 0.9)
     
    -for (t in seq_len(500)) {
    +for (t in seq_len(500)) {
        # Forward pass: compute predicted y by passing x to the model. Module objects
        # can be called like functions. When doing so you pass a Tensor of input
        # data to the Module and it produces a Tensor of output data.
    -   y_pred <- model(x)
    +   y_pred <- model(x)
        
        # Compute and print loss. We pass Tensors containing the predicted and true
        # values of y, and the loss function returns a Tensor containing the
        # loss.
    -   loss <- loss_fn(y_pred, y)
    -   if (t %% 100 == 0 || t == 1)
    -      cat("Step:", t, ":", as.numeric(loss), "\n")
    +   loss <- loss_fn(y_pred, y)
    +   if (t %% 100 == 0 || t == 1)
    +      cat("Step:", t, ":", as.numeric(loss), "\n")
        
        # Before the backward pass, use the optimizer object to zero all of the
        # gradients for the variables it will update (which are the learnable
        # weights of the model). This is because by default, gradients are
   # accumulated in buffers (i.e., not overwritten) whenever $backward()
   # is called. Check out the docs of `autograd_backward` for more details.
    -   optimizer$zero_grad()
    +   optimizer$zero_grad()
     
        # Backward pass: compute gradient of the loss with respect to model
        # parameters
    -   loss$backward()
    +   loss$backward()
     
        # Calling the step function on an Optimizer makes an update to its
        # parameters
    -   optimizer$step()
    -}
    -#> Step: 1 : 1.054659 
    -#> Step: 100 : 1.05705 
    -#> Step: 200 : 1.048708 
    -#> Step: 300 : 1.052647 
    -#> Step: 400 : 1.042869 
    -#> Step: 500 : 1.039991
    -
+   optimizer$step()
+}
+#> Step: 1 : 1.016029 
+#> Step: 100 : 1.016147 
+#> Step: 200 : 1.014156 
+#> Step: 300 : 1.012017 
+#> Step: 400 : 1.006162 
+#> Step: 500 : 1.010858

Sometimes you will want to specify models that are more complex than a sequence of existing Modules; for these cases you can define your own Modules by using the nn_module function and defining a forward method which receives input Tensors and produces output Tensors using other modules or other autograd operations on Tensors.

    In this example we implement our two-layer network as a custom Module subclass:

    -two_layer_net <- nn_module(
    +two_layer_net <- nn_module(
        "two_layer_net",
    -   initialize = function(D_in, H, D_out) {
    -      self$linear1 <- nn_linear(D_in, H)
    -      self$linear2 <- nn_linear(H, D_out)
    -   },
    -   forward = function(x) {
    -      x %>% 
    -         self$linear1() %>% 
    -         nnf_relu() %>% 
    -         self$linear2()
    -   }
    -)
    +   initialize = function(D_in, H, D_out) {
    +      self$linear1 <- nn_linear(D_in, H)
    +      self$linear2 <- nn_linear(H, D_out)
    +   },
    +   forward = function(x) {
    +      x %>% 
    +         self$linear1() %>% 
    +         nnf_relu() %>% 
    +         self$linear2()
    +   }
    +)
     
     
    -if (cuda_is_available()) {
    -   device <- torch_device("cuda")
    -} else {
    -   device <- torch_device("cpu")
    -}
    +if (cuda_is_available()) {
    +   device <- torch_device("cuda")
    +} else {
    +   device <- torch_device("cpu")
    +}
        
     # N is batch size; D_in is input dimension;
     # H is hidden dimension; D_out is output dimension.
    -N <- 64
    -D_in <- 1000
    -H <- 100
    -D_out <- 10
    +N <- 64
    +D_in <- 1000
    +H <- 100
    +D_out <- 10
     
     # Create random input and output data
     # Setting requires_grad=FALSE (the default) indicates that we do not need to 
     # compute gradients with respect to these Tensors during the backward pass.
    -x <- torch_randn(N, D_in, device=device)
    -y <- torch_randn(N, D_out, device=device)
    +x <- torch_randn(N, D_in, device=device)
    +y <- torch_randn(N, D_out, device=device)
     
     # Construct our model by instantiating the class defined above
    -model <- two_layer_net(D_in, H, D_out)
    +model <- two_layer_net(D_in, H, D_out)
     
     # The nn package also contains definitions of popular loss functions; in this
     # case we will use Mean Squared Error (MSE) as our loss function.
    -loss_fn <- nnf_mse_loss
    +loss_fn <- nnf_mse_loss
     
     # Use the optim package to define an Optimizer that will update the weights of
 # the model for us. Here we will use SGD; the optim package contains many
 # other optimization algorithms. The first argument to the optimizer
 # constructor tells it which Tensors it should update.
    -learning_rate <- 1e-4
    -optimizer <- optim_sgd(model$parameters, lr=learning_rate)
    +learning_rate <- 1e-4
    +optimizer <- optim_sgd(model$parameters, lr=learning_rate)
     
    -for (t in seq_len(500)) {
    +for (t in seq_len(500)) {
        # Forward pass: compute predicted y by passing x to the model. Module objects
        # can be called like functions. When doing so you pass a Tensor of input
        # data to the Module and it produces a Tensor of output data.
    -   y_pred <- model(x)
    +   y_pred <- model(x)
        
        # Compute and print loss. We pass Tensors containing the predicted and true
        # values of y, and the loss function returns a Tensor containing the
        # loss.
    -   loss <- loss_fn(y_pred, y)
    -   if (t %% 100 == 0 || t == 1)
    -      cat("Step:", t, ":", as.numeric(loss), "\n")
    +   loss <- loss_fn(y_pred, y)
    +   if (t %% 100 == 0 || t == 1)
    +      cat("Step:", t, ":", as.numeric(loss), "\n")
        
        # Before the backward pass, use the optimizer object to zero all of the
        # gradients for the variables it will update (which are the learnable
        # weights of the model). This is because by default, gradients are
   # accumulated in buffers (i.e., not overwritten) whenever $backward()
   # is called. Check out the docs of `autograd_backward` for more details.
    -   optimizer$zero_grad()
    +   optimizer$zero_grad()
     
        # Backward pass: compute gradient of the loss with respect to model
        # parameters
    -   loss$backward()
    +   loss$backward()
     
        # Calling the step function on an Optimizer makes an update to its
        # parameters
    -   optimizer$step()
    -}
    -#> Step: 1 : 1.04065 
    -#> Step: 100 : 1.026708 
    -#> Step: 200 : 1.013019 
    -#> Step: 300 : 0.9996911 
    -#> Step: 400 : 0.986709 
    -#> Step: 500 : 0.9740159
    -
+   optimizer$step()
+}
+#> Step: 1 : 1.096671 
+#> Step: 100 : 1.082999 
+#> Step: 200 : 1.06948 
+#> Step: 300 : 1.056253 
+#> Step: 400 : 1.043316 
+#> Step: 500 : 1.030622

In the next example we will learn more about dynamic graphs in torch.

    diff --git a/articles/getting-started/neural-networks.html b/articles/getting-started/neural-networks.html index 9c40455a7655242b62260f8840edffbc81a1e8e8..ca6fdb7961595079cf0521d1d767a39df312caba 100644 --- a/articles/getting-started/neural-networks.html +++ b/articles/getting-started/neural-networks.html @@ -11,12 +11,19 @@ - + + +
    @@ -31,7 +38,7 @@ torch - 0.0.3 + 0.1.0
    @@ -104,6 +111,9 @@
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -117,7 +127,24 @@
  • - +
  • Reference
  • @@ -159,8 +186,7 @@

Note: This is an R port of the official tutorial available here. All credit goes to Soumith Chintala.

    -library(torch)
    -
    +library(torch)

    Neural networks can be constructed using the nn functionality.

Now that you have had a glimpse of autograd, note that nn depends on autograd to define models and differentiate them. An nn_module contains layers, and a method forward(input) that returns the output.

    For example, look at this network that classifies digit images:

    @@ -182,44 +208,43 @@ Define the network

    Let’s define this network:

    -Net <- nn_module(
    -  initialize = function() {
    -    self$conv1 = nn_conv2d(1, 6, 3)
    -    self$conv2 = nn_conv2d(6, 16, 3)
    +Net <- nn_module(
    +  initialize = function() {
    +    self$conv1 = nn_conv2d(1, 6, 3)
    +    self$conv2 = nn_conv2d(6, 16, 3)
         # an affine operation: y = Wx + b
    -    self$fc1 = nn_linear(16 * 6 * 6, 120)  # 6*6 from image dimension
    -    self$fc2 = nn_linear(120, 84)
    -    self$fc3 = nn_linear(84, 10)
    -  },
    -  forward = function(x) {
    -    x %>% 
    +    self$fc1 = nn_linear(16 * 6 * 6, 120)  # 6*6 from image dimension
    +    self$fc2 = nn_linear(120, 84)
    +    self$fc3 = nn_linear(84, 10)
    +  },
    +  forward = function(x) {
    +    x %>% 
           
    -      self$conv1() %>% 
    -      nnf_relu() %>% 
    -      nnf_max_pool2d(c(2,2)) %>% 
    +      self$conv1() %>% 
    +      nnf_relu() %>% 
    +      nnf_max_pool2d(c(2,2)) %>% 
           
    -      self$conv2() %>% 
    -      nnf_relu() %>% 
    -      nnf_max_pool2d(c(2,2)) %>% 
    +      self$conv2() %>% 
    +      nnf_relu() %>% 
    +      nnf_max_pool2d(c(2,2)) %>% 
           
    -      torch_flatten(start_dim = 2) %>% 
    +      torch_flatten(start_dim = 2) %>% 
           
    -      self$fc1() %>% 
    -      nnf_relu() %>% 
    +      self$fc1() %>% 
    +      nnf_relu() %>% 
           
    -      self$fc2() %>% 
    -      nnf_relu() %>% 
    +      self$fc2() %>% 
    +      nnf_relu() %>% 
           
    -      self$fc3()
    -  }
    -)
    +      self$fc3()
    +  }
    +)
     
    -net <- Net()
    -
    +net <- Net()

    You just have to define the forward function, and the backward function (where gradients are computed) is automatically defined for you using autograd. You can use any of the Tensor operations in the forward function.

    The learnable parameters of a model are returned by net$parameters.

    -str(net$parameters)
    +str(net$parameters)
     #> List of 10
     #>  $ conv1.weight:Float [1:6, 1:1, 1:3, 1:3]
     #>  $ conv1.bias  :Float [1:6]
    @@ -230,22 +255,19 @@
     #>  $ fc2.weight  :Float [1:84, 1:120]
     #>  $ fc2.bias    :Float [1:84]
     #>  $ fc3.weight  :Float [1:10, 1:84]
    -#>  $ fc3.bias    :Float [1:10]
    -
+#>  $ fc3.bias    :Float [1:10]

    Let’s try a random 32x32 input. Note: expected input size of this net (LeNet) is 32x32. To use this net on the MNIST dataset, please resize the images from the dataset to 32x32.

    -input <- torch_randn(1, 1, 32, 32)
    -out <- net(input)
    -out
    +input <- torch_randn(1, 1, 32, 32)
    +out <- net(input)
    +out
     #> torch_tensor 
    -#> -0.0560  0.0916  0.0401 -0.1081 -0.0183 -0.0508  0.1250 -0.0574  0.0058  0.0025
    -#> [ CPUFloatType{1,10} ]
    -
+#> -0.0417  0.0845  0.0490 -0.0918 -0.0495 -0.0496  0.1334 -0.0540  0.0141  0.0121
+#> [ CPUFloatType{1,10} ]

    Zero the gradient buffers of all parameters and backprops with random gradients:

    -net$zero_grad()
    -out$backward(torch_randn(1, 10))
    -
+net$zero_grad()
+out$backward(torch_randn(1, 10))

Note: nn only supports mini-batches. The entire nn functionality only supports inputs that are a mini-batch of samples, and not a single sample. For example, nn_conv2d will take in a 4D Tensor of nSamples x nChannels x Height x Width. If you have a single sample, just use input$unsqueeze(1) to add a fake batch dimension, as sketched below.
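
A minimal sketch of the fake batch dimension (unsqueeze at position 1, since torch for R uses 1-based dimensions; not part of the original tutorial):

single <- torch_randn(1, 32, 32)  # nChannels x Height x Width
batched <- single$unsqueeze(1)    # shape becomes 1 x 1 x 32 x 32
batched$size()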

    @@ -284,16 +306,15 @@

There are several different loss functions under the nn package. A simple loss is nnf_mse_loss, which computes the mean-squared error between the input and the target.

    For example:

    -output <- net(input)
    -target <- torch_randn(10)  # a dummy target, for example
    -target <- target$view(c(1, -1))  # make it the same shape as output
    +output <- net(input)
    +target <- torch_randn(10)  # a dummy target, for example
    +target <- target$view(c(1, -1))  # make it the same shape as output
     
    -loss <- nnf_mse_loss(output, target)
    -loss
    +loss <- nnf_mse_loss(output, target)
    +loss
     #> torch_tensor 
    -#> 0.388282
    -#> [ CPUFloatType{} ]
    -
+#> 1.83784
+#> [ CPUFloatType{} ]

    Now, if you follow loss in the backward direction, using its $grad_fn attribute, you will see a graph of computations that looks like this:

    input -> conv2d -> relu -> maxpool2d -> conv2d -> relu -> maxpool2d
           -> view -> linear -> relu -> linear -> relu -> linear
    @@ -302,13 +323,12 @@
     

So, when we call loss$backward(), the whole graph is differentiated w.r.t. the loss, and all Tensors in the graph that have requires_grad=TRUE will have their $grad Tensor accumulated with the gradient.

    For illustration, let us follow a few steps backward:

    -loss$grad_fn
    +loss$grad_fn
     #> MseLossBackward
    -loss$grad_fn$next_functions[[1]]
    +loss$grad_fn$next_functions[[1]]
     #> AddmmBackward
    -loss$grad_fn$next_functions[[1]]$next_functions[[1]]
    -#> torch::autograd::AccumulateGrad
    -
+loss$grad_fn$next_functions[[1]]$next_functions[[1]]
+#> torch::autograd::AccumulateGrad

    @@ -316,10 +336,10 @@

To backpropagate the error all we have to do is call loss$backward(). You need to clear the existing gradients though, else gradients will be accumulated to existing gradients.

    Now we shall call loss$backward(), and have a look at conv1’s bias gradients before and after the backward.

    -net$zero_grad()     # zeroes the gradient buffers of all parameters
    +net$zero_grad()     # zeroes the gradient buffers of all parameters
     
     # conv1.bias.grad before backward
    -net$conv1$bias$grad
    +net$conv1$bias$grad
     #> torch_tensor 
     #>  0
     #>  0
    @@ -329,20 +349,19 @@
     #>  0
     #> [ CPUFloatType{6} ]
     
    -loss$backward()
    +loss$backward()
     
     # conv1.bias.grad after backward
    -net$conv1$bias$grad
    +net$conv1$bias$grad
     #> torch_tensor 
    -#> 0.001 *
    -#>  2.3567
    -#> -1.3589
    -#> -0.6749
    -#>  5.5939
    -#> -4.2062
    -#>  0.6161
    -#> [ CPUFloatType{6} ]
    -
+#> 0.01 *
+#> -2.0062
+#>  0.9085
+#> -0.2479
+#> -1.8640
+#>  0.5936
+#>  0.5216
+#> [ CPUFloatType{6} ]

    Now, we have seen how to use loss functions.

    @@ -352,29 +371,27 @@

\[weight = weight - learning\_rate \times gradient\]

    We can implement this using simple R code:

    -learning_rate <- 0.01
    -for (f in net$parameters) {
    -  with_no_grad({
    -    f$sub_(f$grad * learning_rate)
    -  })
    -}
    -
+learning_rate <- 0.01
+for (f in net$parameters) {
+  with_no_grad({
+    f$sub_(f$grad * learning_rate)
+  })
+}

Note: the weight updates here are wrapped in with_no_grad as we don’t want the updates to be tracked by the autograd engine.

    However, as you use neural networks, you want to use various different update rules such as SGD, Nesterov-SGD, Adam, RMSProp, etc.

     # create your optimizer
    -optimizer <- optim_sgd(net$parameters, lr = 0.01)
    +optimizer <- optim_sgd(net$parameters, lr = 0.01)
     
     # in your training loop:
    -optimizer$zero_grad()   # zero the gradient buffers
    -output <- net(input)
    -loss <- nnf_mse_loss(output, target)
    -loss$backward()
    -optimizer$step()    # Does the update
    -#> NULL
    -
+optimizer$zero_grad()   # zero the gradient buffers
+output <- net(input)
+loss <- nnf_mse_loss(output, target)
+loss$backward()
+optimizer$step()    # Does the update
+#> NULL

    Note: Observe how gradient buffers had to be manually set to zero using optimizer$zero_grad(). This is because gradients are accumulated as explained in the Backprop section.
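
Putting the pieces together, a full training step just repeats the calls above. A minimal sketch reusing net, input and target from this tutorial:

for (t in 1:5) {
  optimizer$zero_grad()                 # clear accumulated gradients
  output <- net(input)                  # forward pass
  loss <- nnf_mse_loss(output, target)  # compute the loss
  loss$backward()                       # backward pass
  optimizer$step()                      # update the parameters
}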

    diff --git a/articles/getting-started/new-autograd-functions.html b/articles/getting-started/new-autograd-functions.html index 7b118773a0cfe232f3322afaec1308f95279d6a9..cf978bc00a9681862d01767cabd13ea43a10de53 100644 --- a/articles/getting-started/new-autograd-functions.html +++ b/articles/getting-started/new-autograd-functions.html @@ -11,12 +11,19 @@ - + + +
    @@ -31,7 +38,7 @@ torch - 0.0.3 + 0.1.0
    @@ -104,6 +111,9 @@
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -117,7 +127,24 @@
  • - +
  • Reference
  • @@ -159,8 +186,7 @@

Note: This is an R port of the official tutorial available here. All credit goes to Justin Johnson.

    -library(torch)
    -
    +library(torch)

    Under the hood, each primitive autograd operator is really two functions that operate on Tensors. The forward function computes output Tensors from input Tensors. The backward function receives the gradient of the output Tensors with respect to some scalar value, and computes the gradient of the input Tensors with respect to that same scalar value.

    In torch we can easily define our own autograd operator by defining a subclass of autograd_function and implementing the forward and backward functions. We can then use our new autograd operator by constructing an instance and calling it like a function, passing Tensors containing input data.

    In this example we define our own custom autograd function for performing the ReLU nonlinearity, and use it to implement our two-layer network:

@@ -168,93 +194,92 @@
 # We can implement our own custom autograd Functions by subclassing
 # autograd_function and implementing the forward and backward passes
 # which operate on Tensors.
-my_relu <- autograd_function(
+my_relu <- autograd_function(
   # In the forward pass we receive a Tensor containing the input and return
   # a Tensor containing the output. ctx is a context object that can be used
   # to stash information for backward computation. You can cache arbitrary
   # objects for use in the backward pass using the ctx$save_for_backward method.
-  forward = function(ctx, input) {
-    ctx$save_for_backward(input = input)
-    input$clamp(min = 0)
-  },
+  forward = function(ctx, input) {
+    ctx$save_for_backward(input = input)
+    input$clamp(min = 0)
+  },
   # In the backward pass we receive a Tensor containing the gradient of the loss
   # with respect to the output, and we need to compute the gradient of the loss
   # with respect to the input.
-  backward = function(ctx, grad_output) {
-    v <- ctx$saved_variables
-    grad_input <- grad_output$clone()
-    grad_input[v$input < 0] <- 0
-    list(input = grad_input)
-  }
-)
+  backward = function(ctx, grad_output) {
+    v <- ctx$saved_variables
+    grad_input <- grad_output$clone()
+    grad_input[v$input < 0] <- 0
+    list(input = grad_input)
+  }
+)
 
-if (cuda_is_available()) {
-  device <- torch_device("cuda")
-} else {
-  device <- torch_device("cpu")
-}
+if (cuda_is_available()) {
+  device <- torch_device("cuda")
+} else {
+  device <- torch_device("cpu")
+}
 
 # N is batch size; D_in is input dimension;
 # H is hidden dimension; D_out is output dimension.
-N <- 64
-D_in <- 1000
-H <- 100
-D_out <- 10
+N <- 64
+D_in <- 1000
+H <- 100
+D_out <- 10
 
 # Create random input and output data
 # Setting requires_grad=FALSE (the default) indicates that we do not need to 
 # compute gradients with respect to these Tensors during the backward pass.
-x <- torch_randn(N, D_in, device=device)
-y <- torch_randn(N, D_out, device=device)
+x <- torch_randn(N, D_in, device=device)
+y <- torch_randn(N, D_out, device=device)
 
 # Randomly initialize weights
 # Setting requires_grad=TRUE indicates that we want to compute gradients with
 # respect to these Tensors during the backward pass.
-w1 <- torch_randn(D_in, H, device=device, requires_grad = TRUE)
-w2 <- torch_randn(H, D_out, device=device, requires_grad = TRUE)
+w1 <- torch_randn(D_in, H, device=device, requires_grad = TRUE)
+w2 <- torch_randn(H, D_out, device=device, requires_grad = TRUE)
 
-learning_rate <- 1e-6
-for (t in seq_len(500)) {
+learning_rate <- 1e-6
+for (t in seq_len(500)) {
   # Forward pass: compute predicted y using operations on Tensors; these
   # are exactly the same operations we used to compute the forward pass using
   # Tensors, but we do not need to keep references to intermediate values since
   # we are not implementing the backward pass by hand.
-  y_pred <- my_relu(x$mm(w1))$mm(w2)
+  y_pred <- my_relu(x$mm(w1))$mm(w2)
 
   # Compute and print loss using operations on Tensors.
   # Now loss is a Tensor of shape (1,)
-  loss <- (y_pred - y)$pow(2)$sum()
-  if (t %% 100 == 0 || t == 1)
-    cat("Step:", t, ":", as.numeric(loss), "\n")
+  loss <- (y_pred - y)$pow(2)$sum()
+  if (t %% 100 == 0 || t == 1)
+    cat("Step:", t, ":", as.numeric(loss), "\n")
 
   # Use autograd to compute the backward pass. This call will compute the
   # gradient of loss with respect to all Tensors with requires_grad=TRUE.
   # After this call w1$grad and w2$grad will be Tensors holding the gradient
   # of the loss with respect to w1 and w2 respectively.
-  loss$backward()
+  loss$backward()
 
   # Manually update weights using gradient descent. Wrap in `with_no_grad`
   # because weights have requires_grad=TRUE, but we don't need to track this
   # in autograd.
   # You can also use optim_sgd to achieve this.
-  with_no_grad({
+  with_no_grad({
    # operations suffixed with an `_` operate in-place on the tensor.
-    w1$sub_(learning_rate * w1$grad)
-    w2$sub_(learning_rate * w2$grad)
+    w1$sub_(learning_rate * w1$grad)
+    w2$sub_(learning_rate * w2$grad)
 
    # Manually zero the gradients after updating weights
-    w1$grad$zero_()
-    w2$grad$zero_()
-  })
-}
-#> Step: 1 : 25332368 
-#> Step: 100 : 473.5124 
-#> Step: 200 : 2.001738 
-#> Step: 300 : 0.01279241 
-#> Step: 400 : 0.0002553065 
-#> Step: 500 : 4.130635e-05
-
+    w1$grad$zero_()
+    w2$grad$zero_()
+  })
+}
+#> Step: 1 : 29097476 
+#> Step: 100 : 387.3773 
+#> Step: 200 : 1.046869 
+#> Step: 300 : 0.005221077 
+#> Step: 400 : 0.0001420454 
+#> Step: 500 : 2.901151e-05

In the next example we will learn how to use the neural network abstractions in torch.

    diff --git a/articles/getting-started/nn.html b/articles/getting-started/nn.html index f2d0e7bc110a7ea6ed8d67a0deb8f664b5f050ad..95b9358054e40d0db7b89a98b05bb70a1b6e4947 100644 --- a/articles/getting-started/nn.html +++ b/articles/getting-started/nn.html @@ -11,12 +11,19 @@ - + + +
    @@ -31,7 +38,7 @@ torch - 0.0.3 + 0.1.0
    @@ -104,6 +111,9 @@
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -117,7 +127,24 @@
  • - +
  • Reference
  • @@ -159,85 +186,83 @@

Note: This is an R port of the official tutorial available here. All credit goes to Justin Johnson.

    -library(torch)
    -
    +library(torch)

    Computational graphs and autograd are a very powerful paradigm for defining complex operators and automatically taking derivatives; however for large neural networks raw autograd can be a bit too low-level.

    When building neural networks we frequently think of arranging the computation into layers, some of which have learnable parameters which will be optimized during learning.

    In TensorFlow, packages like Keras, TensorFlow-Slim, and TFLearn provide higher-level abstractions over raw computational graphs that are useful for building neural networks.

    In torch, the nn functionality serves this same purpose. The nn feature defines a set of Modules, which are roughly equivalent to neural network layers. A Module receives input Tensors and computes output Tensors, but may also hold internal state such as Tensors containing learnable parameters. The nn collection also defines a set of useful loss functions that are commonly used when training neural networks.

    In this example we use nn to implement our two-layer network:

    -if (cuda_is_available()) {
    -   device <- torch_device("cuda")
    -} else {
    -   device <- torch_device("cpu")
    -}
    +if (cuda_is_available()) {
    +   device <- torch_device("cuda")
    +} else {
    +   device <- torch_device("cpu")
    +}
        
     # N is batch size; D_in is input dimension;
     # H is hidden dimension; D_out is output dimension.
    -N <- 64
    -D_in <- 1000
    -H <- 100
    -D_out <- 10
    +N <- 64
    +D_in <- 1000
    +H <- 100
    +D_out <- 10
     
     # Create random input and output data
     # Setting requires_grad=FALSE (the default) indicates that we do not need to 
     # compute gradients with respect to these Tensors during the backward pass.
    -x <- torch_randn(N, D_in, device=device)
    -y <- torch_randn(N, D_out, device=device)
    +x <- torch_randn(N, D_in, device=device)
    +y <- torch_randn(N, D_out, device=device)
     
     # Use the nn package to define our model as a sequence of layers. nn_sequential
     # is a Module which contains other Modules, and applies them in sequence to
     # produce its output. Each Linear Module computes output from input using a
     # linear function, and holds internal Tensors for its weight and bias.
    -model <- nn_sequential(
    -    nn_linear(D_in, H),
    -    nn_relu(),
    -    nn_linear(H, D_out)
    -)
    +model <- nn_sequential(
    +    nn_linear(D_in, H),
    +    nn_relu(),
    +    nn_linear(H, D_out)
    +)
     
     # The nn package also contains definitions of popular loss functions; in this
     # case we will use Mean Squared Error (MSE) as our loss function.
    -loss_fn <- nnf_mse_loss
    +loss_fn <- nnf_mse_loss
     
    -learning_rate <- 1e-6
    -for (t in seq_len(500)) {
    +learning_rate <- 1e-6
    +for (t in seq_len(500)) {
        # Forward pass: compute predicted y by passing x to the model. Module objects
        # can be called like functions. When doing so you pass a Tensor of input
        # data to the Module and it produces a Tensor of output data.
    -   y_pred <- model(x)
    +   y_pred <- model(x)
        
        # Compute and print loss. We pass Tensors containing the predicted and true
        # values of y, and the loss function returns a Tensor containing the
        # loss.
    -   loss <- loss_fn(y_pred, y)
    -   if (t %% 100 == 0 || t == 1)
    -      cat("Step:", t, ":", as.numeric(loss), "\n")
    +   loss <- loss_fn(y_pred, y)
    +   if (t %% 100 == 0 || t == 1)
    +      cat("Step:", t, ":", as.numeric(loss), "\n")
        
        # Zero the gradients before running the backward pass.
    -   model$zero_grad()
    +   model$zero_grad()
     
        # Backward pass: compute gradient of the loss with respect to all the learnable
        # parameters of the model. Internally, the parameters of each Module are stored
        # in Tensors with requires_grad=TRUE, so this call will compute gradients for
        # all learnable parameters in the model.
    -   loss$backward()
    +   loss$backward()
        
        # Update the weights using gradient descent. Each parameter is a Tensor, so
        # we can access its gradients like we did before.
    -   with_no_grad({
    -      for (param in model$parameters) {
    -         param$sub_(learning_rate * param$grad)
    -      }
    -   })
    -}
    -#> Step: 1 : 1.04115 
    -#> Step: 100 : 1.041026 
    -#> Step: 200 : 1.040901 
    -#> Step: 300 : 1.040776 
    -#> Step: 400 : 1.04065 
    -#> Step: 500 : 1.040525
    -
+   with_no_grad({
+      for (param in model$parameters) {
+         param$sub_(learning_rate * param$grad)
+      }
+   })
+}
+#> Step: 1 : 1.038781 
+#> Step: 100 : 1.038654 
+#> Step: 200 : 1.038526 
+#> Step: 300 : 1.038399 
+#> Step: 400 : 1.038271 
+#> Step: 500 : 1.038143

    In the next example we will learn how to use optimizers implemented in torch.

    diff --git a/articles/getting-started/optim.html b/articles/getting-started/optim.html index d5bcc6308a6f28f8b766ec2bf8fa624b8fbbf4e6..4c93b57aa0ad944bc24ded1620fb4782f2a7895c 100644 --- a/articles/getting-started/optim.html +++ b/articles/getting-started/optim.html @@ -11,12 +11,19 @@ - + + +
    @@ -31,7 +38,7 @@ torch - 0.0.3 + 0.1.0
    @@ -104,6 +111,9 @@
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -117,7 +127,24 @@
  • - +
  • Reference
  • @@ -159,87 +186,85 @@

Note: This is an R port of the official tutorial available here. All credit goes to Justin Johnson.

    -library(torch)
    -
    +library(torch)

    Up to this point we have updated the weights of our models by manually mutating the Tensors holding learnable parameters (with with_no_grad to avoid tracking history in autograd). This is not a huge burden for simple optimization algorithms like stochastic gradient descent, but in practice we often train neural networks using more sophisticated optimizers like AdaGrad, RMSProp, Adam, etc.

    The optim package in torch abstracts the idea of an optimization algorithm and provides implementations of commonly used optimization algorithms.

    In this example we will use the nn package to define our model as before, but we will optimize the model using the Adam algorithm provided by optim:

    -if (cuda_is_available()) {
    -   device <- torch_device("cuda")
    -} else {
    -   device <- torch_device("cpu")
    -}
    +if (cuda_is_available()) {
    +   device <- torch_device("cuda")
    +} else {
    +   device <- torch_device("cpu")
    +}
        
     # N is batch size; D_in is input dimension;
     # H is hidden dimension; D_out is output dimension.
    -N <- 64
    -D_in <- 1000
    -H <- 100
    -D_out <- 10
    +N <- 64
    +D_in <- 1000
    +H <- 100
    +D_out <- 10
     
     # Create random input and output data
     # Setting requires_grad=FALSE (the default) indicates that we do not need to 
     # compute gradients with respect to these Tensors during the backward pass.
    -x <- torch_randn(N, D_in, device=device)
    -y <- torch_randn(N, D_out, device=device)
    +x <- torch_randn(N, D_in, device=device)
    +y <- torch_randn(N, D_out, device=device)
     
     # Use the nn package to define our model as a sequence of layers. nn_sequential
     # is a Module which contains other Modules, and applies them in sequence to
     # produce its output. Each Linear Module computes output from input using a
     # linear function, and holds internal Tensors for its weight and bias.
    -model <- nn_sequential(
    -    nn_linear(D_in, H),
    -    nn_relu(),
    -    nn_linear(H, D_out)
    -)
    +model <- nn_sequential(
    +    nn_linear(D_in, H),
    +    nn_relu(),
    +    nn_linear(H, D_out)
    +)
     
     # The nn package also contains definitions of popular loss functions; in this
     # case we will use Mean Squared Error (MSE) as our loss function.
    -loss_fn <- nnf_mse_loss
    +loss_fn <- nnf_mse_loss
     
     # Use the optim package to define an Optimizer that will update the weights of
     # the model for us. Here we will use Adam; the optim package contains many other
     # optimization algorithms. The first argument to the Adam constructor tells the
     # optimizer which Tensors it should update.
    -learning_rate <- 1e-4
    -optimizer <- optim_adam(model$parameters, lr=learning_rate)
    +learning_rate <- 1e-4
    +optimizer <- optim_adam(model$parameters, lr=learning_rate)
     
    -for (t in seq_len(500)) {
    +for (t in seq_len(500)) {
        # Forward pass: compute predicted y by passing x to the model. Module objects
        # can be called like functions. When doing so you pass a Tensor of input
        # data to the Module and it produces a Tensor of output data.
    -   y_pred <- model(x)
    +   y_pred <- model(x)
        
        # Compute and print loss. We pass Tensors containing the predicted and true
        # values of y, and the loss function returns a Tensor containing the
        # loss.
    -   loss <- loss_fn(y_pred, y)
    -   if (t %% 100 == 0 || t == 1)
    -      cat("Step:", t, ":", as.numeric(loss), "\n")
    +   loss <- loss_fn(y_pred, y)
    +   if (t %% 100 == 0 || t == 1)
    +      cat("Step:", t, ":", as.numeric(loss), "\n")
        
        # Before the backward pass, use the optimizer object to zero all of the
        # gradients for the variables it will update (which are the learnable
        # weights of the model). This is because by default, gradients are
   # accumulated in buffers (i.e., not overwritten) whenever $backward()
   # is called. Check out the docs of `autograd_backward` for more details.
    -   optimizer$zero_grad()
    +   optimizer$zero_grad()
     
        # Backward pass: compute gradient of the loss with respect to model
        # parameters
    -   loss$backward()
    +   loss$backward()
     
        # Calling the step function on an Optimizer makes an update to its
        # parameters
    -   optimizer$step()
    -}
    -#> Step: 1 : 1.03194 
    -#> Step: 100 : 0.08338322 
    -#> Step: 200 : 0.001254716 
    -#> Step: 300 : 3.605265e-06 
    -#> Step: 400 : 2.155708e-09 
    -#> Step: 500 : 5.439427e-13
    -
+   optimizer$step()
+}
+#> Step: 1 : 0.9450495 
+#> Step: 100 : 0.06193794 
+#> Step: 200 : 0.0007609552 
+#> Step: 300 : 2.95975e-06 
+#> Step: 400 : 4.247736e-09 
+#> Step: 500 : 1.415559e-12
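
Because all optimizers share this interface, trying a different algorithm is a one-line change. A sketch (optim_sgd shown; the hyperparameters are illustrative):

optimizer <- optim_sgd(model$parameters, lr = 1e-4, momentum = 0.9)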

    In the next example we will learn how to create custom nn_modules.

diff --git a/articles/getting-started/tensors-and-autograd.html b/articles/getting-started/tensors-and-autograd.html

Note: This is an R port of the official tutorial available here. All credits go to Justin Johnson.

library(torch)

    In the previous examples, we had to manually implement both the forward and backward passes of our neural network. Manually implementing the backward pass is not a big deal for a small two-layer network, but can quickly get very hairy for large complex networks.

    Thankfully, we can use automatic differentiation to automate the computation of backward passes in neural networks. The autograd feature in torch provides exactly this functionality. When using autograd, the forward pass of your network will define a computational graph; nodes in the graph will be Tensors, and edges will be functions that produce output Tensors from input Tensors. Backpropagating through this graph then allows you to easily compute gradients.

While this sounds complicated, it's pretty simple to use in practice. Each Tensor represents a node in a computational graph. If x is a Tensor that has x$requires_grad=TRUE, then x$grad is another Tensor holding the gradient of x with respect to some scalar value.

    Here we use torch Tensors and autograd to implement our two-layer network; now we no longer need to manually implement the backward pass through the network:

if (cuda_is_available()) {
   device <- torch_device("cuda")
} else {
   device <- torch_device("cpu")
}
    
# N is batch size; D_in is input dimension;
# H is hidden dimension; D_out is output dimension.
N <- 64
D_in <- 1000
H <- 100
D_out <- 10

# Create random input and output data.
# Setting requires_grad=FALSE (the default) indicates that we do not need to 
# compute gradients with respect to these Tensors during the backward pass.
x <- torch_randn(N, D_in, device=device)
y <- torch_randn(N, D_out, device=device)

# Randomly initialize weights.
# Setting requires_grad=TRUE indicates that we want to compute gradients with
# respect to these Tensors during the backward pass.
w1 <- torch_randn(D_in, H, device=device, requires_grad = TRUE)
w2 <- torch_randn(H, D_out, device=device, requires_grad = TRUE)

learning_rate <- 1e-6
for (t in seq_len(500)) {
   # Forward pass: compute predicted y using operations on Tensors; these
   # are exactly the same operations we used to compute the forward pass using
   # Tensors, but we do not need to keep references to intermediate values since
   # we are not implementing the backward pass by hand.
   y_pred <- x$mm(w1)$clamp(min=0)$mm(w2)
   
   # Compute and print loss using operations on Tensors.
   # Now loss is a Tensor of shape (1,).
   loss <- (y_pred - y)$pow(2)$sum()
   if (t %% 100 == 0 || t == 1)
      cat("Step:", t, ":", as.numeric(loss), "\n")
   
   # Use autograd to compute the backward pass. This call will compute the
   # gradient of loss with respect to all Tensors with requires_grad=TRUE.
   # After this call w1$grad and w2$grad will be Tensors holding the gradient
   # of the loss with respect to w1 and w2 respectively.
   loss$backward()
   
   # Manually update weights using gradient descent. Wrap in `with_no_grad`
   # because weights have requires_grad=TRUE, but we don't need to track this
   # in autograd.
   # You can also use optim_sgd to achieve this.
   with_no_grad({
      
      # operations suffixed with an `_` operate in-place on the tensor.
      w1$sub_(learning_rate * w1$grad)
      w2$sub_(learning_rate * w2$grad)
      
      # Manually zero the gradients after updating weights.
      w1$grad$zero_()
      w2$grad$zero_()
   })
}
#> Step: 1 : 32265452 
#> Step: 100 : 617.4926 
#> Step: 200 : 3.489705 
#> Step: 300 : 0.03414487 
#> Step: 400 : 0.0006231664 
#> Step: 500 : 6.901405e-05

    In the next example we will learn how to create new autograd functions.
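As a preview, here is a minimal sketch of such a function: it re-implements exp() with an explicit backward formula (this mirrors the exp2 example from the autograd_function reference page further below):

exp2 <- autograd_function(
  forward = function(ctx, i) {
    result <- i$exp()
    ctx$save_for_backward(result = result)
    result
  },
  backward = function(ctx, grad_output) {
    list(i = grad_output * ctx$saved_variable$result)
  }
)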

diff --git a/articles/getting-started/tensors.html b/articles/getting-started/tensors.html

Note: This is an R port of the official tutorial available here. All credits go to Justin Johnson.

library(torch)

R arrays are great, but they cannot utilize GPUs to accelerate their numerical computations. For modern deep neural networks, GPUs often provide speedups of 50x or greater, so unfortunately pure R won't be enough for modern deep learning.

    Here we introduce the most fundamental torch concept: the Tensor. A torch Tensor is conceptually similar to an R array: a Tensor is an n-dimensional array, and torch provides many functions for operating on these Tensors. Behind the scenes, Tensors can keep track of a computational graph and gradients, but they’re also useful as a generic tool for scientific computing.

    Also unlike R, torch Tensors can utilize GPUs to accelerate their numeric computations. To run a torch Tensor on GPU, you simply need to cast it to a new datatype.
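For example, here is a sketch of both kinds of conversion using the $to() method (the CUDA branch assumes a GPU-enabled installation):

x <- torch_randn(2, 2)
# cast to a different dtype
x <- x$to(dtype = torch_float64())
# move to the GPU, guarded so this also runs on CPU-only machines
if (cuda_is_available()) {
  x <- x$to(device = torch_device("cuda"))
}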

Here we use torch Tensors to fit a two-layer network to random data. Like in the pure R example before, we need to manually implement the forward and backward passes through the network:

if (cuda_is_available()) {
   device <- torch_device("cuda")
} else {
   device <- torch_device("cpu")
}
    
# N is batch size; D_in is input dimension;
# H is hidden dimension; D_out is output dimension.
N <- 64
D_in <- 1000
H <- 100
D_out <- 10

# Create random input and output data
x <- torch_randn(N, D_in, device=device)
y <- torch_randn(N, D_out, device=device)

# Randomly initialize weights
w1 <- torch_randn(D_in, H, device=device)
w2 <- torch_randn(H, D_out, device=device)

learning_rate <- 1e-6
for (t in seq_len(500)) {
   # Forward pass: compute predicted y
   h <- x$mm(w1)
   h_relu <- h$clamp(min=0)
   y_pred <- h_relu$mm(w2)
   
   # Compute and print loss
   loss <- as.numeric((y_pred - y)$pow(2)$sum())
   if (t %% 100 == 0 || t == 1)
      cat("Step:", t, ":", loss, "\n")
   
   # Backprop to compute gradients of w1 and w2 with respect to loss
   grad_y_pred <- 2.0 * (y_pred - y)
   grad_w2 <- h_relu$t()$mm(grad_y_pred)
   grad_h_relu <- grad_y_pred$mm(w2$t())
   grad_h <- grad_h_relu$clone()
   grad_h[h < 0] <- 0
   grad_w1 <- x$t()$mm(grad_h)
   
   # Update weights using gradient descent
   w1 <- w1 - learning_rate * grad_w1
   w2 <- w2 - learning_rate * grad_w2
}
#> Step: 1 : 35593956 
#> Step: 100 : 1013.579 
#> Step: 200 : 12.88333 
#> Step: 300 : 0.2700103 
#> Step: 400 : 0.007026888 
#> Step: 500 : 0.0004394832

    In the next example we will use autograd instead of computing the gradients manually.

diff --git a/articles/getting-started/warmup.html b/articles/getting-started/warmup.html

Note: This is an R port of the official tutorial available here. All credits go to Justin Johnson.

library(torch)

    A fully-connected ReLU network with one hidden layer and no biases, trained to predict y from x using Euclidean error.

    This implementation uses pure R to manually compute the forward pass, loss, and backward pass.
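In symbols, the forward pass and loss computed below are

$$h = x W_1, \qquad h_{\mathrm{relu}} = \max(0, h), \qquad \hat y = h_{\mathrm{relu}} W_2, \qquad L = \sum_{i,j} (\hat y_{ij} - y_{ij})^2,$$

and the backward pass applies the chain rule: $\partial L / \partial W_2 = h_{\mathrm{relu}}^\top \, 2(\hat y - y)$ and $\partial L / \partial W_1 = x^\top \big( 2(\hat y - y) W_2^\top \odot \mathbf{1}[h > 0] \big)$.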

    An R array is a generic n-dimensional array; it does not know anything about deep learning or gradients or computational graphs, and is just a way to perform generic numeric computations.

# N is batch size; D_in is input dimension;
# H is hidden dimension; D_out is output dimension.
N <- 64
D_in <- 1000
H <- 100
D_out <- 10

# Create random input and output data
x <- array(rnorm(N*D_in), dim = c(N, D_in))
y <- array(rnorm(N*D_out), dim = c(N, D_out))

# Randomly initialize weights
w1 <- array(rnorm(D_in*H), dim = c(D_in, H))
w2 <- array(rnorm(H*D_out), dim = c(H, D_out))

learning_rate <- 1e-6
for (t in seq_len(500)) {
   # Forward pass: compute predicted y
   h <- x %*% w1
   h_relu <- ifelse(h < 0, 0, h)
   y_pred <- h_relu %*% w2
   
   # Compute and print loss
   loss <- sum((y_pred - y)^2)
   if (t %% 100 == 0 || t == 1)
      cat("Step:", t, ":", loss, "\n")
   
   # Backprop to compute gradients of w1 and w2 with respect to loss
   grad_y_pred <- 2 * (y_pred - y)
   grad_w2 <- t(h_relu) %*% grad_y_pred
   grad_h_relu <- grad_y_pred %*% t(w2)
   grad_h <- grad_h_relu
   grad_h[h < 0] <- 0
   grad_w1 <- t(x) %*% grad_h
   
   # Update weights
   w1 <- w1 - learning_rate * grad_w1
   w2 <- w2 - learning_rate * grad_w2
}
#> Step: 1 : 41617929 
#> Step: 100 : 853.0884 
#> Step: 200 : 6.804854 
#> Step: 300 : 0.1015232 
#> Step: 400 : 0.001972299 
#> Step: 500 : 4.278207e-05

In the next example we will replace the R array with a torch Tensor.

diff --git a/articles/getting-started/what-is-torch.html b/articles/getting-started/what-is-torch.html

Note: This is an R port of the official tutorial available here. All credits go to Soumith Chintala.

library(torch)

    It’s a scientific computing package targeted at two sets of audiences:

diff --git a/articles/indexing.html b/articles/indexing.html

library(torch)

    In this article we describe the indexing operator for torch tensors and how it compares to the R indexing operator for arrays.

    Torch’s indexing semantics are closer to numpy’s semantics than R’s. You will find a lot of similarities between this article and the numpy indexing article available here.

Single element indexing

Single element indexing for 1-D tensors works mostly as expected. Like R, it is 1-based. Unlike R, though, it accepts negative indices for indexing from the end of the array. (In R, negative indices are used to remove elements.)
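For comparison, this is what a negative index does in plain R:

# in base R, a negative index removes elements instead of counting from the end
(1:10)[-1]
#> [1]  2  3  4  5  6  7  8  9 10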

x <- torch_tensor(1:10)
x[1]
#> torch_tensor 
#> 1
#> [ CPULongType{} ]
x[-1]
#> torch_tensor 
#> 10
#> [ CPULongType{} ]

You can also subset matrices and higher-dimensional arrays using the same syntax:

x <- x$reshape(shape = c(2,5))
x
#> torch_tensor 
#>   1   2   3   4   5
#>   6   7   8   9  10
#> [ CPULongType{2,5} ]
x[1,3]
#> torch_tensor 
#> 3
#> [ CPULongType{} ]
x[1,-1]
#> torch_tensor 
#> 5
#> [ CPULongType{} ]

Note that if one indexes a multidimensional tensor with fewer indices than dimensions, one gets the corresponding sub-tensor, unlike in R, which would treat the array as flattened and return a single element. For example:

x[1]
#> torch_tensor 
#>  1
#>  2
#>  3
#>  4
#>  5
#> [ CPULongType{5} ]

    Slicing and striding

    It is possible to slice and stride arrays to extract sub-arrays of the same number of dimensions, but of different sizes than the original. This is best illustrated by a few examples:

x <- torch_tensor(1:10)
x
#> torch_tensor 
#>   1
#>   2
#>   3
#>   4
#>   5
#>   6
#>   7
#>   8
#>   9
#>  10
#> [ CPULongType{10} ]
x[2:5]
#> torch_tensor 
#>  2
#>  3
#>  4
#>  5
#> [ CPULongType{4} ]
x[1:(-7)]
#> torch_tensor 
#>  1
#>  2
#>  3
#>  4
#> [ CPULongType{4} ]

You can also use the 1:10:2 syntax, which means: in the range from 1 to 10, take every second item. For example:

x[1:5:2]
#> torch_tensor 
#>  1
#>  3
#>  5
#> [ CPULongType{3} ]

Another special value is N, meaning the size of the specified dimension. For example:

x[5:N]
#> torch_tensor 
#>   5
#>   6
#>   7
#>   8
#>   9
#>  10
#> [ CPULongType{6} ]


    Like in R, you can take all elements in a dimension by leaving an index empty.

    Consider a matrix:

x <- torch_randn(2, 3)
x
#> torch_tensor 
#> -0.3778 -0.2716  0.9928
#>  0.5261  1.5789 -0.5842
#> [ CPUFloatType{2,3} ]

    The following syntax will give you the first row:

x[1,]
#> torch_tensor 
#> -0.3778
#> -0.2716
#>  0.9928
#> [ CPUFloatType{3} ]

    And this would give you the first 2 columns:

x[,1:2]
#> torch_tensor 
#> -0.3778 -0.2716
#>  0.5261  1.5789
#> [ CPUFloatType{2,2} ]

    Dropping dimensions

By default, when indexing by a single integer, that dimension is dropped, to avoid a singleton dimension:

x <- torch_randn(2, 3)
x[1,]$shape
#> [1] 3

    You can optionally use the drop = FALSE argument to avoid dropping the dimension.

x[1,,drop = FALSE]$shape
#> [1] 1 3

    Adding a new dimension

    It’s possible to add a new dimension to a tensor using index-like syntax:

x <- torch_tensor(c(10))
x$shape
#> [1] 1
x[, newaxis]$shape
#> [1] 1 1
x[, newaxis, newaxis]$shape
#> [1] 1 1 1

    You can also use NULL instead of newaxis:

x[,NULL]$shape
#> [1] 1 1

Dealing with a variable number of indices

    Sometimes we don’t know how many dimensions a tensor has, but we do know what to do with the last available dimension, or the first one. To subsume all others, we can use ..:

z <- torch_tensor(1:125)$reshape(c(5,5,5))
z[1,..]
#> torch_tensor 
#>   1   2   3   4   5
#>   6   7   8   9  10
#>  11  12  13  14  15
#>  16  17  18  19  20
#>  21  22  23  24  25
#> [ CPULongType{5,5} ]
z[..,1]
#> torch_tensor 
#>    1    6   11   16   21
#>   26   31   36   41   46
#>   51   56   61   66   71
#>   76   81   86   91   96
#>  101  106  111  116  121
#> [ CPULongType{5,5} ]

diff --git a/articles/loading-data.html b/articles/loading-data.html
    diff --git a/articles/loading-data.html b/articles/loading-data.html index a00e41ea32c08c39db7d215ad228b5a623d11a83..29c4b7b68157aa00e6510de03793ab1434c6b585 100644 --- a/articles/loading-data.html +++ b/articles/loading-data.html @@ -11,12 +11,19 @@ - + + +
    @@ -31,7 +38,7 @@ torch - 0.0.3 + 0.1.0
    @@ -104,6 +111,9 @@
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -117,7 +127,24 @@
  • - +
  • Reference
  • @@ -156,8 +183,7 @@ +library(torch)

    Datasets and data loaders


    A custom dataset

library(palmerpenguins)
library(magrittr)

penguins
#> # A tibble: 344 x 8
#>    species island bill_length_mm bill_depth_mm flipper_length_… body_mass_g
#>    <fct>   <fct>           <dbl>         <dbl>            <int>       <int>
#>  8 Adelie  Torge…           39.2          19.6              195        4675
#>  9 Adelie  Torge…           34.1          18.1              193        3475
#> 10 Adelie  Torge…           42            20.2              190        4250
#> # … with 334 more rows, and 2 more variables: sex <fct>, year <int>

    Datasets are R6 classes created using the dataset() constructor. You can pass a name and various member functions. Among those should be initialize(), to create instance variables, .getitem(), to indicate how the data should be returned, and .length(), to say how many items we have.

    In addition, any number of helper functions can be defined.

Here, we assume the penguins have already been loaded, and all preprocessing consists of removing rows with NA values, transforming factors to numeric values, and converting from R data types to torch tensors.

    In .getitem, we essentially decide how this data is going to be used: All variables besides species go into x, the predictor, and species will constitute y, the target. Predictor and target are returned in a list, to be accessed as batch[[1]] and batch[[2]] during training.

penguins_dataset <- dataset(
  
  name = "penguins_dataset",
  
  initialize = function() {
    self$data <- self$prepare_penguin_data()
  },
  
  .getitem = function(index) {
    
    x <- self$data[index, 2:-1]
    y <- self$data[index, 1]$to(torch_long())
    
    list(x, y)
  },
  
  .length = function() {
    self$data$size()[[1]]
  },
  
  prepare_penguin_data = function() {
    
    input <- na.omit(penguins) 
    # conveniently, the categorical data are already factors
    input$species <- as.numeric(input$species)
    input$island <- as.numeric(input$island)
    input$sex <- as.numeric(input$sex)
    
    input <- as.matrix(input)
    torch_tensor(input)
  }
)

Let’s create the dataset, query its length, and look at its first item:

tuxes <- penguins_dataset()
tuxes$.length()
#> [1] 333
tuxes$.getitem(1)
#> [[1]]
#> torch_tensor 
#>     3.0000
#> [[2]]
#> torch_tensor 
#> 1
#> [ CPULongType{} ]

    To be able to iterate over tuxes, we need a data loader (we override the default batch size of 1):

dl <- tuxes %>% dataloader(batch_size = 8)

    Calling .length() on a data loader (as opposed to a dataset) will return the number of batches we have:

dl$.length()
#> [1] 42

    And we can create an iterator to inspect the first batch:

iter <- dl$.iter()
b <- iter$.next()
b
#> [[1]]
#> torch_tensor 
#>     3.0000    39.1000    18.7000   181.0000  3750.0000     2.0000  2007.0000
#> [[2]]
#> torch_tensor 
#>  1
#>  1
#>  1
#> [ CPULongType{8} ]

    To train a network, we can use enumerate to iterate over batches.

Training with data loaders

    Our example network is very simple. (In reality, we would want to treat island as the categorical variable it is, and either one-hot-encode or embed it.)
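To make that concrete, a one-hot encoding could be obtained along these lines. This is a sketch only, not used in the network below; it assumes nnf_one_hot() and 1-based integer island codes:

island <- torch_tensor(c(1, 3, 2), dtype = torch_long())
nnf_one_hot(island, num_classes = 3)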

net <- nn_module(
  "PenguinNet",
  initialize = function() {
    self$fc1 <- nn_linear(7, 32)
    self$fc2 <- nn_linear(32, 3)
  },
  forward = function(x) {
    x %>% 
      self$fc1() %>% 
      nnf_relu() %>% 
      self$fc2() %>% 
      nnf_log_softmax(dim = 1)
  }
)

model <- net()

    We still need an optimizer:

optimizer <- optim_sgd(model$parameters, lr = 0.01)

    And we’re ready to train:

for (epoch in 1:10) {
  
  l <- c()
  
  for (b in enumerate(dl)) {
    optimizer$zero_grad()
    output <- model(b[[1]])
    loss <- nnf_nll_loss(output, b[[2]])
    loss$backward()
    optimizer$step()
    l <- c(l, loss$item())
  }
  
  cat(sprintf("Loss at epoch %d: %3f\n", epoch, mean(l)))
}
#> Loss at epoch 1: 51.747068
#> Loss at epoch 2: 2.068251
#> Loss at epoch 3: 2.068251
#> Loss at epoch 4: 2.068251
#> Loss at epoch 5: 2.068251
#> Loss at epoch 6: 2.068251
#> Loss at epoch 7: 2.068251
#> Loss at epoch 8: 2.068251
#> Loss at epoch 9: 2.068251
#> Loss at epoch 10: 2.068251
diff --git a/articles/serialization.html b/articles/serialization.html
new file mode 100644

    Torch tensors in R are pointers to Tensors allocated by LibTorch. This has one major consequence for serialization. One cannot simply use saveRDS for serializing tensors, as you would save the pointer but not the data itself. When reloading a tensor saved with saveRDS the pointer might have been deleted in LibTorch and you would get wrong results.


    To solve this problem, torch implements specialized functions for serializing tensors to the disk:

- torch_save(): to save tensors and models to the disk.
- torch_load(): to load the models or tensors back to the session.

Please note that this format is still experimental and you shouldn't use it for long-term storage.

Saving tensors

    You can save any object of type torch_tensor to the disk using:

x <- torch_randn(10, 10)
torch_save(x, "tensor.pt")
x_ <- torch_load("tensor.pt")

torch_allclose(x, x_)
#> [1] TRUE
Saving modules

The torch_save and torch_load functions also work for nn_module objects.


When saving an nn_module, the whole object is serialized, including the model structure and its state.

module <- nn_module(
  "my_module",
  initialize = function() {
    self$fc1 <- nn_linear(10, 10)
    self$fc2 <- nn_linear(10, 1)
  },
  forward = function(x) {
    x %>% 
      self$fc1() %>% 
      self$fc2()
  }
)

model <- module()
torch_save(model, "model.pt")
model_ <- torch_load("model.pt")

# input tensor
x <- torch_randn(50, 10)
torch_allclose(model(x), model_(x))
#> [1] TRUE
Loading models saved in Python

Currently, the only way to load models from Python is to rewrite the model architecture in R. All the parameter names must be identical.


You can save the PyTorch model's state_dict using:

torch.save(model.state_dict(), fpath, _use_new_zipfile_serialization=True)

You can then load the state dict in R and copy it into the model with:

state_dict <- load_state_dict(fpath)
model <- Model()
model$load_state_dict(state_dict)

You can find working examples in torchvision. For example, this is what we do for the AlexNet model.

diff --git a/articles/tensor-creation.html b/articles/tensor-creation.html
library(torch)

    In this article we describe various ways of creating torch tensors in R.


    You can create tensors from R objects using the torch_tensor function. The torch_tensor function takes an R vector, matrix or array and creates an equivalent torch_tensor.

    You can see a few examples below:

torch_tensor(c(1,2,3))
#> torch_tensor 
#>  1
#>  2
#>  3
#> [ CPUFloatType{3} ]

# conform to row-major indexing used in torch
torch_tensor(matrix(1:10, ncol = 5, nrow = 2, byrow = TRUE))
#> torch_tensor 
#>   1   2   3   4   5
#>   6   7   8   9  10
#> [ CPULongType{2,5} ]
torch_tensor(array(runif(12), dim = c(2, 2, 3)))
#> torch_tensor 
#> (1,.,.) = 
#>   0.8530  0.3081  0.4772
#>   0.3160  0.0449  0.0551
#> 
#> (2,.,.) = 
#>   0.8822  0.8065  0.0366
#>   0.5736  0.1065  0.2902
#> [ CPUFloatType{2,2,3} ]

By default, we create tensors on the CPU device, converting their R datatype to the corresponding torch dtype.

Note that, currently, only numeric and boolean types are supported.

    You can always modify dtype and device when converting an R object to a torch tensor. For example:

torch_tensor(1, dtype = torch_long())
#> torch_tensor 
#>  1
#> [ CPULongType{1} ]
torch_tensor(1, device = "cpu", dtype = torch_float64())
#> torch_tensor 
#>  1
#> [ CPUDoubleType{1} ]

Other options available when creating a tensor are requires_grad (whether operations on the tensor should be recorded for autograd) and pin_memory (whether to allocate the tensor in page-locked memory).
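For instance, requires_grad (used throughout the autograd articles) can be set right at creation time:

x <- torch_tensor(c(1, 2, 3), requires_grad = TRUE)
x$requires_grad
#> [1] TRUE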

    - +
  • Reference
  • @@ -156,8 +183,7 @@
    -library(torch)
    -
    +library(torch)

So far, all we've been using from torch is tensors, but we've been performing all calculations ourselves – computing the predictions, the loss, the gradients (and thus, the necessary updates to the weights), and the new weight values. In this chapter, we'll make a significant change: namely, we spare ourselves the cumbersome calculation of gradients, and have torch do it for us.

    Before we see that in action, let’s get some more background.


    Torch uses a module called autograd to record operations performed on tensors, and store what has to be done to obtain the respective gradients. These actions are stored as functions, and those functions are applied in order when the gradient of the output (normally, the loss) with respect to those tensors is calculated: starting from the output node and propagating gradients back through the network. This is a form of reverse mode automatic differentiation.

    As users, we can see a bit of this implementation. As a prerequisite for this “recording” to happen, tensors have to be created with requires_grad = TRUE. E.g.

x <- torch_ones(2,2, requires_grad = TRUE)

To be clear, this is a tensor with respect to which gradients have to be calculated – normally, a tensor representing a weight or a bias, not the input data. If we now perform some operation on that tensor, assigning the result to y

y <- x$mean()

    we find that y now has a non-empty grad_fn that tells torch how to compute the gradient of y with respect to x:

y$grad_fn
#> MeanBackward0

    Actual computation of gradients is triggered by calling backward() on the output tensor.

y$backward()

    That executed, x now has a non-empty field grad that stores the gradient of y with respect to x:

x$grad
#> torch_tensor 
#>  0.2500  0.2500
#>  0.2500  0.2500
#> [ CPUFloatType{2,2} ]

    With a longer chain of computations, we can peek at how torch builds up a graph of backward operations.

    Here is a slightly more complex example. We call retain_grad() on y and z just for demonstration purposes; by default, intermediate gradients – while of course they have to be computed – aren’t stored, in order to save memory.

x1 <- torch_ones(2,2, requires_grad = TRUE)
x2 <- torch_tensor(1.1, requires_grad = TRUE)
y <- x1 * (x2 + 2)
y$retain_grad()
z <- y$pow(2) * 3
z$retain_grad()
out <- z$mean()

    Starting from out$grad_fn, we can follow the graph all back to the leaf nodes:

# how to compute the gradient for mean, the last operation executed
out$grad_fn
#> MeanBackward0
# how to compute the gradient for the multiplication by 3 in z = y$pow(2) * 3
out$grad_fn$next_functions
#> [[1]]
#> MulBackward1
# how to compute the gradient for pow in z = y$pow(2) * 3
out$grad_fn$next_functions[[1]]$next_functions
#> [[1]]
#> PowBackward0
# how to compute the gradient for the multiplication in y = x1 * (x2 + 2)
out$grad_fn$next_functions[[1]]$next_functions[[1]]$next_functions
#> [[1]]
#> MulBackward0
# how to compute the gradient for the two branches of y = x1 * (x2 + 2),
# where the left branch is a leaf node (AccumulateGrad for x1)
out$grad_fn$next_functions[[1]]$next_functions[[1]]$next_functions[[1]]$next_functions
#> [[1]]
#> torch::autograd::AccumulateGrad
#> [[2]]
#> AddBackward1
# here we arrive at the other leaf node (AccumulateGrad for x2)
out$grad_fn$next_functions[[1]]$next_functions[[1]]$next_functions[[1]]$next_functions[[2]]$next_functions
#> [[1]]
#> torch::autograd::AccumulateGrad

    After calling out$backward(), all tensors in the graph will have their respective gradients created. Without our calls to retain_grad above, z$grad and y$grad would be empty:

out$backward()
z$grad
#> torch_tensor 
#>  0.2500  0.2500
#>  0.2500  0.2500
#> [ CPUFloatType{2,2} ]
y$grad
#> torch_tensor 
#>  4.6500  4.6500
#>  4.6500  4.6500
#> [ CPUFloatType{2,2} ]
x2$grad
#> torch_tensor 
#>  18.6000
#> [ CPUFloatType{1} ]
x1$grad
#> torch_tensor 
#>  14.4150  14.4150
#>  14.4150  14.4150
#> [ CPUFloatType{2,2} ]

    Thus acquainted with autograd, we’re ready to modify our example.

### generate training data -----------------------------------------------------
# input dimensionality (number of input features)
d_in <- 3
# output dimensionality (number of predicted features)
d_out <- 1
# number of observations in training set
n <- 100
# create random data
x <- torch_randn(n, d_in)
y <- x[, 1]*0.2 - x[, 2]*1.3 - x[, 3]*0.5 + torch_randn(n)
y <- y$unsqueeze(dim = 1)
### initialize weights ---------------------------------------------------------
# dimensionality of hidden layer
d_hidden <- 32
# weights connecting input to hidden layer
w1 <- torch_randn(d_in, d_hidden, requires_grad = TRUE)
# weights connecting hidden to output layer
w2 <- torch_randn(d_hidden, d_out, requires_grad = TRUE)
# hidden layer bias
b1 <- torch_zeros(1, d_hidden, requires_grad = TRUE)
# output layer bias
b2 <- torch_zeros(1, d_out, requires_grad = TRUE)
### network parameters ---------------------------------------------------------
learning_rate <- 1e-4
### training loop --------------------------------------------------------------
for (t in 1:200) {

    ### -------- Forward pass -------- 
    y_pred <- x$mm(w1)$add(b1)$clamp(min = 0)$mm(w2)$add(b2)
    ### -------- Compute loss -------- 
    loss <- (y_pred - y)$pow(2)$mean()
    if (t %% 10 == 0) cat(t, as_array(loss), "\n")
    ### -------- Backpropagation -------- 
    # compute the gradient of loss with respect to all tensors with requires_grad = TRUE
    loss$backward()
 
    ### -------- Update weights -------- 
    
    # Wrap in with_no_grad() because this is a part we DON'T want to record
    # for automatic gradient computation
    with_no_grad({
      
      w1$sub_(learning_rate * w1$grad)
      w2$sub_(learning_rate * w2$grad)
      b1$sub_(learning_rate * b1$grad)
      b2$sub_(learning_rate * b2$grad)
      
      # Zero the gradients after every pass, because they'd accumulate otherwise
      w1$grad$zero_()
      w2$grad$zero_()
      b1$grad$zero_()
      b2$grad$zero_()
    
    })
    
}
#> 10 16.32032 
#> 20 15.48668 
#> 30 14.73107 
#> 40 14.046 
#> 50 13.42319 
#> 60 12.85455 
#> 70 12.33454 
#> 80 11.85764 
#> 90 11.41966 
#> 100 11.01673 
#> 110 10.64522 
#> 120 10.30196 
#> 130 9.9842 
#> 140 9.689827 
#> 150 9.416393 
#> 160 9.162653 
#> 170 8.927996 
#> 180 8.709177 
#> 190 8.504665 
#> 200 8.31341

    We still manually compute the forward pass, and we still manually update the weights. In the last two chapters of this section, we’ll see how these parts of the logic can be made more modular and reusable, as well.
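As a taste of what's ahead, here is a sketch of how the manual update step could hand its work to an optimizer, assuming the tensors from the loop above (optim_sgd is used the same way in the Loading Data article):

# an optimizer encapsulates both the update and the gradient zeroing
optimizer <- optim_sgd(list(w1, w2, b1, b2), lr = learning_rate)

# inside the training loop, the with_no_grad() block then shrinks to:
#   optimizer$zero_grad()
#   loss$backward()
#   optimizer$step()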

diff --git a/authors.html b/authors.html
diff --git a/extra.css b/extra.css
diff --git a/index.html b/index.html
diff --git a/news/index.html b/news/index.html
Source: NEWS.md

torch 0.1.0 (Unreleased)

- Added many missing losses (#252)
- Implemented the $<- and [[<- operators for the nn_module class (#253)
- Export nn_parameter, nn_buffer, and is_* auxiliary functions
- Added a new serialization vignette
- Added a few learning rate schedulers (#258)

diff --git a/pkgdown.yml b/pkgdown.yml

pandoc: 2.7.3
pkgdown: 1.6.1
pkgdown_sha: ~
articles:
  examples/basic-autograd: basic-autograd.html
  examples/basic-nn-module: basic-nn-module.html
  examples/dataset: dataset.html
  examples/index: index.html
  extending-autograd: extending-autograd.html
  getting-started/autograd: autograd.html
  getting-started/control-flow-and-weight-sharing: control-flow-and-weight-sharing.html
  getting-started/what-is-torch: what-is-torch.html
  indexing: indexing.html
  loading-data: loading-data.html
  serialization: serialization.html
  tensor/index: index.html
  tensor-creation: tensor-creation.html
  using-autograd: using-autograd.html
last_built: 2020-09-28T13:32Z

diff --git a/reference/AutogradContext.html b/reference/AutogradContext.html

AutogradContext: Class representing the context.


    Method new()

    (Dev related) Initializes the context. Not user related.

    Usage

AutogradContext$new(
  ptr,
  env,
  argument_names = NULL,
  argument_needs_grad = NULL
)

    Arguments

Method save_for_backward()

Saves given objects for a future call to backward().

    Before returning them to the user, a check is made to ensure they weren’t used in any in-place operation that modified their content.

    Arguments can also be any kind of R object.

    Usage

AutogradContext$save_for_backward(...)

    Arguments

Method mark_non_differentiable()

Marks outputs as non-differentiable, increasing the efficiency of backward computation. You still need to accept a gradient for each output in backward(), but it's always going to be a zero tensor with the same shape as the shape of a corresponding output.

    This is used e.g. for indices returned from a max Function.

    Usage

AutogradContext$mark_non_differentiable(...)

    Arguments

Method mark_dirty()

Marks given tensors as modified in an in-place operation. This should be called at most once, only from inside the forward() method, and all arguments should be inputs.

    Every tensor that’s been modified in-place in a call to forward() should be given to this function, to ensure correctness of our checks. It doesn’t matter whether the function is called before or after modification.

    Usage

AutogradContext$mark_dirty(...)

    Arguments



    Method clone()

    The objects of this class are cloneable with this method.

    Usage

AutogradContext$clone(deep = FALSE)

    Arguments

diff --git a/reference/as_array.html b/reference/as_array.html

    Converts to array

as_array(x)

    Arguments

diff --git a/reference/autograd_backward.html b/reference/autograd_backward.html

autograd_backward(
  tensors,
  grad_tensors = NULL,
  retain_graph = create_graph,
  create_graph = FALSE
)

    Arguments



    Examples

if (torch_is_installed()) {
x <- torch_tensor(1, requires_grad = TRUE)
y <- 2 * x

a <- torch_tensor(1, requires_grad = TRUE)
b <- 3 * a

autograd_backward(list(y, b))

}

diff --git a/reference/autograd_function.html b/reference/autograd_function.html
  • @@ -205,7 +237,7 @@ processed in the topological ordering, by calling backward() method Function object, and passing returned gradients on to next Function's.

autograd_function(forward, backward)
    autograd_function(forward, backward)

    Arguments

    @@ -234,20 +266,20 @@ See AutogradContext for more information abou

    Examples

    -
    if (torch_is_installed()) { - -exp2 <- autograd_function( - forward = function(ctx, i) { - result <- i$exp() - ctx$save_for_backward(result = result) - result - }, - backward = function(ctx, grad_output) { - list(i = grad_output * ctx$saved_variable$result) - } -) - -} +
    if (torch_is_installed()) { + +exp2 <- autograd_function( + forward = function(ctx, i) { + result <- i$exp() + ctx$save_for_backward(result = result) + result + }, + backward = function(ctx, grad_output) { + list(i = grad_output * ctx$saved_variable$result) + } +) + +}
diff --git a/reference/autograd_grad.html b/reference/autograd_grad.html

autograd_grad(
  outputs,
  inputs,
  grad_outputs = NULL,
  retain_graph = create_graph,
  create_graph = FALSE,
  allow_unused = FALSE
)

    Arguments

If it's FALSE, then gradient w.r.t. all remaining leaves will still be computed, and will be accumulated into their .grad attribute.

    Examples

if (torch_is_installed()) {
w <- torch_tensor(0.5, requires_grad = TRUE)
b <- torch_tensor(0.9, requires_grad = TRUE)
x <- torch_tensor(runif(100))
y <- 2 * x + 1
loss <- (y - (w*x + b))^2
loss <- loss$mean()

o <- autograd_grad(loss, list(w, b))
o

}
#> [[1]]
#> torch_tensor 
#> -0.9935

diff --git a/reference/autograd_set_grad_mode.html b/reference/autograd_set_grad_mode.html

    Sets or disables gradient history.

autograd_set_grad_mode(enabled)
    autograd_set_grad_mode(enabled)

    Arguments

    diff --git a/reference/cuda_current_device.html b/reference/cuda_current_device.html index 528914706714a2c1aa4e53bc2ba8b899af005e4e..aaa72902041c1eeb8cc4cab3c2a8166c2253d3ef 100644 --- a/reference/cuda_current_device.html +++ b/reference/cuda_current_device.html @@ -38,6 +38,8 @@ + + @@ -55,6 +57,16 @@ + + + + @@ -72,7 +84,7 @@ torch - 0.0.3 + 0.1.0 @@ -145,6 +157,9 @@
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -158,7 +173,24 @@
  • - +
  • Reference
  • @@ -195,7 +227,7 @@

    Returns the index of a currently selected device.

cuda_current_device()
    cuda_current_device()
    diff --git a/reference/cuda_device_count.html b/reference/cuda_device_count.html index 8e58bf3ed050374ca3bf997364d34920f71d654c..727a49b1d61f6b37f0e759b6f26a33e4bccdce6c 100644 --- a/reference/cuda_device_count.html +++ b/reference/cuda_device_count.html @@ -38,6 +38,8 @@ + + @@ -55,6 +57,16 @@ + + + + @@ -72,7 +84,7 @@ torch - 0.0.3 + 0.1.0 @@ -145,6 +157,9 @@
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -158,7 +173,24 @@
  • - +
  • Reference
  • @@ -195,7 +227,7 @@

    Returns the number of GPUs available.

    -
    cuda_device_count()
    +
    cuda_device_count()
    diff --git a/reference/cuda_is_available.html b/reference/cuda_is_available.html index 2282ae1373fa1ce3000d672d8d5311d170942a8e..2529b5e9ade627dd31dd60d4d6bbc1afa31ac072 100644 --- a/reference/cuda_is_available.html +++ b/reference/cuda_is_available.html @@ -38,6 +38,8 @@ + + @@ -55,6 +57,16 @@ + + + + @@ -72,7 +84,7 @@ torch - 0.0.3 + 0.1.0 @@ -145,6 +157,9 @@
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -158,7 +173,24 @@
  • - +
  • Reference
  • @@ -195,7 +227,7 @@

    Returns a bool indicating if CUDA is currently available.

    -
    cuda_is_available()
    +
    cuda_is_available()
    diff --git a/reference/dataloader.html b/reference/dataloader.html index 5f20021f4d164ef6a3289d0039ec4c3756a4053b..0d965b6a3fffc5b69b87bc95b04cbf0d67067f9f 100644 --- a/reference/dataloader.html +++ b/reference/dataloader.html @@ -39,6 +39,8 @@ single- or multi-process iterators over the dataset. — dataloader • torch + @@ -58,6 +60,16 @@ single- or multi-process iterators over the dataset." /> + + + + @@ -75,7 +87,7 @@ single- or multi-process iterators over the dataset." /> torch - 0.0.3 + 0.1.0 @@ -148,6 +160,9 @@ single- or multi-process iterators over the dataset." />
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -161,7 +176,24 @@ single- or multi-process iterators over the dataset." />
  • - +
  • Reference
  • @@ -200,19 +232,19 @@ single- or multi-process iterators over the dataset. single- or multi-process iterators over the dataset.

    -
    dataloader(
    -  dataset,
    -  batch_size = 1,
    -  shuffle = FALSE,
    -  sampler = NULL,
    -  batch_sampler = NULL,
    -  num_workers = 0,
    -  collate_fn = NULL,
    -  pin_memory = FALSE,
    -  drop_last = FALSE,
    -  timeout = 0,
    -  worker_init_fn = NULL
    -)
    +
    dataloader(
    +  dataset,
    +  batch_size = 1,
    +  shuffle = FALSE,
    +  sampler = NULL,
    +  batch_sampler = NULL,
    +  num_workers = 0,
    +  collate_fn = NULL,
    +  pin_memory = FALSE,
    +  drop_last = FALSE,
    +  timeout = 0,
    +  worker_init_fn = NULL
    +)
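A minimal runnable sketch; tensor_dataset() is assumed here as a convenient way to wrap tensors into a dataset.

ds <- tensor_dataset(x = torch_randn(100, 10), y = torch_randn(100, 1))
dl <- dataloader(ds, batch_size = 32, shuffle = TRUE)
iter <- dataloader_make_iter(dl)
batch <- dataloader_next(iter)
batch$x$size()   # 32 10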

    Arguments

    diff --git a/reference/dataloader_make_iter.html b/reference/dataloader_make_iter.html index ed0b16f265e0ef9e55c133d3274002b24cf60342..979a5f926e1fc0d4a26fcda4acd7e2b21b5c3188 100644 --- a/reference/dataloader_make_iter.html +++ b/reference/dataloader_make_iter.html @@ -38,6 +38,8 @@ + + @@ -55,6 +57,16 @@ + + + + @@ -72,7 +84,7 @@ torch - 0.0.3 + 0.1.0 @@ -145,6 +157,9 @@
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -158,7 +173,24 @@
  • - +
  • Reference
  • @@ -195,7 +227,7 @@

    Creates an iterator from a DataLoader

    -
    dataloader_make_iter(dataloader)
    +
    dataloader_make_iter(dataloader)

    Arguments

    diff --git a/reference/dataloader_next.html b/reference/dataloader_next.html index 966e40f3bfdc3b3efcbfe041f639df12ac994e1a..ff091df01a7f5334168f7cc521ad502f5a6350ba 100644 --- a/reference/dataloader_next.html +++ b/reference/dataloader_next.html @@ -38,6 +38,8 @@ + + @@ -55,6 +57,16 @@ + + + + @@ -72,7 +84,7 @@ torch - 0.0.3 + 0.1.0 @@ -145,6 +157,9 @@
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -158,7 +173,24 @@
  • - +
  • Reference
  • @@ -195,7 +227,7 @@

    Get the next element of a dataloader iterator

    -
    dataloader_next(iter)
    +
    dataloader_next(iter)

    Arguments

    diff --git a/reference/dataset.html b/reference/dataset.html index 558e19b17d6ed5ea6c9f78de14b62f5af22e9361..0a6257e3fbabdb46a683c1d9687f30dcd3afb742 100644 --- a/reference/dataset.html +++ b/reference/dataset.html @@ -38,6 +38,8 @@ + + + + + + @@ -77,7 +89,7 @@ of ~torch.utils.data.DataLoader." /> torch - 0.0.3 + 0.1.0 @@ -150,6 +162,9 @@ of ~torch.utils.data.DataLoader." />
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -163,7 +178,24 @@ of ~torch.utils.data.DataLoader." />
  • - +
  • Reference
  • @@ -205,7 +237,7 @@ data sample for a given key. Subclasses could also optionally overwrite of ~torch.utils.data.DataLoader.

    -
    dataset(name = NULL, inherit = Dataset, ..., parent_env = parent.frame())
    +
    dataset(name = NULL, inherit = Dataset, ..., parent_env = parent.frame())
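A minimal sketch of a custom dataset, assuming the documented contract of initialize(), .getitem() and .length():

my_data <- dataset(
  name = "my_data",
  initialize = function(n) {
    self$x <- torch_randn(n, 10)   # generate n random examples
  },
  .getitem = function(i) {
    self$x[i, ]                    # return the i-th example
  },
  .length = function() {
    self$x$size(1)                 # number of examples
  }
)
ds <- my_data(100)
ds$.getitem(1)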

    Arguments

    diff --git a/reference/default_dtype.html b/reference/default_dtype.html index a517ff8bfbcd2e5ac8c966c454580971fa1b289c..df0f5e7bfcd1b7c0686d07648d87ac4d615f80df 100644 --- a/reference/default_dtype.html +++ b/reference/default_dtype.html @@ -38,6 +38,8 @@ + + @@ -55,6 +57,16 @@ + + + + @@ -72,7 +84,7 @@ torch - 0.0.3 + 0.1.0 @@ -145,6 +157,9 @@
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -158,7 +173,24 @@
  • - +
  • Reference
  • @@ -195,9 +227,9 @@

    Gets and sets the default floating point dtype.

    -
    torch_set_default_dtype(d)
    +    
    torch_set_default_dtype(d)
     
    -torch_get_default_dtype()
    +torch_get_default_dtype()

    Arguments

    diff --git a/reference/enumerate.dataloader.html b/reference/enumerate.dataloader.html index cdace84a567575119d25890890bb6838c3e572f4..4bbd7d24c73ee6c81958c15912f9db17b3ce3986 100644 --- a/reference/enumerate.dataloader.html +++ b/reference/enumerate.dataloader.html @@ -38,6 +38,8 @@ + + @@ -55,6 +57,16 @@ + + + + @@ -72,7 +84,7 @@ torch - 0.0.3 + 0.1.0 @@ -145,6 +157,9 @@
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -158,7 +173,24 @@
  • - +
  • Reference
  • @@ -196,7 +228,7 @@
    # S3 method for dataloader
    -enumerate(x, max_len = 1e+06, ...)
+enumerate(x, max_len = 1e+06, ...)

    Arguments

    diff --git a/reference/enumerate.html b/reference/enumerate.html index 5726147af6131a22299222fec509f1f812a93d3b..978f0850f0f0bc46cead9ce1abafe4ab8ce0e8be 100644 --- a/reference/enumerate.html +++ b/reference/enumerate.html @@ -38,6 +38,8 @@ + + @@ -55,6 +57,16 @@ + + + + @@ -72,7 +84,7 @@ torch - 0.0.3 + 0.1.0 @@ -145,6 +157,9 @@
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -158,7 +173,24 @@
  • - +
  • Reference
  • @@ -195,7 +227,7 @@

    Enumerate an iterator

    -
    enumerate(x, ...)
    +
    enumerate(x, ...)
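As a sketch, mirroring the pattern used in the scheduler examples below, enumerate() is typically used to loop over dataloader batches (tensor_dataset() is assumed as a convenience for building the dataset):

dl <- dataloader(tensor_dataset(x = torch_randn(10, 2)), batch_size = 5)
for (batch in enumerate(dl)) {
  print(batch$x$size())   # 5 2, printed twice
}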

    Arguments

    diff --git a/reference/figures/torch-full.png b/reference/figures/torch-full.png new file mode 100644 index 0000000000000000000000000000000000000000..61d24b86074b110f4cf3298f417c4148938c8f05 Binary files /dev/null and b/reference/figures/torch-full.png differ diff --git a/reference/figures/torch.png b/reference/figures/torch.png index 61d24b86074b110f4cf3298f417c4148938c8f05..5979d02181f69b5a53de418c149a7542531b0169 100644 Binary files a/reference/figures/torch.png and b/reference/figures/torch.png differ diff --git a/reference/index.html b/reference/index.html index de6c63a9ca2b1214c3e6614fe428667aade2d4ff..f32ea8ccffc8131899724df52ee6e648760ab390 100644 --- a/reference/index.html +++ b/reference/index.html @@ -38,6 +38,8 @@ + + @@ -54,6 +56,16 @@ + + + + @@ -71,7 +83,7 @@ torch - 0.0.3 + 0.1.0 @@ -144,6 +156,9 @@
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -157,7 +172,24 @@
  • - +
  • Reference
  • @@ -1897,12 +1929,24 @@ planes.

    + + + + + + + + @@ -1945,12 +1989,24 @@ planes.

    + + + + + + + + @@ -2029,6 +2085,12 @@ planes.

    + + + + @@ -2125,6 +2187,18 @@ planes.

    + + + + + + + + @@ -2163,6 +2237,12 @@ planes.

    planes.

    + + + + @@ -2212,12 +2292,60 @@ planes.

    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + @@ -2266,6 +2394,18 @@ planes.

    + + + + + + + + @@ -2320,6 +2460,18 @@ planes.

    + + + + + + + + @@ -2342,6 +2494,24 @@ planes.

    nn_utils_rnn_pad_sequence()

    + + + + + + + + + + + + @@ -2921,6 +3091,12 @@ planes.

    + + + + @@ -2956,6 +3132,57 @@ planes.

    optim_sgd()

    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/reference/install_torch.html b/reference/install_torch.html index c4e14bd5f2d0284222174d35f67c09d88664f51b..ef89e1dea06dc2606a47c57e602322fce6d31b05 100644 --- a/reference/install_torch.html +++ b/reference/install_torch.html @@ -38,6 +38,8 @@ + + @@ -55,6 +57,16 @@ + + + + @@ -72,7 +84,7 @@ torch - 0.0.3 + 0.1.0 @@ -145,6 +157,9 @@
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -158,7 +173,24 @@
  • - +
  • Reference
  • @@ -195,13 +227,13 @@

    Installs Torch and its dependencies.

    -
    install_torch(
    -  version = "1.5.0",
    -  type = install_type(version = version),
    -  reinstall = FALSE,
    -  path = install_path(),
    -  ...
    -)
    +
    install_torch(
    +  version = "1.5.0",
    +  type = install_type(version = version),
    +  reinstall = FALSE,
    +  path = install_path(),
    +  ...
    +)

    Arguments

    Binary cross entropy loss

    +

    nn_bce_with_logits_loss()

    +

    BCE with logits loss

    nn_bilinear()

    Bilinear module

    +

    nn_buffer()

    +

Creates an nn_buffer

    nn_celu()

ConvTranspose3D module

    +

    nn_cosine_embedding_loss()

    +

    Cosine embedding loss

    nn_cross_entropy_loss()

    CrossEntropyLoss module

    +

    nn_ctc_loss()

    +

    The Connectionist Temporal Classification loss.

    nn_dropout()

    Hardtanh module

    +

    nn_hinge_embedding_loss()

    +

    Hinge embedding loss

    nn_identity()

    Zeros initialization

    +

    nn_kl_div_loss()

    +

    Kullback-Leibler divergence loss

    +

    nn_l1_loss()

    +

    L1 loss

    nn_leaky_relu()

    +

    nn_margin_ranking_loss()

    +

    Margin ranking loss

    nn_max_pool1d()

    Holds submodules in a list.

    +

    nn_mse_loss()

    +

    MSE loss

    +

    nn_multi_margin_loss()

    +

    Multi margin loss

    nn_multihead_attention()

    MultiHead attention

    +

    nn_multilabel_margin_loss()

    +

    Multilabel margin loss

    +

    nn_multilabel_soft_margin_loss()

    +

    Multi label soft margin loss

    +

    nn_nll_loss()

    +

NLL loss

    +

    nn_pairwise_distance()

    +

    Pairwise distance

    +

    nn_parameter()

    +

    Creates an nn_parameter

    +

    nn_poisson_nll_loss()

    +

    Poisson NLL loss

    nn_prelu()

    Sigmoid module

    +

    nn_smooth_l1_loss()

    +

    Smooth L1 loss

    +

    nn_soft_margin_loss()

    +

    Soft margin loss

    nn_softmax()

Threshold module

    +

    nn_triplet_margin_loss()

    +

    Triplet margin loss

    +

    nn_triplet_margin_with_distance_loss()

    +

    Triplet margin with distance loss

    nn_utils_rnn_pack_padded_sequence()

    Pad a list of variable length Tensors with padding_value

    +

    is_nn_module()

    +

    Checks if the object is an nn_module

    +

    is_nn_parameter()

    +

Checks if an object is an nn_parameter

    +

    is_nn_buffer()

    +

Checks if the object is an nn_buffer

Triplet margin loss

    +

    nnf_triplet_margin_with_distance_loss()

    +

    Triplet margin with distance loss

    nnf_unfold()

    SGD optimizer

    +

    is_optimizer()

    +

    Checks if the object is a torch optimizer

    +

    Learning rate schedulers

    +

    +
    +

    lr_lambda()

    +

    Sets the learning rate of each parameter group to the initial lr +times a given function. When last_epoch=-1, sets initial lr as lr.

    +

    lr_multiplicative()

    +

    Multiply the learning rate of each parameter group by the factor given +in the specified function. When last_epoch=-1, sets initial lr as lr.

    +

    lr_one_cycle()

    +

One-cycle learning rate

    +

    lr_scheduler()

    +

    Creates learning rate schedulers

    +

    lr_step()

    +

    Step learning rate decay

    diff --git a/reference/is_dataloader.html b/reference/is_dataloader.html index 5688453efd44110f5f676760e6107ea02a8d5d8f..9a2501287078c04bdcddb11bc155417ec18600bd 100644 --- a/reference/is_dataloader.html +++ b/reference/is_dataloader.html @@ -38,6 +38,8 @@ + + @@ -55,6 +57,16 @@ + + + + @@ -72,7 +84,7 @@ torch - 0.0.3 + 0.1.0 @@ -145,6 +157,9 @@
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -158,7 +173,24 @@
  • - +
  • Reference
  • @@ -195,7 +227,7 @@

    Checks if the object is a dataloader

    -
    is_dataloader(x)
    +
    is_dataloader(x)

    Arguments

    diff --git a/reference/is_nn_buffer.html b/reference/is_nn_buffer.html new file mode 100644 index 0000000000000000000000000000000000000000..5a6ecbf6040c8756e4ddaa913ed0d84316107b55 --- /dev/null +++ b/reference/is_nn_buffer.html @@ -0,0 +1,269 @@ + + + + + + + + +Checks if the object is a nn_buffer — is_nn_buffer • torch + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    +
    + + + + +
    + +
    +
    + + +
    +

Checks if the object is an nn_buffer

    +
    + +
    is_nn_buffer(x)
    + +

    Arguments

    +
    + + + + + +
    x

    object to check

    + + +
    + +
    + + +
    + + +
    +

    Site built with pkgdown 1.6.1.

    +
    + +
    +
    + + + + + + + + diff --git a/reference/is_nn_module.html b/reference/is_nn_module.html new file mode 100644 index 0000000000000000000000000000000000000000..494a2d97ae3420347e46e770f9c479a23d227b5a --- /dev/null +++ b/reference/is_nn_module.html @@ -0,0 +1,269 @@ + + + + + + + + +Checks if the object is an nn_module — is_nn_module • torch + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    +
    + + + + +
    + +
    +
    + + +
    +

    Checks if the object is an nn_module

    +
    + +
    is_nn_module(x)
    + +

    Arguments

    + + + + + + +
    x

    object to check

    + + +
    + +
    + + +
    + + +
    +

    Site built with pkgdown 1.6.1.

    +
    + +
    +
    + + + + + + + + diff --git a/reference/is_nn_parameter.html b/reference/is_nn_parameter.html new file mode 100644 index 0000000000000000000000000000000000000000..5163543b1105e2df45ed042e93a4211f4153c28f --- /dev/null +++ b/reference/is_nn_parameter.html @@ -0,0 +1,269 @@ + + + + + + + + +Checks if an object is a nn_parameter — is_nn_parameter • torch + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    +
    + + + + +
    + +
    +
    + + +
    +

Checks if an object is an nn_parameter

    +
    + +
    is_nn_parameter(x)
    + +

    Arguments

    + + + + + + +
    x

    the object to check

    + + +
    + +
    + + +
    + + +
    +

    Site built with pkgdown 1.6.1.

    +
    + +
    +
    + + + + + + + + diff --git a/reference/is_optimizer.html b/reference/is_optimizer.html new file mode 100644 index 0000000000000000000000000000000000000000..1b831b798a64212f66477e0326f64aa8558895be --- /dev/null +++ b/reference/is_optimizer.html @@ -0,0 +1,269 @@ + + + + + + + + +Checks if the object is a torch optimizer — is_optimizer • torch + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    +
    + + + + +
    + +
    +
    + + +
    +

    Checks if the object is a torch optimizer

    +
    + +
    is_optimizer(x)
    + +

    Arguments

    + + + + + + +
    x

    object to check

    + + +
    + +
    + + +
    + + +
    +

    Site built with pkgdown 1.6.1.

    +
    + +
    +
    + + + + + + + + diff --git a/reference/is_torch_device.html b/reference/is_torch_device.html index 760d1dd2b622c2a846f65510fb7f67b6b3240893..36892e09cb0b9220ab70d6693c940c1382b254dd 100644 --- a/reference/is_torch_device.html +++ b/reference/is_torch_device.html @@ -38,6 +38,8 @@ + + @@ -55,6 +57,16 @@ + + + + @@ -72,7 +84,7 @@ torch - 0.0.3 + 0.1.0
    @@ -145,6 +157,9 @@
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -158,7 +173,24 @@
  • - +
  • Reference
  • @@ -195,7 +227,7 @@

Checks if the object is a device

    -
    is_torch_device(x)
    +
    is_torch_device(x)

    Arguments

    diff --git a/reference/is_torch_dtype.html b/reference/is_torch_dtype.html index 8315ff6b9479dc2603398f3365344014313443e8..7977b802bb8d96d247f0fecf7d1d7a88bdba6894 100644 --- a/reference/is_torch_dtype.html +++ b/reference/is_torch_dtype.html @@ -38,6 +38,8 @@ + + @@ -55,6 +57,16 @@ + + + + @@ -72,7 +84,7 @@ torch - 0.0.3 + 0.1.0 @@ -145,6 +157,9 @@
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -158,7 +173,24 @@
  • - +
  • Reference
  • @@ -195,7 +227,7 @@

Check if an object is a torch data type

    -
    is_torch_dtype(x)
    +
    is_torch_dtype(x)

    Arguments

    diff --git a/reference/is_torch_layout.html b/reference/is_torch_layout.html index 393fd39df5e2d35ca794a4e3f93f4f9ea1ccc2c7..03d83e7aac07852e79d8dba2e6d3cbb04489a78f 100644 --- a/reference/is_torch_layout.html +++ b/reference/is_torch_layout.html @@ -38,6 +38,8 @@ + + @@ -55,6 +57,16 @@ + + + + @@ -72,7 +84,7 @@ torch - 0.0.3 + 0.1.0 @@ -145,6 +157,9 @@
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -158,7 +173,24 @@
  • - +
  • Reference
  • @@ -195,7 +227,7 @@

    Check if an object is a torch layout.

    -
    is_torch_layout(x)
    +
    is_torch_layout(x)

    Arguments

    diff --git a/reference/is_torch_memory_format.html b/reference/is_torch_memory_format.html index a0e1b0f8d0324cf922a9dcb7fe218bf6dc7b113d..1fc2b8399e03ad6afe6a4d709b10094733dd029f 100644 --- a/reference/is_torch_memory_format.html +++ b/reference/is_torch_memory_format.html @@ -38,6 +38,8 @@ + + @@ -55,6 +57,16 @@ + + + + @@ -72,7 +84,7 @@ torch - 0.0.3 + 0.1.0 @@ -145,6 +157,9 @@
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -158,7 +173,24 @@
  • - +
  • Reference
  • @@ -195,7 +227,7 @@

    Check if an object is a memory format

    -
    is_torch_memory_format(x)
    +
    is_torch_memory_format(x)

    Arguments

    diff --git a/reference/is_torch_qscheme.html b/reference/is_torch_qscheme.html index aee7821a6c58996d50760bc7c39bf38b8e224ff1..3c40a69f0213a2696d498026e675ffffdba31dc5 100644 --- a/reference/is_torch_qscheme.html +++ b/reference/is_torch_qscheme.html @@ -38,6 +38,8 @@ + + @@ -55,6 +57,16 @@ + + + + @@ -72,7 +84,7 @@ torch - 0.0.3 + 0.1.0 @@ -145,6 +157,9 @@
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -158,7 +173,24 @@
  • - +
  • Reference
  • @@ -195,7 +227,7 @@

    Checks if an object is a QScheme

    -
    is_torch_qscheme(x)
    +
    is_torch_qscheme(x)

    Arguments

    diff --git a/reference/is_undefined_tensor.html b/reference/is_undefined_tensor.html index f2a893716d1e0587ab7c5faba286cf0beaa0ab95..a86303e26b1a4fd85f8e9e46f706a3429bc9613b 100644 --- a/reference/is_undefined_tensor.html +++ b/reference/is_undefined_tensor.html @@ -38,6 +38,8 @@ + + @@ -55,6 +57,16 @@ + + + + @@ -72,7 +84,7 @@ torch - 0.0.3 + 0.1.0 @@ -145,6 +157,9 @@
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -158,7 +173,24 @@
  • - +
  • Reference
  • @@ -195,7 +227,7 @@

    Checks if a tensor is undefined

    -
    is_undefined_tensor(x)
    +
    is_undefined_tensor(x)

    Arguments

    diff --git a/reference/load_state_dict.html b/reference/load_state_dict.html index 3cf56efae6c05836777966bc665d56c49eea8eb6..7d30785059f908c5e2866d453d7138e10c0ca40d 100644 --- a/reference/load_state_dict.html +++ b/reference/load_state_dict.html @@ -38,6 +38,8 @@ + + + + + + @@ -75,7 +87,7 @@ classes from the tensors in the dict." /> torch - 0.0.3 + 0.1.0 @@ -148,6 +160,9 @@ classes from the tensors in the dict." />
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -161,7 +176,24 @@ classes from the tensors in the dict." />
  • - +
  • Reference
  • @@ -201,7 +233,7 @@ For it to work correctly you need to use torch.save with the flag: classes from the tensors in the dict.

    -
    load_state_dict(path)
    +
    load_state_dict(path)

    Arguments

    diff --git a/reference/lr_lambda.html b/reference/lr_lambda.html new file mode 100644 index 0000000000000000000000000000000000000000..8483f0d528347a28c80ab0492fe6cbcf99e0cc0c --- /dev/null +++ b/reference/lr_lambda.html @@ -0,0 +1,305 @@ + + + + + + + + +Sets the learning rate of each parameter group to the initial lr +times a given function. When last_epoch=-1, sets initial lr as lr. — lr_lambda • torch + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    +
    + + + + +
    + +
    +
    + + +
    +

    Sets the learning rate of each parameter group to the initial lr +times a given function. When last_epoch=-1, sets initial lr as lr.

    +
    + +
    lr_lambda(optimizer, lr_lambda, last_epoch = -1, verbose = FALSE)
    + +

    Arguments

    +
    + + + + + + + + + + + + + + + + + +
    optimizer

    (Optimizer): Wrapped optimizer.

    lr_lambda

    (function or list): A function which computes a multiplicative +factor given an integer parameter epoch, or a list of such +functions, one for each group in optimizer.param_groups.

    last_epoch

    (int): The index of last epoch. Default: -1.

    verbose

    (bool): If TRUE, prints a message to stdout for +each update. Default: FALSE.

    + + +

    Examples

    +
    if (torch_is_installed()) { +# Assuming optimizer has two groups. +lambda1 <- function(epoch) epoch %/% 30 +lambda2 <- function(epoch) 0.95^epoch +if (FALSE) { +scheduler <- lr_lambda(optimizer, lr_lambda = list(lambda1, lambda2)) +for (epoch in 1:100) { + train(...) + validate(...) + scheduler$step() +} +} + +} +
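A self-contained variant of the sketch above, with an illustrative single-group optimizer:

p <- torch_tensor(1, requires_grad = TRUE)
optimizer <- optim_sgd(list(p), lr = 0.1)
scheduler <- lr_lambda(optimizer, lr_lambda = function(epoch) 0.9^epoch)
for (epoch in 1:3) {
  # optimizer$step() would run here after the training pass
  scheduler$step()
}
optimizer$param_groups[[1]]$lr   # 0.1 * 0.9^3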
    + + + + + + + + + + + + + + + diff --git a/reference/lr_multiplicative.html b/reference/lr_multiplicative.html new file mode 100644 index 0000000000000000000000000000000000000000..7ffb61b6a147c4aa4e10162aaa260720e196e95b --- /dev/null +++ b/reference/lr_multiplicative.html @@ -0,0 +1,303 @@ + + + + + + + + +Multiply the learning rate of each parameter group by the factor given +in the specified function. When last_epoch=-1, sets initial lr as lr. — lr_multiplicative • torch + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    +
    + + + + +
    + +
    +
    + + +
    +

    Multiply the learning rate of each parameter group by the factor given +in the specified function. When last_epoch=-1, sets initial lr as lr.

    +
    + +
    lr_multiplicative(optimizer, lr_lambda, last_epoch = -1, verbose = FALSE)
    + +

    Arguments

    + + + + + + + + + + + + + + + + + + +
    optimizer

    (Optimizer): Wrapped optimizer.

    lr_lambda

    (function or list): A function which computes a multiplicative +factor given an integer parameter epoch, or a list of such +functions, one for each group in optimizer.param_groups.

    last_epoch

    (int): The index of last epoch. Default: -1.

    verbose

    (bool): If TRUE, prints a message to stdout for +each update. Default: FALSE.

    + + +

    Examples

    +
    if (torch_is_installed()) { +if (FALSE) { +lmbda <- function(epoch) 0.95 +scheduler <- lr_multiplicative(optimizer, lr_lambda=lmbda) +for (epoch in 1:100) { + train(...) + validate(...) + scheduler$step() +} +} + +} +
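A runnable variant of the same idea (the optimizer is chosen for illustration):

p <- torch_tensor(1, requires_grad = TRUE)
optimizer <- optim_sgd(list(p), lr = 0.1)
scheduler <- lr_multiplicative(optimizer, lr_lambda = function(epoch) 0.95)
scheduler$step()
optimizer$param_groups[[1]]$lr   # 0.1 * 0.95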
    +
    + +
    + + +
    + + +
    +

    Site built with pkgdown 1.6.1.

    +
    + +
    +
    + + + + + + + + diff --git a/reference/lr_one_cycle.html b/reference/lr_one_cycle.html new file mode 100644 index 0000000000000000000000000000000000000000..67b01b27b779c31a5a5d3e4b9e3c3968f3877289 --- /dev/null +++ b/reference/lr_one_cycle.html @@ -0,0 +1,417 @@ + + + + + + + + +Once cycle learning rate — lr_one_cycle • torch + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    +
    + + + + +
    + +
    +
    + + +
    +

    Sets the learning rate of each parameter group according to the +1cycle learning rate policy. The 1cycle policy anneals the learning +rate from an initial learning rate to some maximum learning rate and then +from that maximum learning rate to some minimum learning rate much lower +than the initial learning rate.

    +
    + +
    lr_one_cycle(
    +  optimizer,
    +  max_lr,
    +  total_steps = NULL,
    +  epochs = NULL,
    +  steps_per_epoch = NULL,
    +  pct_start = 0.3,
    +  anneal_strategy = "cos",
    +  cycle_momentum = TRUE,
    +  base_momentum = 0.85,
    +  max_momentum = 0.95,
    +  div_factor = 25,
    +  final_div_factor = 10000,
    +  last_epoch = -1,
    +  verbose = FALSE
    +)
    + +

    Arguments

    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    optimizer

    (Optimizer): Wrapped optimizer.

    max_lr

    (float or list): Upper learning rate boundaries in the cycle +for each parameter group.

    total_steps

    (int): The total number of steps in the cycle. Note that +if a value is not provided here, then it must be inferred by providing +a value for epochs and steps_per_epoch. +Default: NULL

    epochs

    (int): The number of epochs to train for. This is used along +with steps_per_epoch in order to infer the total number of steps in the cycle +if a value for total_steps is not provided. +Default: NULL

    steps_per_epoch

    (int): The number of steps per epoch to train for. This is +used along with epochs in order to infer the total number of steps in the +cycle if a value for total_steps is not provided. +Default: NULL

    pct_start

    (float): The percentage of the cycle (in number of steps) spent +increasing the learning rate. +Default: 0.3

    anneal_strategy

    (str): 'cos', 'linear' +Specifies the annealing strategy: "cos" for cosine annealing, "linear" for +linear annealing. +Default: 'cos'

    cycle_momentum

    (bool): If TRUE, momentum is cycled inversely +to learning rate between 'base_momentum' and 'max_momentum'. +Default: TRUE

    base_momentum

    (float or list): Lower momentum boundaries in the cycle +for each parameter group. Note that momentum is cycled inversely +to learning rate; at the peak of a cycle, momentum is +'base_momentum' and learning rate is 'max_lr'. +Default: 0.85

    max_momentum

    (float or list): Upper momentum boundaries in the cycle +for each parameter group. Functionally, +it defines the cycle amplitude (max_momentum - base_momentum). +Note that momentum is cycled inversely +to learning rate; at the start of a cycle, momentum is 'max_momentum' +and learning rate is 'base_lr' +Default: 0.95

    div_factor

    (float): Determines the initial learning rate via +initial_lr = max_lr/div_factor +Default: 25

    final_div_factor

    (float): Determines the minimum learning rate via +min_lr = initial_lr/final_div_factor +Default: 1e4

    last_epoch

    (int): The index of the last batch. This parameter is used when +resuming a training job. Since step() should be invoked after each +batch instead of after each epoch, this number represents the total +number of batches computed, not the total number of epochs computed. +When last_epoch=-1, the schedule is started from the beginning. +Default: -1

    verbose

    (bool): If TRUE, prints a message to stdout for +each update. Default: FALSE.

    + +

    Details

    + +

    This policy was initially described in the paper +Super-Convergence: Very Fast Training of Neural Networks Using Large Learning Rates.

    +

    The 1cycle learning rate policy changes the learning rate after every batch. +step should be called after a batch has been used for training. +This scheduler is not chainable.

    +

    Note also that the total number of steps in the cycle can be determined in one +of two ways (listed in order of precedence):

      +
    • A value for total_steps is explicitly provided.

    • +
    • A number of epochs (epochs) and a number of steps per epoch +(steps_per_epoch) are provided.

    • +
    + +

    In this case, the number of total steps is inferred by +total_steps = epochs * steps_per_epoch

    +

    You must either provide a value for total_steps or provide a value for both +epochs and steps_per_epoch.

    + +

    Examples

    +
    if (torch_is_installed()) { +if (FALSE) { +data_loader <- dataloader(...) +optimizer <- optim_sgd(model$parameters, lr=0.1, momentum=0.9) +scheduler <- lr_one_cycle(optimizer, max_lr=0.01, steps_per_epoch=length(data_loader), + epochs=10) + +for (i in 1:epochs) { + for (batch in enumerate(data_loader)) { + train_batch(...) + scheduler$step() + } +} +} + +} +
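A minimal self-contained sketch; momentum is supplied because cycle_momentum = TRUE by default, and the scheduler steps once per batch:

p <- torch_tensor(1, requires_grad = TRUE)
optimizer <- optim_sgd(list(p), lr = 0.1, momentum = 0.9)
scheduler <- lr_one_cycle(optimizer, max_lr = 0.01, total_steps = 100)
scheduler$step()   # one batch worth of annealing
optimizer$param_groups[[1]]$lr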
    +
    + +
    + + +
    + + +
    +

    Site built with pkgdown 1.6.1.

    +
    + +
    +
    + + + + + + + + diff --git a/reference/lr_scheduler.html b/reference/lr_scheduler.html new file mode 100644 index 0000000000000000000000000000000000000000..05b90cf19256c7203dc2d95889f9762a28b3d391 --- /dev/null +++ b/reference/lr_scheduler.html @@ -0,0 +1,288 @@ + + + + + + + + +Creates learning rate schedulers — lr_scheduler • torch + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    +
    + + + + +
    + +
    +
    + + +
    +

    Creates learning rate schedulers

    +
    + +
    lr_scheduler(
    +  classname = NULL,
    +  inherit = LRScheduler,
    +  ...,
    +  parent_env = parent.frame()
    +)
    + +
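A hedged sketch of a custom scheduler; base_lrs and last_epoch are assumed to be fields supplied by the LRScheduler base class:

lr_halving <- lr_scheduler(
  classname = "lr_halving",
  get_lr = function() {
    # halve the initial lr on every step
    lapply(self$base_lrs, function(lr) lr * 0.5^self$last_epoch)
  }
)
# scheduler <- lr_halving(optimizer); scheduler$step()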

    Arguments

    + + + + + + + + + + + + + + + + + + +
    classname

    optional name for the learning rate scheduler

    inherit

    an optional learning rate scheduler to inherit from

    ...

    named list of methods. You must implement the get_lr() +method that doesn't take any argument and returns learning rates +for each param_group in the optimizer.

    parent_env

    passed to R6::R6Class().

    + + +
    + +
    + + +
    + + +
    +

    Site built with pkgdown 1.6.1.

    +
    + +
    +
    + + + + + + + + diff --git a/reference/lr_step.html b/reference/lr_step.html new file mode 100644 index 0000000000000000000000000000000000000000..0d80a7315c2045cba50e6e426cbc27586d839002 --- /dev/null +++ b/reference/lr_step.html @@ -0,0 +1,306 @@ + + + + + + + + +Step learning rate decay — lr_step • torch + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    +
    + + + + +
    + +
    +
    + + +
    +

    Decays the learning rate of each parameter group by gamma every +step_size epochs. Notice that such decay can happen simultaneously with +other changes to the learning rate from outside this scheduler. When +last_epoch=-1, sets initial lr as lr.

    +
    + +
    lr_step(optimizer, step_size, gamma = 0.1, last_epoch = -1)
    + +

    Arguments

    + + + + + + + + + + + + + + + + + + +
    optimizer

    (Optimizer): Wrapped optimizer.

    step_size

    (int): Period of learning rate decay.

    gamma

    (float): Multiplicative factor of learning rate decay. +Default: 0.1.

    last_epoch

    (int): The index of last epoch. Default: -1.

    + + +

    Examples

    +
    if (torch_is_installed()) { +if (FALSE) { +# Assuming optimizer uses lr = 0.05 for all groups +# lr = 0.05 if epoch < 30 +# lr = 0.005 if 30 <= epoch < 60 +# lr = 0.0005 if 60 <= epoch < 90 +# ... +scheduler <- lr_step(optimizer, step_size=30, gamma=0.1) +for (epoch in 1:100) { + train(...) + validate(...) + scheduler$step() +} +} + +} +
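A runnable variant of the schedule sketched in the comments (the optimizer is chosen for illustration):

p <- torch_tensor(1, requires_grad = TRUE)
optimizer <- optim_sgd(list(p), lr = 0.05)
scheduler <- lr_step(optimizer, step_size = 2, gamma = 0.1)
for (epoch in 1:4) scheduler$step()
optimizer$param_groups[[1]]$lr   # decayed twice: 0.05 * 0.1^2 = 5e-04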
    +
    + +
    + + +
    + + +
    +

    Site built with pkgdown 1.6.1.

    +
    + +
    +
    + + + + + + + + diff --git a/reference/nn_adaptive_avg_pool1d.html b/reference/nn_adaptive_avg_pool1d.html index 40bd9d2f8ca8e8d820924ca0ebecba26e5273ed0..d4e13c1c85e65b49a5ff7294cbc741889c9b455a 100644 --- a/reference/nn_adaptive_avg_pool1d.html +++ b/reference/nn_adaptive_avg_pool1d.html @@ -38,6 +38,8 @@ + + + + + + @@ -73,7 +85,7 @@ The number of output features is equal to the number of input planes." /> torch - 0.0.3 + 0.1.0 @@ -146,6 +158,9 @@ The number of output features is equal to the number of input planes." />
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -159,7 +174,24 @@ The number of output features is equal to the number of input planes." />
  • - +
  • Reference
  • @@ -197,7 +229,7 @@ The number of output features is equal to the number of input planes." /> The number of output features is equal to the number of input planes.

    -
    nn_adaptive_avg_pool1d(output_size)
    +
    nn_adaptive_avg_pool1d(output_size)

    Arguments

    @@ -210,13 +242,13 @@ The number of output features is equal to the number of input planes.

    Examples

    -
    if (torch_is_installed()) { +
    if (torch_is_installed()) { # target output size of 5 -m = nn_adaptive_avg_pool1d(5) -input <- torch_randn(1, 64, 8) -output <- m(input) +m = nn_adaptive_avg_pool1d(5) +input <- torch_randn(1, 64, 8) +output <- m(input) -} +}
    @@ -146,6 +158,9 @@ The number of output features is equal to the number of input planes." />
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -159,7 +174,24 @@ The number of output features is equal to the number of input planes." />
  • - +
  • Reference
  • @@ -197,7 +229,7 @@ The number of output features is equal to the number of input planes." /> The number of output features is equal to the number of input planes.

    -
    nn_adaptive_avg_pool2d(output_size)
    +
    nn_adaptive_avg_pool2d(output_size)

    Arguments

    @@ -213,17 +245,17 @@ be the same as that of the input.

    Examples

    -
    if (torch_is_installed()) { +
    if (torch_is_installed()) { # target output size of 5x7 -m <- nn_adaptive_avg_pool2d(c(5,7)) -input <- torch_randn(1, 64, 8, 9) -output <- m(input) +m <- nn_adaptive_avg_pool2d(c(5,7)) +input <- torch_randn(1, 64, 8, 9) +output <- m(input) # target output size of 7x7 (square) -m <- nn_adaptive_avg_pool2d(7) -input <- torch_randn(1, 64, 10, 9) -output <- m(input) +m <- nn_adaptive_avg_pool2d(7) +input <- torch_randn(1, 64, 10, 9) +output <- m(input) -} +}
    @@ -146,6 +158,9 @@ The number of output features is equal to the number of input planes." />
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -159,7 +174,24 @@ The number of output features is equal to the number of input planes." />
  • - +
  • Reference
  • @@ -197,7 +229,7 @@ The number of output features is equal to the number of input planes." /> The number of output features is equal to the number of input planes.

    -
    nn_adaptive_avg_pool3d(output_size)
    +
    nn_adaptive_avg_pool3d(output_size)

    Arguments

    @@ -213,17 +245,17 @@ be the same as that of the input.

    Examples

    -
    if (torch_is_installed()) { +
    if (torch_is_installed()) { # target output size of 5x7x9 -m <- nn_adaptive_avg_pool3d(c(5,7,9)) -input <- torch_randn(1, 64, 8, 9, 10) -output <- m(input) +m <- nn_adaptive_avg_pool3d(c(5,7,9)) +input <- torch_randn(1, 64, 8, 9, 10) +output <- m(input) # target output size of 7x7x7 (cube) -m <- nn_adaptive_avg_pool3d(7) -input <- torch_randn(1, 64, 10, 9, 8) -output <- m(input) +m <- nn_adaptive_avg_pool3d(7) +input <- torch_randn(1, 64, 10, 9, 8) +output <- m(input) -} +}
    @@ -146,6 +158,9 @@ Efficient softmax approximation for GPUs by Edouard Grave, Armand Joulin, Mousta
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -159,7 +174,24 @@ Efficient softmax approximation for GPUs by Edouard Grave, Armand Joulin, Mousta
  • - +
  • Reference
  • @@ -197,13 +229,13 @@ Efficient softmax approximation for GPUs by Edouard Grave, Armand Joulin, Mousta Efficient softmax approximation for GPUs by Edouard Grave, Armand Joulin, Moustapha Cissé, David Grangier, and Hervé Jégou

    -
    nn_adaptive_log_softmax_with_loss(
    -  in_features,
    -  n_classes,
    -  cutoffs,
    -  div_value = 4,
    -  head_bias = FALSE
    -)
    +
    nn_adaptive_log_softmax_with_loss(
    +  in_features,
    +  n_classes,
    +  cutoffs,
    +  div_value = 4,
    +  head_bias = FALSE
    +)

    Arguments

    diff --git a/reference/nn_adaptive_max_pool1d.html b/reference/nn_adaptive_max_pool1d.html index 35300f6438b6d1a2ebef4fa742c44cb2d148c61a..a48f501a36324b1e72ef8edd9cd996c4c5cfcc58 100644 --- a/reference/nn_adaptive_max_pool1d.html +++ b/reference/nn_adaptive_max_pool1d.html @@ -38,6 +38,8 @@ + + + + + + @@ -73,7 +85,7 @@ The number of output features is equal to the number of input planes." /> torch - 0.0.3 + 0.1.0 @@ -146,6 +158,9 @@ The number of output features is equal to the number of input planes." />
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -159,7 +174,24 @@ The number of output features is equal to the number of input planes." />
  • - +
  • Reference
  • @@ -197,7 +229,7 @@ The number of output features is equal to the number of input planes." /> The number of output features is equal to the number of input planes.

    -
    nn_adaptive_max_pool1d(output_size, return_indices = FALSE)
    +
    nn_adaptive_max_pool1d(output_size, return_indices = FALSE)

    Arguments

    @@ -215,13 +247,13 @@ Useful to pass to nn_max_unpool1d()Examples -
    if (torch_is_installed()) { +
    if (torch_is_installed()) { # target output size of 5 -m <- nn_adaptive_max_pool1d(5) -input <- torch_randn(1, 64, 8) -output <- m(input) +m <- nn_adaptive_max_pool1d(5) +input <- torch_randn(1, 64, 8) +output <- m(input) -} +}
    @@ -146,6 +158,9 @@ The number of output features is equal to the number of input planes." />
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -159,7 +174,24 @@ The number of output features is equal to the number of input planes." />
  • - +
  • Reference
  • @@ -197,7 +229,7 @@ The number of output features is equal to the number of input planes." /> The number of output features is equal to the number of input planes.

    -
    nn_adaptive_max_pool2d(output_size, return_indices = FALSE)
    +
    nn_adaptive_max_pool2d(output_size, return_indices = FALSE)

    Arguments

    @@ -218,17 +250,17 @@ Useful to pass to nn_max_unpool2d()Examples -
    if (torch_is_installed()) { +
    if (torch_is_installed()) { # target output size of 5x7 -m <- nn_adaptive_max_pool2d(c(5,7)) -input <- torch_randn(1, 64, 8, 9) -output <- m(input) +m <- nn_adaptive_max_pool2d(c(5,7)) +input <- torch_randn(1, 64, 8, 9) +output <- m(input) # target output size of 7x7 (square) -m <- nn_adaptive_max_pool2d(7) -input <- torch_randn(1, 64, 10, 9) -output <- m(input) +m <- nn_adaptive_max_pool2d(7) +input <- torch_randn(1, 64, 10, 9) +output <- m(input) -} +}
    @@ -146,6 +158,9 @@ The number of output features is equal to the number of input planes." />
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -159,7 +174,24 @@ The number of output features is equal to the number of input planes." />
  • - +
  • Reference
  • @@ -197,7 +229,7 @@ The number of output features is equal to the number of input planes." /> The number of output features is equal to the number of input planes.

    -
    nn_adaptive_max_pool3d(output_size, return_indices = FALSE)
    +
    nn_adaptive_max_pool3d(output_size, return_indices = FALSE)

    Arguments

    @@ -218,17 +250,17 @@ Useful to pass to nn_max_unpool3d()Examples -
    if (torch_is_installed()) { +
    if (torch_is_installed()) { # target output size of 5x7x9 -m <- nn_adaptive_max_pool3d(c(5,7,9)) -input <- torch_randn(1, 64, 8, 9, 10) -output <- m(input) +m <- nn_adaptive_max_pool3d(c(5,7,9)) +input <- torch_randn(1, 64, 8, 9, 10) +output <- m(input) # target output size of 7x7x7 (cube) -m <- nn_adaptive_max_pool3d(7) -input <- torch_randn(1, 64, 10, 9, 8) -output <- m(input) +m <- nn_adaptive_max_pool3d(7) +input <- torch_randn(1, 64, 10, 9, 8) +output <- m(input) -} +}
    @@ -153,6 +165,9 @@ $$" />
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -166,7 +181,24 @@ $$" />
  • - +
  • Reference
  • @@ -210,13 +242,13 @@ can be precisely described as:

    $$

    -
    nn_avg_pool1d(
    -  kernel_size,
    -  stride = NULL,
    -  padding = 0,
    -  ceil_mode = FALSE,
    -  count_include_pad = TRUE
    -)
    +
    nn_avg_pool1d(
    +  kernel_size,
    +  stride = NULL,
    +  padding = 0,
    +  ceil_mode = FALSE,
    +  count_include_pad = TRUE
    +)

    Arguments

    @@ -264,16 +296,16 @@ an int or a one-element tuple.

    $$

    Examples

    -
    if (torch_is_installed()) { +
    if (torch_is_installed()) { # pool with window of size=3, stride=2 -m <- nn_avg_pool1d(3, stride=2) -m(torch_randn(1, 1, 8)) +m <- nn_avg_pool1d(3, stride=2) +m(torch_randn(1, 1, 8)) -} +}
    #> torch_tensor #> (1,.,.) = -#> 0.3975 -0.0079 -0.0240 +#> -0.1279 0.8035 0.4940 #> [ CPUFloatType{1,1,3} ]
    @@ -153,6 +165,9 @@ $$" />
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -166,7 +181,24 @@ $$" />
  • - +
  • Reference
  • @@ -210,14 +242,14 @@ input(N_i, C_j, stride[0] \times h + m, stride[1] \times w + n) $$

    -
    nn_avg_pool2d(
    -  kernel_size,
    -  stride = NULL,
    -  padding = 0,
    -  ceil_mode = FALSE,
    -  count_include_pad = TRUE,
    -  divisor_override = NULL
    -)
    +
    nn_avg_pool2d(
    +  kernel_size,
    +  stride = NULL,
    +  padding = 0,
    +  ceil_mode = FALSE,
    +  count_include_pad = TRUE,
    +  divisor_override = NULL
    +)

    Arguments

    @@ -277,16 +309,16 @@ $$ $$

    Examples

    -
    if (torch_is_installed()) { +
    if (torch_is_installed()) { # pool of square window of size=3, stride=2 -m <- nn_avg_pool2d(3, stride=2) +m <- nn_avg_pool2d(3, stride=2) # pool of non-square window -m <- nn_avg_pool2d(c(3, 2), stride=c(2, 1)) -input <- torch_randn(20, 16, 50, 32) -output <- m(input) +m <- nn_avg_pool2d(c(3, 2), stride=c(2, 1)) +input <- torch_randn(20, 16, 50, 32) +output <- m(input) -} +}
    @@ -155,6 +167,9 @@ $$" />
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -168,7 +183,24 @@ $$" />
  • - +
  • Reference
  • @@ -214,14 +246,14 @@ can be precisely described as:

    $$

    -
    nn_avg_pool3d(
    -  kernel_size,
    -  stride = NULL,
    -  padding = 0,
    -  ceil_mode = FALSE,
    -  count_include_pad = TRUE,
    -  divisor_override = NULL
    -)
    +
    nn_avg_pool3d(
    +  kernel_size,
    +  stride = NULL,
    +  padding = 0,
    +  ceil_mode = FALSE,
    +  count_include_pad = TRUE,
    +  divisor_override = NULL
    +)

    Arguments

    @@ -285,16 +317,16 @@ $$ $$

    Examples

    -
    if (torch_is_installed()) { +
    if (torch_is_installed()) { # pool of square window of size=3, stride=2 -m = nn_avg_pool3d(3, stride=2) +m = nn_avg_pool3d(3, stride=2) # pool of non-square window -m = nn_avg_pool3d(c(3, 2, 2), stride=c(2, 1, 2)) -input = torch_randn(20, 16, 50,44, 31) -output = m(input) +m = nn_avg_pool3d(c(3, 2, 2), stride=c(2, 1, 2)) +input = torch_randn(20, 16, 50,44, 31) +output = m(input) -} +}
    @@ -147,6 +159,9 @@ Batch Normalization: Accelerating Deep Network Training by Reducing Internal Cov
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -160,7 +175,24 @@ Batch Normalization: Accelerating Deep Network Training by Reducing Internal Cov
  • - +
  • Reference
  • @@ -199,13 +231,13 @@ inputs with optional additional channel dimension) as described in the paper Batch Normalization: Accelerating Deep Network Training by Reducing Internal Covariate Shift

    -
    nn_batch_norm1d(
    -  num_features,
    -  eps = 1e-05,
    -  momentum = 0.1,
    -  affine = TRUE,
    -  track_running_stats = TRUE
    -)
    +
    nn_batch_norm1d(
    +  num_features,
    +  eps = 1e-05,
    +  momentum = 0.1,
    +  affine = TRUE,
    +  track_running_stats = TRUE
    +)

    Arguments

    @@ -280,15 +312,15 @@ on (N, L) slices, it's common terminology to call this Temporal Bat

    Examples

    -
    if (torch_is_installed()) { +
    if (torch_is_installed()) { # With Learnable Parameters -m <- nn_batch_norm1d(100) +m <- nn_batch_norm1d(100) # Without Learnable Parameters -m <- nn_batch_norm1d(100, affine = FALSE) -input <- torch_randn(20, 100) -output <- m(input) +m <- nn_batch_norm1d(100, affine = FALSE) +input <- torch_randn(20, 100) +output <- m(input) -} +}
    @@ -147,6 +159,9 @@ Batch Normalization: Accelerating Deep Network Training by Reducing Internal Cov
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -160,7 +175,24 @@ Batch Normalization: Accelerating Deep Network Training by Reducing Internal Cov
  • - +
  • Reference
  • @@ -199,13 +231,13 @@ additional channel dimension) as described in the paper Batch Normalization: Accelerating Deep Network Training by Reducing Internal Covariate Shift.

    -
    nn_batch_norm2d(
    -  num_features,
    -  eps = 1e-05,
    -  momentum = 0.1,
    -  affine = TRUE,
    -  track_running_stats = TRUE
    -)
    +
    nn_batch_norm2d(
    +  num_features,
    +  eps = 1e-05,
    +  momentum = 0.1,
    +  affine = TRUE,
    +  track_running_stats = TRUE
    +)

    Arguments

    @@ -279,15 +311,15 @@ on (N, H, W) slices, it's common terminology to call this Spatial B

    Examples

    -
    if (torch_is_installed()) { +
    if (torch_is_installed()) { # With Learnable Parameters -m <- nn_batch_norm2d(100) +m <- nn_batch_norm2d(100) # Without Learnable Parameters -m <- nn_batch_norm2d(100, affine=FALSE) -input <- torch_randn(20, 100, 35, 45) -output <- m(input) +m <- nn_batch_norm2d(100, affine=FALSE) +input <- torch_randn(20, 100, 35, 45) +output <- m(input) -} +}
    @@ -146,6 +158,9 @@ between the target and the output:" />
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -159,7 +174,24 @@ between the target and the output:" />
  • - +
  • Reference
  • @@ -197,7 +229,7 @@ between the target and the output:" /> between the target and the output:

    -
    nn_bce_loss(weight = NULL, reduction = "mean")
    +
    nn_bce_loss(weight = NULL, reduction = "mean")

    Arguments

    @@ -264,15 +296,15 @@ shape as input.

    Examples

    -
    if (torch_is_installed()) { -m <- nn_sigmoid() -loss <- nn_bce_loss() -input <- torch_randn(3, requires_grad=TRUE) -target <- torch_rand(3) -output <- loss(m(input), target) -output$backward() - -} +
    if (torch_is_installed()) { +m <- nn_sigmoid() +loss <- nn_bce_loss() +input <- torch_randn(3, requires_grad=TRUE) +target <- torch_rand(3) +output <- loss(m(input), target) +output$backward() + +}
    + + + + + + + + + + + + + +
    weight

    (Tensor, optional): a manual rescaling weight given to the loss +of each batch element. If given, has to be a Tensor of size nbatch.

    reduction

    (string, optional): Specifies the reduction to apply to the output: +'none' | 'mean' | 'sum'. 'none': no reduction will be applied, +'mean': the sum of the output will be divided by the number of +elements in the output, 'sum': the output will be summed. Note: size_average +and reduce are in the process of being deprecated, and in the meantime, +specifying either of those two args will override reduction. Default: 'mean'

    pos_weight

    (Tensor, optional): a weight of positive examples. +Must be a vector with length equal to the number of classes.

    + +

    Details

    + +

    The unreduced (i.e. with reduction set to 'none') loss can be described as:

    +

    $$ + \ell(x, y) = L = \{l_1,\dots,l_N\}^\top, \quad +l_n = - w_n \left[ y_n \cdot \log \sigma(x_n) + + (1 - y_n) \cdot \log (1 - \sigma(x_n)) \right], +$$

    +

    where \(N\) is the batch size. If reduction is not 'none' +(default 'mean'), then

    +

    $$ + \ell(x, y) = \begin{array}{ll} +\mbox{mean}(L), & \mbox{if reduction} = \mbox{'mean';}\\ +\mbox{sum}(L), & \mbox{if reduction} = \mbox{'sum'.} +\end{array} +$$

    +

This is used for measuring the error of a reconstruction in, for example, +an auto-encoder. Note that the targets t[i] should be numbers +between 0 and 1. +It's possible to trade off recall and precision by adding weights to positive examples. +In the case of multi-label classification, the loss can be described as:

    +

    $$ +\ell_c(x, y) = L_c = \{l_{1,c},\dots,l_{N,c}\}^\top, \quad +l_{n,c} = - w_{n,c} \left[ p_c y_{n,c} \cdot \log \sigma(x_{n,c}) ++ (1 - y_{n,c}) \cdot \log (1 - \sigma(x_{n,c})) \right], +$$ +where \(c\) is the class number (\(c > 1\) for multi-label binary +classification,

    +

    \(c = 1\) for single-label binary classification), +\(n\) is the number of the sample in the batch and +\(p_c\) is the weight of the positive answer for the class \(c\). +\(p_c > 1\) increases the recall, \(p_c < 1\) increases the precision. +For example, if a dataset contains 100 positive and 300 negative examples of a single class, +then pos_weight for the class should be equal to \(\frac{300}{100}=3\). +The loss would act as if the dataset contains \(3\times 100=300\) positive examples.

    +

    Shape

    + + + +
      +
    • Input: \((N, *)\) where \(*\) means, any number of additional dimensions

    • +
    • Target: \((N, *)\), same shape as the input

    • +
    • Output: scalar. If reduction is 'none', then \((N, *)\), same +shape as input.

    • +
    + + +

    Examples

    +
    if (torch_is_installed()) { +loss <- nn_bce_with_logits_loss() +input <- torch_randn(3, requires_grad=TRUE) +target <- torch_empty(3)$random_(1, 2) +output <- loss(input, target) +output$backward() + +target <- torch_ones(10, 64, dtype=torch_float32()) # 64 classes, batch size = 10 +output <- torch_full(c(10, 64), 1.5) # A prediction (logit) +pos_weight <- torch_ones(64) # All weights are equal to 1 +criterion <- nn_bce_with_logits_loss(pos_weight=pos_weight) +criterion(output, target) # -log(sigmoid(1.5)) + +} +
    #> torch_tensor +#> 0.201413 +#> [ CPUFloatType{} ]
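The printed value can be checked by hand: with every logit equal to 1.5 and unit weights, the loss reduces to -log(sigmoid(1.5)).

-log(1 / (1 + exp(-1.5)))   # 0.2014..., matching the output above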
    + + + + + +
    + + +
    +

    Site built with pkgdown 1.6.1.

    +
    + +
    + + + + + + + + + diff --git a/reference/nn_bilinear.html b/reference/nn_bilinear.html index b2507d1260d17d733fb842bd37679aebbe33fab5..3cae1b8a54e2703d8311e36fa0d82b9cc11f3e73 100644 --- a/reference/nn_bilinear.html +++ b/reference/nn_bilinear.html @@ -38,6 +38,8 @@ + + + + @@ -73,7 +85,7 @@ torch - 0.0.3 + 0.1.0 @@ -146,6 +158,9 @@
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -159,7 +174,24 @@
  • - +
  • Reference
  • @@ -197,7 +229,7 @@ \(y = x_1^T A x_2 + b\)

    -
    nn_bilinear(in1_features, in2_features, out_features, bias = TRUE)
    +
    nn_bilinear(in1_features, in2_features, out_features, bias = TRUE)

    Arguments

    @@ -251,14 +283,14 @@ If bias is TRUE, the values are initialized from

    Examples

    -
    if (torch_is_installed()) { -m <- nn_bilinear(20, 30, 50) -input1 <- torch_randn(128, 20) -input2 <- torch_randn(128, 30) -output = m(input1, input2) -print(output$size()) - -} +
    if (torch_is_installed()) { +m <- nn_bilinear(20, 30, 50) +input1 <- torch_randn(128, 20) +input2 <- torch_randn(128, 30) +output = m(input1, input2) +print(output$size()) + +}
    #> [1] 128 50
    + + + + + + + + + +
    x

    the tensor that will be converted to nn_buffer

    persistent

    whether the buffer should be persistent or not.

    + + + + + + + +
    + + +
    +

    Site built with pkgdown 1.6.1.

    +
    + +
    + + + + + + + + + diff --git a/reference/nn_celu.html b/reference/nn_celu.html index a74105dc471c2b194da51ba44a0b782293be2808..84feb68b1ce35011ec10573c9fc7f71061b6fc28 100644 --- a/reference/nn_celu.html +++ b/reference/nn_celu.html @@ -38,6 +38,8 @@ + + @@ -55,6 +57,16 @@ + + + + @@ -72,7 +84,7 @@ torch - 0.0.3 + 0.1.0 @@ -145,6 +157,9 @@
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -158,7 +173,24 @@
  • - +
  • Reference
  • @@ -195,7 +227,7 @@

    Applies the element-wise function:

    -
    nn_celu(alpha = 1, inplace = FALSE)
    +
    nn_celu(alpha = 1, inplace = FALSE)

    Arguments

    @@ -229,12 +261,12 @@ dimensions

    Examples

    -
    if (torch_is_installed()) { -m <- nn_celu() -input <- torch_randn(2) -output <- m(input) +
    if (torch_is_installed()) { +m <- nn_celu() +input <- torch_randn(2) +output <- m(input) -} +}
    @@ -149,6 +161,9 @@ precisely described as:" />
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -162,7 +177,24 @@ precisely described as:" />
  • - +
  • Reference
  • @@ -203,17 +235,17 @@ In the simplest case, the output value of the layer with input size precisely described as:

    -
    nn_conv1d(
    -  in_channels,
    -  out_channels,
    -  kernel_size,
    -  stride = 1,
    -  padding = 0,
    -  dilation = 1,
    -  groups = 1,
    -  bias = TRUE,
    -  padding_mode = "zeros"
    -)
    +
    nn_conv1d(
    +  in_channels,
    +  out_channels,
    +  kernel_size,
    +  stride = 1,
    +  padding = 0,
    +  dilation = 1,
    +  groups = 1,
    +  bias = TRUE,
    +  padding_mode = "zeros"
    +)

    Arguments

    @@ -340,12 +372,12 @@ sampled from \(\mathcal{U}(-\sqrt{k}, \sqrt{k})\) where

    Examples

    -
    if (torch_is_installed()) { -m <- nn_conv1d(16, 33, 3, stride=2) -input <- torch_randn(20, 16, 50) -output <- m(input) +
    if (torch_is_installed()) { +m <- nn_conv1d(16, 33, 3, stride=2) +input <- torch_randn(20, 16, 50) +output <- m(input) -} +}
    @@ -146,6 +158,9 @@ planes." />
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -159,7 +174,24 @@ planes." />
  • - +
  • Reference
  • @@ -197,17 +229,17 @@ planes." /> planes.

    -
    nn_conv2d(
    -  in_channels,
    -  out_channels,
    -  kernel_size,
    -  stride = 1,
    -  padding = 0,
    -  dilation = 1,
    -  groups = 1,
    -  bias = TRUE,
    -  padding_mode = "zeros"
    -)
    +
    nn_conv2d(
    +  in_channels,
    +  out_channels,
    +  kernel_size,
    +  stride = 1,
    +  padding = 0,
    +  dilation = 1,
    +  groups = 1,
    +  bias = TRUE,
    +  padding_mode = "zeros"
    +)

    Arguments

    @@ -351,18 +383,18 @@ sampled from \(\mathcal{U}(-\sqrt{k}, \sqrt{k})\) where

    Examples

    -
    if (torch_is_installed()) { +
    if (torch_is_installed()) { # With square kernels and equal stride -m <- nn_conv2d(16, 33, 3, stride = 2) +m <- nn_conv2d(16, 33, 3, stride = 2) # non-square kernels and unequal stride and with padding -m <- nn_conv2d(16, 33, c(3, 5), stride=c(2, 1), padding=c(4, 2)) +m <- nn_conv2d(16, 33, c(3, 5), stride=c(2, 1), padding=c(4, 2)) # non-square kernels and unequal stride and with padding and dilation -m <- nn_conv2d(16, 33, c(3, 5), stride=c(2, 1), padding=c(4, 2), dilation=c(3, 1)) -input <- torch_randn(20, 16, 50, 100) -output <- m(input) +m <- nn_conv2d(16, 33, c(3, 5), stride=c(2, 1), padding=c(4, 2), dilation=c(3, 1)) +input <- torch_randn(20, 16, 50, 100) +output <- m(input) -} +}
    @@ -148,6 +160,9 @@ and output \((N, C_{out}, D_{out}, H_{out}, W_{out})\) can be precisely describe
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -161,7 +176,24 @@ and output \((N, C_{out}, D_{out}, H_{out}, W_{out})\) can be precisely describe
  • - +
  • Reference
  • @@ -201,17 +233,17 @@ In the simplest case, the output value of the layer with input size \((N, C_{in} and output \((N, C_{out}, D_{out}, H_{out}, W_{out})\) can be precisely described as:

    -
    nn_conv3d(
    -  in_channels,
    -  out_channels,
    -  kernel_size,
    -  stride = 1,
    -  padding = 0,
    -  dilation = 1,
    -  groups = 1,
    -  bias = TRUE,
    -  padding_mode = "zeros"
    -)
    +
    nn_conv3d(
    +  in_channels,
    +  out_channels,
    +  kernel_size,
    +  stride = 1,
    +  padding = 0,
    +  dilation = 1,
    +  groups = 1,
    +  bias = TRUE,
    +  padding_mode = "zeros"
    +)

    Arguments

    @@ -342,15 +374,15 @@ sampled from \(\mathcal{U}(-\sqrt{k}, \sqrt{k})\) where

    Examples

    -
    if (torch_is_installed()) { +
    if (torch_is_installed()) { # With square kernels and equal stride -m <- nn_conv3d(16, 33, 3, stride=2) +m <- nn_conv3d(16, 33, 3, stride=2) # non-square kernels and unequal stride and with padding -m <- nn_conv3d(16, 33, c(3, 5, 2), stride=c(2, 1, 1), padding=c(4, 2, 0)) -input <- torch_randn(20, 16, 10, 50, 100) -output <- m(input) +m <- nn_conv3d(16, 33, c(3, 5, 2), stride=c(2, 1, 1), padding=c(4, 2, 0)) +input <- torch_randn(20, 16, 10, 50, 100) +output <- m(input) -} +}
    @@ -146,6 +158,9 @@ composed of several input planes." />
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -159,7 +174,24 @@ composed of several input planes." />
  • - +
  • Reference
  • @@ -197,18 +229,18 @@ composed of several input planes." /> composed of several input planes.

    -
    nn_conv_transpose1d(
    -  in_channels,
    -  out_channels,
    -  kernel_size,
    -  stride = 1,
    -  padding = 0,
    -  output_padding = 0,
    -  groups = 1,
    -  bias = TRUE,
    -  dilation = 1,
    -  padding_mode = "zeros"
    -)
    +
    nn_conv_transpose1d(
    +  in_channels,
    +  out_channels,
    +  kernel_size,
    +  stride = 1,
    +  padding = 0,
    +  output_padding = 0,
    +  groups = 1,
    +  bias = TRUE,
    +  dilation = 1,
    +  padding_mode = "zeros"
    +)

    Arguments

    @@ -338,12 +370,12 @@ sampled from \(\mathcal{U}(-\sqrt{k}, \sqrt{k})\) where

    Examples

    -
    if (torch_is_installed()) { -m <- nn_conv_transpose1d(32, 16, 2) -input <- torch_randn(10, 32, 2) -output <- m(input) +
    if (torch_is_installed()) { +m <- nn_conv_transpose1d(32, 16, 2) +input <- torch_randn(10, 32, 2) +output <- m(input) -} +}
    @@ -146,6 +158,9 @@ composed of several input planes." />
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -159,7 +174,24 @@ composed of several input planes." />
  • - +
  • Reference
  • @@ -197,18 +229,18 @@ composed of several input planes." /> composed of several input planes.

    -
    nn_conv_transpose2d(
    -  in_channels,
    -  out_channels,
    -  kernel_size,
    -  stride = 1,
    -  padding = 0,
    -  output_padding = 0,
    -  groups = 1,
    -  bias = TRUE,
    -  dilation = 1,
    -  padding_mode = "zeros"
    -)
    +
    nn_conv_transpose2d(
    +  in_channels,
    +  out_channels,
    +  kernel_size,
    +  stride = 1,
    +  padding = 0,
    +  output_padding = 0,
    +  groups = 1,
    +  bias = TRUE,
    +  dilation = 1,
    +  padding_mode = "zeros"
    +)

    Arguments

    @@ -347,23 +379,23 @@ sampled from \(\mathcal{U}(-\sqrt{k}, \sqrt{k})\) where

    Examples

    -
    if (torch_is_installed()) { +
    if (torch_is_installed()) { # With square kernels and equal stride -m <- nn_conv_transpose2d(16, 33, 3, stride=2) +m <- nn_conv_transpose2d(16, 33, 3, stride=2) # non-square kernels and unequal stride and with padding -m <- nn_conv_transpose2d(16, 33, c(3, 5), stride=c(2, 1), padding=c(4, 2)) -input <- torch_randn(20, 16, 50, 100) -output <- m(input) +m <- nn_conv_transpose2d(16, 33, c(3, 5), stride=c(2, 1), padding=c(4, 2)) +input <- torch_randn(20, 16, 50, 100) +output <- m(input) # exact output size can be also specified as an argument -input <- torch_randn(1, 16, 12, 12) -downsample <- nn_conv2d(16, 16, 3, stride=2, padding=1) -upsample <- nn_conv_transpose2d(16, 16, 3, stride=2, padding=1) -h <- downsample(input) -h$size() -output <- upsample(h, output_size=input$size()) -output$size() - -} +input <- torch_randn(1, 16, 12, 12) +downsample <- nn_conv2d(16, 16, 3, stride=2, padding=1) +upsample <- nn_conv_transpose2d(16, 16, 3, stride=2, padding=1) +h <- downsample(input) +h$size() +output <- upsample(h, output_size=input$size()) +output$size() + +}
    #> [1] 1 16 12 12
    @@ -146,6 +158,9 @@ planes." />
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -159,7 +174,24 @@ planes." />
  • - +
  • Reference
  • @@ -197,18 +229,18 @@ planes." /> planes.

    -
    nn_conv_transpose3d(
    -  in_channels,
    -  out_channels,
    -  kernel_size,
    -  stride = 1,
    -  padding = 0,
    -  output_padding = 0,
    -  groups = 1,
    -  bias = TRUE,
    -  dilation = 1,
    -  padding_mode = "zeros"
    -)
    +
    nn_conv_transpose3d(
    +  in_channels,
    +  out_channels,
    +  kernel_size,
    +  stride = 1,
    +  padding = 0,
    +  output_padding = 0,
    +  groups = 1,
    +  bias = TRUE,
    +  dilation = 1,
    +  padding_mode = "zeros"
    +)

    Arguments

    @@ -355,16 +387,16 @@ sampled from \(\mathcal{U}(-\sqrt{k}, \sqrt{k})\) where

    Examples

    -
    if (torch_is_installed()) { -if (FALSE) { +
    if (torch_is_installed()) { +if (FALSE) { # With square kernels and equal stride -m <- nn_conv_transpose3d(16, 33, 3, stride=2) +m <- nn_conv_transpose3d(16, 33, 3, stride=2) # non-square kernels and unequal stride and with padding -m <- nn_conv_transpose3d(16, 33, c(3, 5, 2), stride=c(2, 1, 1), padding=c(0, 4, 2)) -input <- torch_randn(20, 16, 10, 50, 100) -output <- m(input) -} -} +m <- nn_conv_transpose3d(16, 33, c(3, 5, 2), stride=c(2, 1, 1), padding=c(0, 4, 2)) +input <- torch_randn(20, 16, 10, 50, 100) +output <- m(input) +} +}
    + + + + + + + + + +
    margin

(float, optional): Should be a number from \(-1\) to \(1\); \(0\) to \(0.5\) is suggested. If margin is missing, the default value is \(0\).

    reduction

(string, optional): Specifies the reduction to apply to the output: 'none' | 'mean' | 'sum'. 'none': no reduction will be applied; 'mean': the sum of the output will be divided by the number of elements in the output; 'sum': the output will be summed. Default: 'mean'

    + +

    Details

    + +

    $$ + \mbox{loss}(x, y) = + \begin{array}{ll} +1 - \cos(x_1, x_2), & \mbox{if } y = 1 \\ +\max(0, \cos(x_1, x_2) - \mbox{margin}), & \mbox{if } y = -1 +\end{array} +$$

    + + + + + + + + + diff --git a/reference/nn_cross_entropy_loss.html b/reference/nn_cross_entropy_loss.html index e45c00e75d3886d8458b6455612235217b10fd55..1083964e47258321ea37d76862fe15f32fe52b54 100644 --- a/reference/nn_cross_entropy_loss.html +++ b/reference/nn_cross_entropy_loss.html @@ -38,6 +38,8 @@ + + + + + + @@ -73,7 +85,7 @@ It is useful when training a classification problem with C classes." /> torch - 0.0.3 + 0.1.0 @@ -146,6 +158,9 @@ It is useful when training a classification problem with C classes." />
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -159,7 +174,24 @@ It is useful when training a classification problem with C classes." />
  • - +
  • Reference
  • @@ -193,11 +225,11 @@ It is useful when training a classification problem with C classes." />
    -

    This criterion combines nn_log_softmax() and nn_nll_loss() in one single class. +

    This criterion combines nn_log_softmax() and nn_nll_loss() in one single class. It is useful when training a classification problem with C classes.

    -
    nn_cross_entropy_loss(weight = NULL, ignore_index = -100, reduction = "mean")
    +
    nn_cross_entropy_loss(weight = NULL, ignore_index = -100, reduction = "mean")

    Arguments

    @@ -271,14 +303,14 @@ of K-dimensional loss.

    Examples

    -
    if (torch_is_installed()) { -loss <- nn_cross_entropy_loss() -input <- torch_randn(3, 5, requires_grad=TRUE) -target <- torch_randint(low = 1, high = 5, size = 3, dtype = torch_long()) -output <- loss(input, target) -output$backward() - -} +
    if (torch_is_installed()) { +loss <- nn_cross_entropy_loss() +input <- torch_randn(3, 5, requires_grad=TRUE) +target <- torch_randint(low = 1, high = 5, size = 3, dtype = torch_long()) +output <- loss(input, target) +output$backward() + +}
    + + + + + + + + + + + + + +
    blank

    (int, optional): blank label. Default \(0\).

    reduction

    (string, optional): Specifies the reduction to apply to the output: +'none' | 'mean' | 'sum'. 'none': no reduction will be applied, +'mean': the output losses will be divided by the target lengths and +then the mean over the batch is taken. Default: 'mean'

    zero_infinity

(bool, optional): Whether to zero infinite losses and the associated gradients. Default: FALSE. Infinite losses mainly occur when the inputs are too short to be aligned to the targets.

    + +

    Note

    + +

In order to use CuDNN, the following must be satisfied: targets must be in concatenated format, all input_lengths must be T, \(blank=0\), target_lengths \(\leq 256\), and the integer arguments must be of dtype torch_int32. The regular implementation uses the (more common in PyTorch) torch_long dtype.

    +

    In some circumstances when using the CUDA backend with CuDNN, this operator +may select a nondeterministic algorithm to increase performance. If this is +undesirable, you can try to make the operation deterministic (potentially at +a performance cost) by setting torch.backends.cudnn.deterministic = TRUE.

    +

    Shape

    + + + +
      +
• Log_probs: Tensor of size \((T, N, C)\), where \(T = \mbox{input length}\), \(N = \mbox{batch size}\), and \(C = \mbox{number of classes (including blank)}\). The logarithmized probabilities of the outputs (e.g. obtained with nnf_log_softmax()).

    • +
• Targets: Tensor of size \((N, S)\) or \((\mbox{sum}(\mbox{target\_lengths}))\), where \(N = \mbox{batch size}\) and \(S = \mbox{max target length, if shape is } (N, S)\). It represents the target sequences. Each element in the target sequence is a class index, and the target index cannot be blank (default = 0). In the \((N, S)\) form, targets are padded to the length of the longest sequence and stacked. In the \((\mbox{sum}(\mbox{target\_lengths}))\) form, the targets are assumed to be un-padded and concatenated within 1 dimension.

    • +
• Input_lengths: Tuple or tensor of size \((N)\), where \(N = \mbox{batch size}\). It represents the lengths of the inputs (each must be \(\leq T\)). Lengths are specified for each sequence to achieve masking under the assumption that sequences are padded to equal lengths.

    • +
• Target_lengths: Tuple or tensor of size \((N)\), where \(N = \mbox{batch size}\). It represents the lengths of the targets. Lengths are specified for each sequence to achieve masking under the assumption that sequences are padded to equal lengths. If the target shape is \((N,S)\), target_lengths are effectively the stop index \(s_n\) for each target sequence, such that target_n = targets[n,0:s_n] for each target in a batch. Lengths must each be \(\leq S\). If the targets are given as a 1d tensor that is the concatenation of individual targets, the target_lengths must add up to the total length of the tensor.

    • +
    • Output: scalar. If reduction is 'none', then +\((N)\), where \(N = \mbox{batch size}\).

    • +
    + +


    +

    References

    + +

    A. Graves et al.: Connectionist Temporal Classification: +Labelling Unsegmented Sequence Data with Recurrent Neural Networks: +https://www.cs.toronto.edu/~graves/icml_2006.pdf

    + +

    Examples

    +
    if (torch_is_installed()) { +# Target are to be padded +T <- 50 # Input sequence length +C <- 20 # Number of classes (including blank) +N <- 16 # Batch size +S <- 30 # Target sequence length of longest target in batch (padding length) +S_min <- 10 # Minimum target length, for demonstration purposes + +# Initialize random batch of input vectors, for *size = (T,N,C) +input <- torch_randn(T, N, C)$log_softmax(2)$detach()$requires_grad_() + +# Initialize random batch of targets (0 = blank, 1:C = classes) +target <- torch_randint(low=1, high=C, size=c(N, S), dtype=torch_long()) + +input_lengths <- torch_full(size=c(N), fill_value=TRUE, dtype=torch_long()) +target_lengths <- torch_randint(low=S_min, high=S, size=c(N), dtype=torch_long()) +ctc_loss <- nn_ctc_loss() +loss <- ctc_loss(input, target, input_lengths, target_lengths) +loss$backward() + + +# Target are to be un-padded +T <- 50 # Input sequence length +C <- 20 # Number of classes (including blank) +N <- 16 # Batch size + +# Initialize random batch of input vectors, for *size = (T,N,C) +input <- torch_randn(T, N, C)$log_softmax(2)$detach()$requires_grad_() +input_lengths <- torch_full(size=c(N), fill_value=TRUE, dtype=torch_long()) + +# Initialize random batch of targets (0 = blank, 1:C = classes) +target_lengths <- torch_randint(low=1, high=T, size=c(N), dtype=torch_long()) +target <- torch_randint(low=1, high=C, size=as.integer(sum(target_lengths)), dtype=torch_long()) +ctc_loss <- nn_ctc_loss() +loss <- ctc_loss(input, target, input_lengths, target_lengths) +loss$backward() + +} +
    + + + + + + + + + diff --git a/reference/nn_dropout.html b/reference/nn_dropout.html index 1db17faad4f50a5db6f49b231948700bd07c8c93..8b3a281d5ab97511d2194f8ef4b6eaf336c4455a 100644 --- a/reference/nn_dropout.html +++ b/reference/nn_dropout.html @@ -38,6 +38,8 @@ + + + + + + @@ -75,7 +87,7 @@ call." /> torch - 0.0.3 + 0.1.0 @@ -148,6 +160,9 @@ call." />
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -161,7 +176,24 @@ call." />
  • - +
  • Reference
  • @@ -201,7 +233,7 @@ distribution. Each channel will be zeroed out independently on every forward call.

    -
    nn_dropout(p = 0.5, inplace = FALSE)
    +
    nn_dropout(p = 0.5, inplace = FALSE)

    Arguments

    @@ -235,12 +267,12 @@ identity function.

    Examples

    -
    if (torch_is_installed()) { -m <- nn_dropout(p = 0.2) -input <- torch_randn(20, 16) -output <- m(input) +
    if (torch_is_installed()) { +m <- nn_dropout(p = 0.2) +input <- torch_randn(20, 16) +output <- m(input) -} +}
    @@ -147,6 +159,9 @@ batched input is a 2D tensor \(\mbox{input}[i, j]\))." />
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -160,7 +175,24 @@ batched input is a 2D tensor \(\mbox{input}[i, j]\))." />
  • - +
  • Reference
  • @@ -199,7 +231,7 @@ e.g., the \(j\)-th channel of the \(i\)-th sample in the batched input is a 2D tensor \(\mbox{input}[i, j]\)).

    -
    nn_dropout2d(p = 0.5, inplace = FALSE)
    +
    nn_dropout2d(p = 0.5, inplace = FALSE)

    Arguments

    @@ -239,12 +271,12 @@ feature maps and should be used instead.

    Examples

    -
    if (torch_is_installed()) { -m <- nn_dropout2d(p = 0.2) -input <- torch_randn(20, 16, 32, 32) -output <- m(input) +
    if (torch_is_installed()) { +m <- nn_dropout2d(p = 0.2) +input <- torch_randn(20, 16, 32, 32) +output <- m(input) -} +}
    @@ -147,6 +159,9 @@ batched input is a 3D tensor \(\mbox{input}[i, j]\))." />
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -160,7 +175,24 @@ batched input is a 3D tensor \(\mbox{input}[i, j]\))." />
  • - +
  • Reference
  • @@ -199,7 +231,7 @@ e.g., the \(j\)-th channel of the \(i\)-th sample in the batched input is a 3D tensor \(\mbox{input}[i, j]\)).

    -
    nn_dropout3d(p = 0.5, inplace = FALSE)
    +
    nn_dropout3d(p = 0.5, inplace = FALSE)

    Arguments

    @@ -239,12 +271,12 @@ feature maps and should be used instead.

    Examples

    -
    if (torch_is_installed()) { -m <- nn_dropout3d(p = 0.2) -input <- torch_randn(20, 16, 4, 32, 32) -output <- m(input) +
    if (torch_is_installed()) { +m <- nn_dropout3d(p = 0.2) +input <- torch_randn(20, 16, 4, 32, 32) +output <- m(input) -} +}
    @@ -145,6 +157,9 @@
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -158,7 +173,24 @@
  • - +
  • Reference
  • @@ -195,7 +227,7 @@

    Applies the element-wise function:

    -
    nn_elu(alpha = 1, inplace = FALSE)
    +
    nn_elu(alpha = 1, inplace = FALSE)

    Arguments

    @@ -227,12 +259,12 @@ dimensions

    Examples

    -
    if (torch_is_installed()) { -m <- nn_elu() -input <- torch_randn(2) -output <- m(input) +
    if (torch_is_installed()) { +m <- nn_elu() +input <- torch_randn(2) +output <- m(input) -} +}
    @@ -148,6 +160,9 @@ word embeddings." />
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -161,7 +176,24 @@ word embeddings." />
  • - +
  • Reference
  • @@ -201,16 +233,16 @@ The input to the module is a list of indices, and the output is the correspondin word embeddings.

    -
    nn_embedding(
    -  num_embeddings,
    -  embedding_dim,
    -  padding_idx = NULL,
    -  max_norm = NULL,
    -  norm_type = 2,
    -  scale_grad_by_freq = FALSE,
    -  sparse = FALSE,
    -  .weight = NULL
    -)
    +
    nn_embedding(
    +  num_embeddings,
    +  embedding_dim,
    +  padding_idx = NULL,
    +  max_norm = NULL,
    +  norm_type = 2,
    +  scale_grad_by_freq = FALSE,
    +  sparse = FALSE,
    +  .weight = NULL
    +)

    Arguments

    @@ -284,24 +316,24 @@ initialized from \(\mathcal{N}(0, 1)\)

    Examples

    -
    if (torch_is_installed()) { +
    if (torch_is_installed()) { # an Embedding module containing 10 tensors of size 3 -embedding <- nn_embedding(10, 3) +embedding <- nn_embedding(10, 3) # a batch of 2 samples of 4 indices each -input <- torch_tensor(rbind(c(1,2,4,5),c(4,3,2,9)), dtype = torch_long()) -embedding(input) +input <- torch_tensor(rbind(c(1,2,4,5),c(4,3,2,9)), dtype = torch_long()) +embedding(input) # example with padding_idx -embedding <- nn_embedding(10, 3, padding_idx=1) -input <- torch_tensor(matrix(c(1,3,1,6), nrow = 1), dtype = torch_long()) -embedding(input) +embedding <- nn_embedding(10, 3, padding_idx=1) +input <- torch_tensor(matrix(c(1,3,1,6), nrow = 1), dtype = torch_long()) +embedding(input) -} +}
    #> torch_tensor #> (1,.,.) = #> 0.0000 0.0000 0.0000 -#> -1.2943 -1.0279 0.6483 +#> -2.0373 -0.1153 -1.4552 #> 0.0000 0.0000 0.0000 -#> 0.4053 0.7866 -0.3922 +#> -0.3688 1.0265 -2.1888 #> [ CPUFloatType{1,4,3} ]
    @@ -146,6 +158,9 @@ Fractional MaxPooling by Ben Graham" />
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -159,7 +174,24 @@ Fractional MaxPooling by Ben Graham" />
  • - +
  • Reference
  • @@ -197,12 +229,12 @@ Fractional MaxPooling by Ben Graham" /> Fractional MaxPooling by Ben Graham

    -
    nn_fractional_max_pool2d(
    -  kernel_size,
    -  output_size = NULL,
    -  output_ratio = NULL,
    -  return_indices = FALSE
    -)
    +
    nn_fractional_max_pool2d(
    +  kernel_size,
    +  output_size = NULL,
    +  output_ratio = NULL,
    +  return_indices = FALSE
    +)

    Arguments

    @@ -236,16 +268,10 @@ step size determined by the target output size. The number of output features is equal to the number of input planes.

    Examples

    -
    if (torch_is_installed()) { -# pool of square window of size=3, and target output size 13x12 -m = nn_fractional_max_pool2d(3, output_size=c(13, 12)) -# pool of square window and target output size being half of input image size -m = nn_fractional_max_pool2d(3, output_ratio=c(0.5, 0.5)) -input = torch_randn(20, 16, 50, 32) -output = m(input) - -} -
    +
    if (torch_is_installed()) { + +} +
    #> NULL
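For reference, a usage sketch analogous to the nn_fractional_max_pool3d example further below; illustrative only, assuming the output_size/output_ratio arguments listed above:

if (torch_is_installed()) {
# pool of square window of size 3, and target output size 13x12
m <- nn_fractional_max_pool2d(3, output_size = c(13, 12))
# pool of square window, target output size being half of the input size
m <- nn_fractional_max_pool2d(3, output_ratio = c(0.5, 0.5))
input <- torch_randn(20, 16, 50, 32)
output <- m(input)
}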
    @@ -146,6 +158,9 @@ Fractional MaxPooling by Ben Graham" />
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -159,7 +174,24 @@ Fractional MaxPooling by Ben Graham" />
  • - +
  • Reference
  • @@ -197,12 +229,12 @@ Fractional MaxPooling by Ben Graham" /> Fractional MaxPooling by Ben Graham

    -
    nn_fractional_max_pool3d(
    -  kernel_size,
    -  output_size = NULL,
    -  output_ratio = NULL,
    -  return_indices = FALSE
    -)
    +
    nn_fractional_max_pool3d(
    +  kernel_size,
    +  output_size = NULL,
    +  output_ratio = NULL,
    +  return_indices = FALSE
    +)

    Arguments

    @@ -236,15 +268,15 @@ step size determined by the target output size. The number of output features is equal to the number of input planes.

    Examples

    -
    if (torch_is_installed()) { +
    if (torch_is_installed()) { # pool of cubic window of size=3, and target output size 13x12x11 -m = nn_fractional_max_pool3d(3, output_size=c(13, 12, 11)) +m = nn_fractional_max_pool3d(3, output_size=c(13, 12, 11)) # pool of cubic window and target output size being half of input size -m = nn_fractional_max_pool3d(3, output_ratio=c(0.5, 0.5, 0.5)) -input = torch_randn(20, 16, 50, 32, 16) -output = m(input) +m = nn_fractional_max_pool3d(3, output_ratio=c(0.5, 0.5, 0.5)) +input = torch_randn(20, 16, 50, 32, 16) +output = m(input) -} +}
    @@ -146,6 +158,9 @@ $$\mbox{GELU}(x) = x * \Phi(x)$$" />
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -159,7 +174,24 @@ $$\mbox{GELU}(x) = x * \Phi(x)$$" />
  • - +
  • Reference
  • @@ -197,7 +229,7 @@ $$\mbox{GELU}(x) = x * \Phi(x)$$" /> $$\mbox{GELU}(x) = x * \Phi(x)$$

    -
    nn_gelu()
    +
    nn_gelu()

    Details

    @@ -215,12 +247,12 @@ dimensions

    Examples

    -
    if (torch_is_installed()) { -m = nn_gelu() -input <- torch_randn(2) -output <- m(input) +
    if (torch_is_installed()) { +m = nn_gelu() +input <- torch_randn(2) +output <- m(input) -} +}
    @@ -147,6 +159,9 @@ of the input matrices and \(b\) is the second half." />
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -160,7 +175,24 @@ of the input matrices and \(b\) is the second half." />
  • - +
  • Reference
  • @@ -199,7 +231,7 @@ of the input matrices and \(b\) is the second half." /> of the input matrices and \(b\) is the second half.

    -
    nn_glu(dim = -1)
    +
    nn_glu(dim = -1)

    Arguments

    @@ -222,12 +254,12 @@ dimensions

    Examples

    -
    if (torch_is_installed()) { -m <- nn_glu() -input <- torch_randn(4, 2) -output <- m(input) +
    if (torch_is_installed()) { +m <- nn_glu() +input <- torch_randn(4, 2) +output <- m(input) -} +}
    @@ -145,6 +157,9 @@
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -158,7 +173,24 @@
  • - +
  • Reference
  • @@ -195,7 +227,7 @@

    Applies the hard shrinkage function element-wise:

    -
    nn_hardshrink(lambd = 0.5)
    +
    nn_hardshrink(lambd = 0.5)

    Arguments

    @@ -229,12 +261,12 @@ dimensions

    Examples

    -
    if (torch_is_installed()) { -m <- nn_hardshrink() -input <- torch_randn(2) -output <- m(input) +
    if (torch_is_installed()) { +m <- nn_hardshrink() +input <- torch_randn(2) +output <- m(input) -} +}
    @@ -145,6 +157,9 @@
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -158,7 +173,24 @@
  • - +
  • Reference
  • @@ -195,7 +227,7 @@

    Applies the element-wise function:

    -
    nn_hardsigmoid()
    +
    nn_hardsigmoid()

    Details

    @@ -220,12 +252,12 @@ dimensions

    Examples

    -
    if (torch_is_installed()) { -m <- nn_hardsigmoid() -input <- torch_randn(2) -output <- m(input) +
    if (torch_is_installed()) { +m <- nn_hardsigmoid() +input <- torch_randn(2) +output <- m(input) -} +}
    @@ -146,6 +158,9 @@ Searching for MobileNetV3" />
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -159,7 +174,24 @@ Searching for MobileNetV3" />
  • - +
  • Reference
  • @@ -197,7 +229,7 @@ Searching for MobileNetV3" /> Searching for MobileNetV3

    -
    nn_hardswish()
    +
    nn_hardswish()

    Details

    @@ -221,14 +253,14 @@ dimensions

    Examples

    -
    if (torch_is_installed()) { -if (FALSE) { -m <- nn_hardswish() -input <- torch_randn(2) -output <- m(input) -} - -} +
    if (torch_is_installed()) { +if (FALSE) { +m <- nn_hardswish() +input <- torch_randn(2) +output <- m(input) +} + +}
    @@ -146,6 +158,9 @@ HardTanh is defined as:" />
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -159,7 +174,24 @@ HardTanh is defined as:" />
  • - +
  • Reference
  • @@ -197,7 +229,7 @@ HardTanh is defined as:" /> HardTanh is defined as:

    -
    nn_hardtanh(min_val = -1, max_val = 1, inplace = FALSE)
    +
    nn_hardtanh(min_val = -1, max_val = 1, inplace = FALSE)

    Arguments

    @@ -240,12 +272,12 @@ dimensions

    Examples

    -
    if (torch_is_installed()) { -m <- nn_hardtanh(-2, 2) -input <- torch_randn(2) -output <- m(input) +
    if (torch_is_installed()) { +m <- nn_hardtanh(-2, 2) +input <- torch_randn(2) +output <- m(input) -} +}
    + + + + + + + + + +
    margin

    (float, optional): Has a default value of 1.

    reduction

(string, optional): Specifies the reduction to apply to the output: 'none' | 'mean' | 'sum'. 'none': no reduction will be applied; 'mean': the sum of the output will be divided by the number of elements in the output; 'sum': the output will be summed. Default: 'mean'

    + +

    Details

    + +

    This is usually used for measuring whether two inputs are similar or +dissimilar, e.g. using the L1 pairwise distance as \(x\), and is typically +used for learning nonlinear embeddings or semi-supervised learning. +The loss function for \(n\)-th sample in the mini-batch is

    +

    $$ + l_n = \begin{array}{ll} +x_n, & \mbox{if}\; y_n = 1,\\ +\max \{0, \Delta - x_n\}, & \mbox{if}\; y_n = -1, +\end{array} +$$

    +

    and the total loss functions is

    +

    $$ + \ell(x, y) = \begin{array}{ll} +\mbox{mean}(L), & \mbox{if reduction} = \mbox{'mean';}\\ +\mbox{sum}(L), & \mbox{if reduction} = \mbox{'sum'.} +\end{array} +$$

    +

    where \(L = \{l_1,\dots,l_N\}^\top\).

    +

    Shape

    + + + +
      +
• Input: \((*)\) where \(*\) means any number of dimensions. The sum operation operates over all the elements.

    • +
    • Target: \((*)\), same shape as the input

    • +
    • Output: scalar. If reduction is 'none', then same shape as the input

    • +
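A minimal usage sketch, assuming the nn_hinge_embedding_loss() constructor with the margin and reduction arguments listed above, and targets in {1, -1}:

if (torch_is_installed()) {
loss <- nn_hinge_embedding_loss(margin = 1)
input <- torch_randn(4, requires_grad = TRUE) # e.g. pairwise distances
target <- torch_tensor(c(1, -1, 1, -1))       # labels must be 1 or -1
output <- loss(input, target)
output$backward()
}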
    + + + + + + + + + diff --git a/reference/nn_identity.html b/reference/nn_identity.html index 31ff5e2949ad02b8fa41aba05553adce9fe48dea..51525c31a188d841b1e599db7dd83b3c6f0db24c 100644 --- a/reference/nn_identity.html +++ b/reference/nn_identity.html @@ -38,6 +38,8 @@ + + @@ -55,6 +57,16 @@ + + + + @@ -72,7 +84,7 @@ torch - 0.0.3 + 0.1.0 @@ -145,6 +157,9 @@
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -158,7 +173,24 @@
  • - +
  • Reference
  • @@ -195,7 +227,7 @@

    A placeholder identity operator that is argument-insensitive.

    -
    nn_identity(...)
    +
    nn_identity(...)

    Arguments

    @@ -208,13 +240,13 @@

    Examples

    -
    if (torch_is_installed()) { -m <- nn_identity(54, unused_argument1 = 0.1, unused_argument2 = FALSE) -input <- torch_randn(128, 20) -output <- m(input) -print(output$size()) +
    if (torch_is_installed()) { +m <- nn_identity(54, unused_argument1 = 0.1, unused_argument2 = FALSE) +input <- torch_randn(128, 20) +output <- m(input) +print(output$size()) -} +}
    #> [1] 128 20
    @@ -145,6 +157,9 @@
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -158,7 +173,24 @@
  • - +
  • Reference
  • @@ -195,7 +227,7 @@

    Return the recommended gain value for the given nonlinearity function.

    -
    nn_init_calculate_gain(nonlinearity, param = NULL)
    +
    nn_init_calculate_gain(nonlinearity, param = NULL)

    Arguments

    diff --git a/reference/nn_init_constant_.html b/reference/nn_init_constant_.html index 7be0e2c24db7be6c309aa43381e5ea768ed2b39d..431492c76524e7b6fd8e987232e5b28353185ad5 100644 --- a/reference/nn_init_constant_.html +++ b/reference/nn_init_constant_.html @@ -38,6 +38,8 @@ + + @@ -55,6 +57,16 @@ + + + + @@ -72,7 +84,7 @@ torch - 0.0.3 + 0.1.0 @@ -145,6 +157,9 @@
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -158,7 +173,24 @@
  • - +
  • Reference
  • @@ -195,7 +227,7 @@

    Fills the input Tensor with the value val.

    -
    nn_init_constant_(tensor, val)
    +
    nn_init_constant_(tensor, val)

    Arguments

    @@ -212,11 +244,11 @@

    Examples

    -
    if (torch_is_installed()) { -w <- torch_empty(3, 5) -nn_init_constant_(w, 0.3) +
    if (torch_is_installed()) { +w <- torch_empty(3, 5) +nn_init_constant_(w, 0.3) -} +}
    #> torch_tensor #> 0.3000 0.3000 0.3000 0.3000 0.3000 #> 0.3000 0.3000 0.3000 0.3000 0.3000 diff --git a/reference/nn_init_dirac_.html b/reference/nn_init_dirac_.html index e75204703f08d5851b238aa6a7212fd64bcd703f..7e57c6a4fbde1e5817bb1ebe5f1489e7bf515233 100644 --- a/reference/nn_init_dirac_.html +++ b/reference/nn_init_dirac_.html @@ -38,6 +38,8 @@ + + + + + + @@ -75,7 +87,7 @@ of groups&gt;1, each group of channels preserves identity." /> torch - 0.0.3 + 0.1.0
    @@ -148,6 +160,9 @@ of groups&gt;1, each group of channels preserves identity." />
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -161,7 +176,24 @@ of groups&gt;1, each group of channels preserves identity." />
  • - +
  • Reference
  • @@ -201,7 +233,7 @@ layers, where as many input channels are preserved as possible. In case of groups>1, each group of channels preserves identity.

    -
    nn_init_dirac_(tensor, groups = 1)
    +
    nn_init_dirac_(tensor, groups = 1)

    Arguments

    @@ -218,13 +250,13 @@ of groups>1, each group of channels preserves identity.

    Examples

    -
    if (torch_is_installed()) { -if (FALSE) { -w <- torch_empty(3, 16, 5, 5) -nn_init_dirac_(w) -} +
    if (torch_is_installed()) { +if (FALSE) { +w <- torch_empty(3, 16, 5, 5) +nn_init_dirac_(w) +} -} +}
    @@ -147,6 +159,9 @@ many inputs are preserved as possible." />
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -160,7 +175,24 @@ many inputs are preserved as possible." />
  • - +
  • Reference
  • @@ -199,7 +231,7 @@ Preserves the identity of the inputs in Linear layers, where as many inputs are preserved as possible.

    -
    nn_init_eye_(tensor)
    +
    nn_init_eye_(tensor)

    Arguments

    @@ -212,11 +244,11 @@ many inputs are preserved as possible.

    Examples

    -
    if (torch_is_installed()) { -w <- torch_empty(3, 5) -nn_init_eye_(w) +
    if (torch_is_installed()) { +w <- torch_empty(3, 5) +nn_init_eye_(w) -} +}
    #> torch_tensor #> 1 0 0 0 0 #> 0 1 0 0 0 diff --git a/reference/nn_init_kaiming_normal_.html b/reference/nn_init_kaiming_normal_.html index 89b43854deef1f0060b6c77074a7a2c1ac2f6da5..3e79e5645c87d25c2fa323f5c117bade72ce147e 100644 --- a/reference/nn_init_kaiming_normal_.html +++ b/reference/nn_init_kaiming_normal_.html @@ -38,6 +38,8 @@ + + + + + + @@ -74,7 +86,7 @@ normal distribution." /> torch - 0.0.3 + 0.1.0
    @@ -147,6 +159,9 @@ normal distribution." />
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -160,7 +175,24 @@ normal distribution." />
  • - +
  • Reference
  • @@ -199,12 +231,12 @@ described in Delving deep into rectifiers: Surpassing human-level performa normal distribution.

    -
    nn_init_kaiming_normal_(
    -  tensor,
    -  a = 0,
    -  mode = "fan_in",
    -  nonlinearity = "leaky_relu"
    -)
    +
    nn_init_kaiming_normal_(
    +  tensor,
    +  a = 0,
    +  mode = "fan_in",
    +  nonlinearity = "leaky_relu"
    +)

    Arguments

    @@ -233,15 +265,15 @@ or 'leaky_relu' (default).

    Examples

    -
    if (torch_is_installed()) { -w <- torch_empty(3, 5) -nn_init_kaiming_normal_(w, mode = "fan_in", nonlinearity = "leaky_relu") +
    if (torch_is_installed()) { +w <- torch_empty(3, 5) +nn_init_kaiming_normal_(w, mode = "fan_in", nonlinearity = "leaky_relu") -} +}
    #> torch_tensor -#> -0.5594 0.2408 0.3946 0.5860 -0.4834 -#> -0.0442 0.7170 -0.3028 0.4015 -0.8906 -#> -0.5157 -0.1763 0.9366 0.4640 -0.5356 +#> 0.2008 0.1724 0.3809 -0.3675 1.1300 +#> -0.4793 -1.0204 0.2167 -0.0427 -0.3795 +#> 0.9913 -0.4316 -0.0365 -0.8236 0.5538 #> [ CPUFloatType{3,5} ]
    @@ -147,6 +159,9 @@ uniform distribution." />
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -160,7 +175,24 @@ uniform distribution." />
  • - +
  • Reference
  • @@ -199,12 +231,12 @@ described in Delving deep into rectifiers: Surpassing human-level performa uniform distribution.

    -
    nn_init_kaiming_uniform_(
    -  tensor,
    -  a = 0,
    -  mode = "fan_in",
    -  nonlinearity = "leaky_relu"
    -)
    +
    nn_init_kaiming_uniform_(
    +  tensor,
    +  a = 0,
    +  mode = "fan_in",
    +  nonlinearity = "leaky_relu"
    +)

    Arguments

    @@ -233,15 +265,15 @@ or 'leaky_relu' (default).

    Examples

    -
    if (torch_is_installed()) { -w <- torch_empty(3, 5) -nn_init_kaiming_uniform_(w, mode = "fan_in", nonlinearity = "leaky_relu") +
    if (torch_is_installed()) { +w <- torch_empty(3, 5) +nn_init_kaiming_uniform_(w, mode = "fan_in", nonlinearity = "leaky_relu") -} +}
    #> torch_tensor -#> -0.7460 0.2070 -0.1066 -0.4344 -0.4666 -#> -0.5351 -0.4524 0.0950 -1.0077 -0.2169 -#> -0.9525 0.8753 0.0070 -0.4553 -0.3445 +#> -0.4344 -0.4666 -0.5351 -0.4524 0.0950 +#> -1.0077 -0.2169 -0.9525 0.8753 0.0070 +#> -0.4553 -0.3445 -0.4484 0.2880 -0.6149 #> [ CPUFloatType{3,5} ]
    @@ -145,6 +157,9 @@
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -158,7 +173,24 @@
  • - +
  • Reference
  • @@ -195,7 +227,7 @@

    Fills the input Tensor with values drawn from the normal distribution

    -
    nn_init_normal_(tensor, mean = 0, std = 1)
    +
    nn_init_normal_(tensor, mean = 0, std = 1)

    Arguments

    @@ -216,15 +248,15 @@

    Examples

    -
    if (torch_is_installed()) { -w <- torch_empty(3, 5) -nn_init_normal_(w) +
    if (torch_is_installed()) { +w <- torch_empty(3, 5) +nn_init_normal_(w) -} +}
    #> torch_tensor -#> -1.0569 -1.0900 1.2740 -1.7728 0.0593 -#> -1.7131 -0.1353 0.8191 0.1481 -0.9940 -#> -0.7544 -1.0298 0.4237 1.4650 0.0575 +#> -1.4331 -0.2350 0.6633 -2.0450 -1.5820 +#> 1.4685 -0.1794 -2.1461 0.4202 -0.3558 +#> 0.2748 -0.0637 1.7519 -0.1447 0.6195 #> [ CPUFloatType{3,5} ]
    @@ -145,6 +157,9 @@
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -158,7 +173,24 @@
  • - +
  • Reference
  • @@ -195,7 +227,7 @@

    Fills the input Tensor with the scalar value 1

    -
    nn_init_ones_(tensor)
    +
    nn_init_ones_(tensor)

    Arguments

    @@ -208,11 +240,11 @@

    Examples

    -
    if (torch_is_installed()) { -w <- torch_empty(3, 5) -nn_init_ones_(w) +
    if (torch_is_installed()) { +w <- torch_empty(3, 5) +nn_init_ones_(w) -} +}
    #> torch_tensor #> 1 1 1 1 1 #> 1 1 1 1 1 diff --git a/reference/nn_init_orthogonal_.html b/reference/nn_init_orthogonal_.html index 8c116004b3e455bec7d7e0fb839adcfed5a2dc4d..e0de0bb63b82677b05df141606e6d093ed6c0094 100644 --- a/reference/nn_init_orthogonal_.html +++ b/reference/nn_init_orthogonal_.html @@ -38,6 +38,8 @@ + + + + + + @@ -75,7 +87,7 @@ trailing dimensions are flattened." /> torch - 0.0.3 + 0.1.0
    @@ -148,6 +160,9 @@ trailing dimensions are flattened." />
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -161,7 +176,24 @@ trailing dimensions are flattened." />
  • - +
  • Reference
  • @@ -201,7 +233,7 @@ at least 2 dimensions, and for tensors with more than 2 dimensions the trailing dimensions are flattened.

    -
    nn_init_orthogonal_(tensor, gain = 1)
    +
    nn_init_orthogonal_(tensor, gain = 1)

    Arguments

    @@ -218,15 +250,15 @@ trailing dimensions are flattened.

    Examples

    -
    if (torch_is_installed()) { -w <- torch_empty(3,5) -nn_init_orthogonal_(w) +
    if (torch_is_installed()) { +w <- torch_empty(3,5) +nn_init_orthogonal_(w) -} +}
    #> torch_tensor -#> -0.0261 -0.1169 0.8583 0.1248 -0.4831 -#> -0.1584 -0.1637 0.4579 -0.4078 0.7564 -#> 0.4959 -0.5308 0.0252 0.6151 0.3053 +#> 0.4375 -0.0544 0.1218 -0.7055 -0.5414 +#> -0.5530 -0.7542 0.2625 -0.2375 -0.0026 +#> 0.3739 -0.2071 -0.1548 -0.3925 0.7996 #> [ CPUFloatType{3,5} ]
    @@ -147,6 +159,9 @@ as described in Deep learning via Hessian-free optimization - Martens, J. (2010)
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -160,7 +175,24 @@ as described in Deep learning via Hessian-free optimization - Martens, J. (2010)
  • - +
  • Reference
  • @@ -199,7 +231,7 @@ non-zero elements will be drawn from the normal distribution as described in Deep learning via Hessian-free optimization - Martens, J. (2010).

    -
    nn_init_sparse_(tensor, sparsity, std = 0.01)
    +
    nn_init_sparse_(tensor, sparsity, std = 0.01)

    Arguments

    @@ -221,12 +253,12 @@ the non-zero values

    Examples

    -
    if (torch_is_installed()) { -if (FALSE) { -w <- torch_empty(3, 5) -nn_init_sparse_(w, sparsity = 0.1) -} -} +
    if (torch_is_installed()) { +if (FALSE) { +w <- torch_empty(3, 5) +nn_init_sparse_(w, sparsity = 0.1) +} +}
    @@ -146,6 +158,9 @@ normal distribution." />
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -159,7 +174,24 @@ normal distribution." />
  • - +
  • Reference
  • @@ -197,7 +229,7 @@ normal distribution." /> normal distribution.

    -
    nn_init_trunc_normal_(tensor, mean = 0, std = 1, a = -2, b = -2)
    +
    nn_init_trunc_normal_(tensor, mean = 0, std = 1, a = -2, b = -2)

    Arguments

    @@ -226,11 +258,11 @@ normal distribution.

    Examples

    -
    if (torch_is_installed()) { -w <- torch_empty(3, 5) -nn_init_trunc_normal_(w) +
    if (torch_is_installed()) { +w <- torch_empty(3, 5) +nn_init_trunc_normal_(w) -} +}
    #> torch_tensor #> -2 -2 -2 -2 -2 #> -2 -2 -2 -2 -2 diff --git a/reference/nn_init_uniform_.html b/reference/nn_init_uniform_.html index 98b02d9aa980cc69b87c1ad2bd93988288b61cb5..43e79107f1dc5e6b0c2b8a07c319f0a42103294f 100644 --- a/reference/nn_init_uniform_.html +++ b/reference/nn_init_uniform_.html @@ -38,6 +38,8 @@ + + @@ -55,6 +57,16 @@ + + + + @@ -72,7 +84,7 @@ torch - 0.0.3 + 0.1.0
    @@ -145,6 +157,9 @@
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -158,7 +173,24 @@
  • - +
  • Reference
  • @@ -195,7 +227,7 @@

    Fills the input Tensor with values drawn from the uniform distribution

    -
    nn_init_uniform_(tensor, a = 0, b = 1)
    +
    nn_init_uniform_(tensor, a = 0, b = 1)

    Arguments

    @@ -216,15 +248,15 @@

    Examples

    -
    if (torch_is_installed()) { -w <- torch_empty(3, 5) -nn_init_uniform_(w) +
    if (torch_is_installed()) { +w <- torch_empty(3, 5) +nn_init_uniform_(w) -} +}
    #> torch_tensor -#> 0.8556 0.9331 0.3515 0.8071 0.4948 -#> 0.6075 0.9042 0.7181 0.7329 0.7563 -#> 0.2584 0.5293 0.9757 0.3030 0.3341 +#> 0.8071 0.4948 0.6075 0.9042 0.7181 +#> 0.7329 0.7563 0.2584 0.5293 0.9757 +#> 0.3030 0.3341 0.7465 0.4465 0.1515 #> [ CPUFloatType{3,5} ]
    @@ -147,6 +159,9 @@ distribution." />
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -160,7 +175,24 @@ distribution." />
  • - +
  • Reference
  • @@ -199,7 +231,7 @@ described in Understanding the difficulty of training deep feedforward neu distribution.

    -
    nn_init_xavier_normal_(tensor, gain = 1)
    +
    nn_init_xavier_normal_(tensor, gain = 1)

    Arguments

    @@ -216,15 +248,15 @@ distribution.

    Examples

    -
    if (torch_is_installed()) { -w <- torch_empty(3, 5) -nn_init_xavier_normal_(w) +
    if (torch_is_installed()) { +w <- torch_empty(3, 5) +nn_init_xavier_normal_(w) -} +}
    #> torch_tensor -#> 1.2535 -0.2197 0.5425 -3.0052 -4.2446 -#> -0.3570 -1.6970 -2.0154 -0.5348 2.7582 -#> 0.8714 -0.8924 0.7675 3.2553 -1.4333 +#> -1.1814 2.0745 -1.6342 0.8342 -1.3561 +#> -1.6676 -1.9991 2.9938 0.4390 0.6559 +#> 3.1886 0.6221 -1.8850 -0.9772 0.6616 #> [ CPUFloatType{3,5} ]
    @@ -147,6 +159,9 @@ distribution." />
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -160,7 +175,24 @@ distribution." />
  • - +
  • Reference
  • @@ -199,7 +231,7 @@ described in Understanding the difficulty of training deep feedforward neu distribution.

    -
    nn_init_xavier_uniform_(tensor, gain = 1)
    +
    nn_init_xavier_uniform_(tensor, gain = 1)

    Arguments

    @@ -216,15 +248,15 @@ distribution.

    Examples

    -
    if (torch_is_installed()) { -w <- torch_empty(3, 5) -nn_init_xavier_uniform_(w) +
    if (torch_is_installed()) { +w <- torch_empty(3, 5) +nn_init_xavier_uniform_(w) -} +}
    #> torch_tensor -#> 1.3397 1.1040 -3.0453 -1.7935 0.9545 -#> -0.0194 -2.4483 2.9345 2.2750 -2.4048 -#> -0.4406 -2.2409 0.4155 -0.1573 1.9776 +#> -1.7935 0.9545 -0.0194 -2.4483 2.9345 +#> 2.2750 -2.4048 -0.4406 -2.2409 0.4155 +#> -0.1573 1.9776 1.6310 1.5990 1.2116 #> [ CPUFloatType{3,5} ]
    @@ -145,6 +157,9 @@
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -158,7 +173,24 @@
  • - +
  • Reference
  • @@ -195,7 +227,7 @@

    Fills the input Tensor with the scalar value 0

    -
    nn_init_zeros_(tensor)
    +
    nn_init_zeros_(tensor)

    Arguments

    @@ -208,11 +240,11 @@

    Examples

    -
    if (torch_is_installed()) { -w <- torch_empty(3, 5) -nn_init_zeros_(w) +
    if (torch_is_installed()) { +w <- torch_empty(3, 5) +nn_init_zeros_(w) -} +}
    #> torch_tensor #> 0 0 0 0 0 #> 0 0 0 0 0 diff --git a/reference/nn_kl_div_loss.html b/reference/nn_kl_div_loss.html new file mode 100644 index 0000000000000000000000000000000000000000..2335c2a286a0d15f34f2e4e4b1375626e493d521 --- /dev/null +++ b/reference/nn_kl_div_loss.html @@ -0,0 +1,329 @@ + + + + + + + + +Kullback-Leibler divergence loss — nn_kl_div_loss • torch + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    +
    + + + + +
    + +
    +
    + + +
    +

The Kullback-Leibler divergence loss measure. Kullback-Leibler divergence is a useful distance measure for continuous distributions and is often useful when performing direct regression over the space of (discretely sampled) continuous output distributions.

    +
    + +
    nn_kl_div_loss(reduction = "mean")
    + +

    Arguments

    +
    + + + + + +
    reduction

    (string, optional): Specifies the reduction to apply to the output: +'none' | 'batchmean' | 'sum' | 'mean'. +'none': no reduction will be applied. +'batchmean': the sum of the output will be divided by batchsize. +'sum': the output will be summed. +'mean': the output will be divided by the number of elements in the output. +Default: 'mean'

    + +

    Details

    + +

    As with nn_nll_loss(), the input given is expected to contain +log-probabilities and is not restricted to a 2D Tensor.

    +

    The targets are interpreted as probabilities by default, but could be considered +as log-probabilities with log_target set to TRUE.

    +

    This criterion expects a target Tensor of the same size as the +input Tensor.

    +

    The unreduced (i.e. with reduction set to 'none') loss can be described +as:

    +

    $$ + l(x,y) = L = \{ l_1,\dots,l_N \}, \quad +l_n = y_n \cdot \left( \log y_n - x_n \right) +$$

    +

    where the index \(N\) spans all dimensions of input and \(L\) has the same +shape as input. If reduction is not 'none' (default 'mean'), then:

    +

    $$ + \ell(x, y) = \begin{array}{ll} +\mbox{mean}(L), & \mbox{if reduction} = \mbox{'mean';} \\ +\mbox{sum}(L), & \mbox{if reduction} = \mbox{'sum'.} +\end{array} +$$

    +

    In default reduction mode 'mean', the losses are averaged for each minibatch +over observations as well as over dimensions. 'batchmean' mode gives the +correct KL divergence where losses are averaged over batch dimension only. +'mean' mode's behavior will be changed to the same as 'batchmean' in the next +major release.

    +

    Note

    + +

reduction = 'mean' doesn't return the true KL divergence value; please use reduction = 'batchmean', which aligns with the mathematical definition of KL divergence. In the next major release, 'mean' will be changed to be the same as 'batchmean'.

    +

    Shape

    + + + +
      +
• Input: \((N, *)\) where \(*\) means any number of additional dimensions

    • +
    • Target: \((N, *)\), same shape as the input

    • +
    • Output: scalar by default. If reduction is 'none', then \((N, *)\), +the same shape as the input

    • +
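A minimal usage sketch, assuming inputs are passed as log-probabilities and targets as probabilities, per the Details above:

if (torch_is_installed()) {
input <- torch_randn(3, 5)$log_softmax(2)  # log-probabilities over classes
target <- torch_randn(3, 5)$softmax(2)     # probabilities over classes
# 'batchmean' matches the mathematical definition of KL divergence
loss <- nn_kl_div_loss(reduction = "batchmean")
output <- loss(input, target)
}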
    + + + + + + + + + diff --git a/reference/nn_l1_loss.html b/reference/nn_l1_loss.html new file mode 100644 index 0000000000000000000000000000000000000000..17242cba0031e464c9090751002f8dc68699d484 --- /dev/null +++ b/reference/nn_l1_loss.html @@ -0,0 +1,319 @@ + + + + + + + + +L1 loss — nn_l1_loss • torch + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    +
    + + + + +
    + +
    +
    + + +
    +

    Creates a criterion that measures the mean absolute error (MAE) between each +element in the input \(x\) and target \(y\).

    +
    + +
    nn_l1_loss(reduction = "mean")
    + +

    Arguments

    + + + + + + +
    reduction

(string, optional): Specifies the reduction to apply to the output: 'none' | 'mean' | 'sum'. 'none': no reduction will be applied; 'mean': the sum of the output will be divided by the number of elements in the output; 'sum': the output will be summed. Default: 'mean'

    + +

    Details

    + +

    The unreduced (i.e. with reduction set to 'none') loss can be described +as:

    +

    $$ +\ell(x, y) = L = \{l_1,\dots,l_N\}^\top, \quad +l_n = \left| x_n - y_n \right|, +$$

    +

    where \(N\) is the batch size. If reduction is not 'none' +(default 'mean'), then:

    +

    $$ +\ell(x, y) = +\begin{array}{ll} +\mbox{mean}(L), & \mbox{if reduction} = \mbox{'mean';}\\ +\mbox{sum}(L), & \mbox{if reduction} = \mbox{'sum'.} +\end{array} +$$

    +

    \(x\) and \(y\) are tensors of arbitrary shapes with a total +of \(n\) elements each.

    +

    The sum operation still operates over all the elements, and divides by \(n\). +The division by \(n\) can be avoided if one sets reduction = 'sum'.

    +

    Shape

    + + + +
      +
• Input: \((N, *)\) where \(*\) means any number of additional dimensions

    • +
    • Target: \((N, *)\), same shape as the input

    • +
    • Output: scalar. If reduction is 'none', then +\((N, *)\), same shape as the input

    • +
    + + +

    Examples

    +
    if (torch_is_installed()) { +loss <- nn_l1_loss() +input <- torch_randn(3, 5, requires_grad=TRUE) +target <- torch_randn(3, 5) +output <- loss(input, target) +output$backward() + +} +
    + + + + + + + + diff --git a/reference/nn_leaky_relu.html b/reference/nn_leaky_relu.html index 001286474c11f8d81acbad72cd0eff3f364adbf6..27a2b856d519f55c3c1ed1309ee1d486b784c313 100644 --- a/reference/nn_leaky_relu.html +++ b/reference/nn_leaky_relu.html @@ -38,6 +38,8 @@ + + @@ -55,6 +57,16 @@ + + + + @@ -72,7 +84,7 @@ torch - 0.0.3 + 0.1.0 @@ -145,6 +157,9 @@
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -158,7 +173,24 @@
  • - +
  • Reference
  • @@ -195,7 +227,7 @@

    Applies the element-wise function:

    -
    nn_leaky_relu(negative_slope = 0.01, inplace = FALSE)
    +
    nn_leaky_relu(negative_slope = 0.01, inplace = FALSE)

    Arguments

    @@ -236,12 +268,12 @@ dimensions

    Examples

    -
    if (torch_is_installed()) { -m <- nn_leaky_relu(0.1) -input <- torch_randn(2) -output <- m(input) +
    if (torch_is_installed()) { +m <- nn_leaky_relu(0.1) +input <- torch_randn(2) +output <- m(input) -} +}
    @@ -145,6 +157,9 @@
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -158,7 +173,24 @@
  • - +
  • Reference
  • @@ -195,7 +227,7 @@

    Applies a linear transformation to the incoming data: y = xA^T + b

    -
    nn_linear(in_features, out_features, bias = TRUE)
    +
    nn_linear(in_features, out_features, bias = TRUE)

    Arguments

    @@ -243,13 +275,13 @@ If bias is TRUE, the values are initialized from

    Examples

    -
    if (torch_is_installed()) { -m <- nn_linear(20, 30) -input <- torch_randn(128, 20) -output <- m(input) -print(output$size()) +
    if (torch_is_installed()) { +m <- nn_linear(20, 30) +input <- torch_randn(128, 20) +output <- m(input) +print(output$size()) -} +}
    #> [1] 128 30
    @@ -148,6 +160,9 @@ $$
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -161,7 +176,24 @@ $$
  • - +
  • Reference
  • @@ -201,7 +233,7 @@ $$ $$

    -
    nn_log_sigmoid()
    +
    nn_log_sigmoid()

    Shape

    @@ -216,12 +248,12 @@ dimensions

    Examples

    -
    if (torch_is_installed()) { -m <- nn_log_sigmoid() -input <- torch_randn(2) -output <- m(input) +
    if (torch_is_installed()) { +m <- nn_log_sigmoid() +input <- torch_randn(2) +output <- m(input) -} +}
    @@ -146,6 +158,9 @@ input Tensor. The LogSoftmax formulation can be simplified as:" />
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -159,7 +174,24 @@ input Tensor. The LogSoftmax formulation can be simplified as:" />
  • - +
  • Reference
  • @@ -197,7 +229,7 @@ input Tensor. The LogSoftmax formulation can be simplified as:" /> input Tensor. The LogSoftmax formulation can be simplified as:

    -
    nn_log_softmax(dim)
    +
    nn_log_softmax(dim)

    Arguments

    @@ -229,12 +261,12 @@ dimensions

    Examples

    -
    if (torch_is_installed()) { -m <- nn_log_softmax(1) -input <- torch_randn(2, 3) -output <- m(input) +
    if (torch_is_installed()) { +m <- nn_log_softmax(1) +input <- torch_randn(2, 3) +output <- m(input) -} +}
    @@ -150,6 +162,9 @@ $$" />
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -163,7 +178,24 @@ $$" />
  • - +
  • Reference
  • @@ -204,7 +236,7 @@ planes. $$

    -
    nn_lp_pool1d(norm_type, kernel_size, stride = NULL, ceil_mode = FALSE)
    +
    nn_lp_pool1d(norm_type, kernel_size, stride = NULL, ceil_mode = FALSE)

    Arguments

    @@ -254,13 +286,13 @@ not defined. This implementation will set the gradient to zero in this case.

    $$

    Examples

    -
    if (torch_is_installed()) { +
    if (torch_is_installed()) { # power-2 pool of window of length 3, with stride 2. -m <- nn_lp_pool1d(2, 3, stride=2) -input <- torch_randn(20, 16, 50) -output <- m(input) +m <- nn_lp_pool1d(2, 3, stride=2) +input <- torch_randn(20, 16, 50) +output <- m(input) -} +}
    @@ -150,6 +162,9 @@ $$" />
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -163,7 +178,24 @@ $$" />
  • - +
  • Reference
  • @@ -204,7 +236,7 @@ planes. $$

    -
    nn_lp_pool2d(norm_type, kernel_size, stride = NULL, ceil_mode = FALSE)
    +
    nn_lp_pool2d(norm_type, kernel_size, stride = NULL, ceil_mode = FALSE)

    Arguments

    @@ -263,16 +295,16 @@ $$ $$

    Examples

    -
    if (torch_is_installed()) { +
    if (torch_is_installed()) { # power-2 pool of square window of size=3, stride=2 -m <- nn_lp_pool2d(2, 3, stride=2) +m <- nn_lp_pool2d(2, 3, stride=2) # pool of non-square window of power 1.2 -m <- nn_lp_pool2d(1.2, c(3, 2), stride=c(2, 1)) -input <- torch_randn(20, 16, 50, 32) -output <- m(input) +m <- nn_lp_pool2d(1.2, c(3, 2), stride=c(2, 1)) +input <- torch_randn(20, 16, 50, 32) +output <- m(input) -} +}
    + + + + + + + + + +
    margin

    (float, optional): Has a default value of \(0\).

    reduction

(string, optional): Specifies the reduction to apply to the output: 'none' | 'mean' | 'sum'. 'none': no reduction will be applied; 'mean': the sum of the output will be divided by the number of elements in the output; 'sum': the output will be summed. Default: 'mean'

    + +

    Details

    + +

    The loss function for each pair of samples in the mini-batch is:

    +

    $$ + \mbox{loss}(x1, x2, y) = \max(0, -y * (x1 - x2) + \mbox{margin}) +$$

    +

    Shape

    + + + +
      +
    • Input1: \((N)\) where N is the batch size.

    • +
    • Input2: \((N)\), same shape as the Input1.

    • +
    • Target: \((N)\), same shape as the inputs.

    • +
    • Output: scalar. If reduction is 'none', then \((N)\).

    • +
    + + +

    Examples

    +
    if (torch_is_installed()) { +loss <- nn_margin_ranking_loss() +input1 <- torch_randn(3, requires_grad=TRUE) +input2 <- torch_randn(3, requires_grad=TRUE) +target <- torch_randn(3)$sign() +output <- loss(input1, input2, target) +output$backward() + +} +
    + + + + + + + + + diff --git a/reference/nn_max_pool1d.html b/reference/nn_max_pool1d.html index 1169c25a17aecaa10ee05654e0be6f17fe42d479..e127011428f2d3f449bec1285e1669d0879baa18 100644 --- a/reference/nn_max_pool1d.html +++ b/reference/nn_max_pool1d.html @@ -38,6 +38,8 @@ + + + + + + @@ -73,7 +85,7 @@ planes." /> torch - 0.0.3 + 0.1.0 @@ -146,6 +158,9 @@ planes." />
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -159,7 +174,24 @@ planes." />
  • - +
  • Reference
  • @@ -197,14 +229,14 @@ planes." /> planes.

    -
    nn_max_pool1d(
    -  kernel_size,
    -  stride = NULL,
    -  padding = 0,
    -  dilation = 1,
    -  return_indices = FALSE,
    -  ceil_mode = FALSE
    -)
    +
    nn_max_pool1d(
    +  kernel_size,
    +  stride = NULL,
    +  padding = 0,
    +  dilation = 1,
    +  return_indices = FALSE,
    +  ceil_mode = FALSE
    +)

    Arguments

    @@ -263,13 +295,13 @@ has a nice visualization of what dilation does.

    $$

    Examples

    -
    if (torch_is_installed()) { +
    if (torch_is_installed()) { # pool of size=3, stride=2 -m <- nn_max_pool1d(3, stride=2) -input <- torch_randn(20, 16, 50) -output <- m(input) +m <- nn_max_pool1d(3, stride=2) +input <- torch_randn(20, 16, 50) +output <- m(input) -} +}
    @@ -146,6 +158,9 @@ planes." />
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -159,7 +174,24 @@ planes." />
  • - +
  • Reference
  • @@ -197,14 +229,14 @@ planes." /> planes.

    -
    nn_max_pool2d(
    -  kernel_size,
    -  stride = NULL,
    -  padding = 0,
    -  dilation = 1,
    -  return_indices = FALSE,
    -  ceil_mode = FALSE
    -)
    +
    nn_max_pool2d(
    +  kernel_size,
    +  stride = NULL,
    +  padding = 0,
    +  dilation = 1,
    +  return_indices = FALSE,
    +  ceil_mode = FALSE
    +)

    Arguments

    @@ -276,15 +308,15 @@ $$

    $$

    Examples

    -
    if (torch_is_installed()) { +
    if (torch_is_installed()) { # pool of square window of size=3, stride=2 -m <- nn_max_pool2d(3, stride=2) +m <- nn_max_pool2d(3, stride=2) # pool of non-square window -m <- nn_max_pool2d(c(3, 2), stride=c(2, 1)) -input <- torch_randn(20, 16, 50, 32) -output <- m(input) +m <- nn_max_pool2d(c(3, 2), stride=c(2, 1)) +input <- torch_randn(20, 16, 50, 32) +output <- m(input) -} +}
    @@ -149,6 +161,9 @@ can be precisely described as:" />
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -162,7 +177,24 @@ can be precisely described as:" />
  • - +
  • Reference
  • @@ -202,14 +234,14 @@ output \((N, C, D_{out}, H_{out}, W_{out})\) and kernel_size \((kD, can be precisely described as:

    -
    nn_max_pool3d(
    -  kernel_size,
    -  stride = NULL,
    -  padding = 0,
    -  dilation = 1,
    -  return_indices = FALSE,
    -  ceil_mode = FALSE
    -)
    +
    nn_max_pool3d(
    +  kernel_size,
    +  stride = NULL,
    +  padding = 0,
    +  dilation = 1,
    +  return_indices = FALSE,
    +  ceil_mode = FALSE
    +)

    Arguments

    @@ -281,15 +313,15 @@ $$

    $$

    Examples

    -
    if (torch_is_installed()) { +
    if (torch_is_installed()) { # pool of square window of size=3, stride=2 -m <- nn_max_pool3d(3, stride=2) +m <- nn_max_pool3d(3, stride=2) # pool of non-square window -m <- nn_max_pool3d(c(3, 2, 2), stride=c(2, 1, 2)) -input <- torch_randn(20, 16, 50,44, 31) -output <- m(input) +m <- nn_max_pool3d(c(3, 2, 2), stride=c(2, 1, 2)) +input <- torch_randn(20, 16, 50,44, 31) +output <- m(input) -} +}
    @@ -148,6 +160,9 @@ in which all non-maximal values are set to zero." />
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -161,7 +176,24 @@ in which all non-maximal values are set to zero." />
  • - +
  • Reference
  • @@ -201,7 +233,7 @@ including the indices of the maximal values and computes a partial inverse in which all non-maximal values are set to zero.

    -
    nn_max_unpool1d(kernel_size, stride = NULL, padding = 0)
    +
    nn_max_unpool1d(kernel_size, stride = NULL, padding = 0)

    Arguments

    @@ -253,21 +285,21 @@ or as given by output_size in the call operator

    Examples

    -
    if (torch_is_installed()) { -pool <- nn_max_pool1d(2, stride=2, return_indices=TRUE) -unpool <- nn_max_unpool1d(2, stride=2) +
    if (torch_is_installed()) { +pool <- nn_max_pool1d(2, stride=2, return_indices=TRUE) +unpool <- nn_max_unpool1d(2, stride=2) -input <- torch_tensor(array(1:8/1, dim = c(1,1,8))) -out <- pool(input) -unpool(out[[1]], out[[2]]) +input <- torch_tensor(array(1:8/1, dim = c(1,1,8))) +out <- pool(input) +unpool(out[[1]], out[[2]]) # Example showcasing the use of output_size -input <- torch_tensor(array(1:8/1, dim = c(1,1,8))) -out <- pool(input) -unpool(out[[1]], out[[2]], output_size=input$size()) -unpool(out[[1]], out[[2]]) +input <- torch_tensor(array(1:8/1, dim = c(1,1,8))) +out <- pool(input) +unpool(out[[1]], out[[2]], output_size=input$size()) +unpool(out[[1]], out[[2]]) -} +}
    #> torch_tensor #> (1,1,.,.) = #> 0 diff --git a/reference/nn_max_unpool2d.html b/reference/nn_max_unpool2d.html index 03109ac8473f11086825a6d67f9df33ae5214bab..b9ab5679e8162fb6127a7baee02225b034fbaf3b 100644 --- a/reference/nn_max_unpool2d.html +++ b/reference/nn_max_unpool2d.html @@ -38,6 +38,8 @@ + + + + + + @@ -75,7 +87,7 @@ in which all non-maximal values are set to zero." /> torch - 0.0.3 + 0.1.0
    @@ -148,6 +160,9 @@ in which all non-maximal values are set to zero." />
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -161,7 +176,24 @@ in which all non-maximal values are set to zero." />
  • - +
  • Reference
  • @@ -201,7 +233,7 @@ including the indices of the maximal values and computes a partial inverse in which all non-maximal values are set to zero.

    -
    nn_max_unpool2d(kernel_size, stride = NULL, padding = 0)
    +
    nn_max_unpool2d(kernel_size, stride = NULL, padding = 0)

    Arguments

    @@ -256,23 +288,23 @@ or as given by output_size in the call operator

    Examples

    -
    if (torch_is_installed()) { +
    if (torch_is_installed()) { -pool <- nn_max_pool2d(2, stride=2, return_indices=TRUE) -unpool <- nn_max_unpool2d(2, stride=2) -input <- torch_randn(1,1,4,4) -out <- pool(input) -unpool(out[[1]], out[[2]]) +pool <- nn_max_pool2d(2, stride=2, return_indices=TRUE) +unpool <- nn_max_unpool2d(2, stride=2) +input <- torch_randn(1,1,4,4) +out <- pool(input) +unpool(out[[1]], out[[2]]) # specify a different output size than input size -unpool(out[[1]], out[[2]], output_size=c(1, 1, 5, 5)) +unpool(out[[1]], out[[2]], output_size=c(1, 1, 5, 5)) -} +}
    #> torch_tensor #> (1,1,.,.) = -#> 0.0000 0.0000 0.0000 1.6626 1.3884 -#> 0.0000 0.0000 0.0000 0.4660 0.0000 -#> 0.0000 1.6033 0.0000 0.0000 0.0000 +#> 0.5229 0.0000 -0.4815 0.0000 0.0000 +#> 0.0000 0.0000 0.0000 0.0000 0.8102 +#> 0.0000 0.0000 0.0000 0.0000 0.6613 #> 0.0000 0.0000 0.0000 0.0000 0.0000 #> 0.0000 0.0000 0.0000 0.0000 0.0000 #> [ CPUFloatType{1,1,5,5} ]
    diff --git a/reference/nn_max_unpool3d.html b/reference/nn_max_unpool3d.html index fc4e8e4967a684090d933eaf60ad1d66d6a3bd8d..d9d513b02e520559624ae34de1a18193d9a4e178 100644 --- a/reference/nn_max_unpool3d.html +++ b/reference/nn_max_unpool3d.html @@ -38,6 +38,8 @@ + + + + + + @@ -75,7 +87,7 @@ in which all non-maximal values are set to zero." /> torch - 0.0.3 + 0.1.0
    @@ -148,6 +160,9 @@ in which all non-maximal values are set to zero." />
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -161,7 +176,24 @@ in which all non-maximal values are set to zero." />
  • - +
  • Reference
  • @@ -201,7 +233,7 @@ including the indices of the maximal values and computes a partial inverse in which all non-maximal values are set to zero.

    -
    nn_max_unpool3d(kernel_size, stride = NULL, padding = 0)
    +
    nn_max_unpool3d(kernel_size, stride = NULL, padding = 0)

    Arguments

    @@ -259,16 +291,16 @@ $$

    or as given by output_size in the call operator

    Examples

    -
    if (torch_is_installed()) { +
    if (torch_is_installed()) { # pool of square window of size=3, stride=2 -pool <- nn_max_pool3d(3, stride=2, return_indices=TRUE) -unpool <- nn_max_unpool3d(3, stride=2) -out <- pool(torch_randn(20, 16, 51, 33, 15)) -unpooled_output <- unpool(out[[1]], out[[2]]) -unpooled_output$size() +pool <- nn_max_pool3d(3, stride=2, return_indices=TRUE) +unpool <- nn_max_unpool3d(3, stride=2) +out <- pool(torch_randn(20, 16, 51, 33, 15)) +unpooled_output <- unpool(out[[1]], out[[2]]) +unpooled_output$size() -} +}
    #> [1] 20 16 51 33 15
    @@ -145,6 +157,9 @@
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -158,7 +173,24 @@
  • - +
  • Reference
  • @@ -195,12 +227,12 @@

    Your models should also subclass this class.

    -
    nn_module(
    -  classname = NULL,
    -  inherit = nn_Module,
    -  ...,
    -  parent_env = parent.frame()
    -)
    +
    nn_module(
    +  classname = NULL,
    +  inherit = nn_Module,
    +  ...,
    +  parent_env = parent.frame()
    +)

    Arguments

    @@ -219,7 +251,7 @@ - +
    parent_env

    passed to R6::R6Class().

    passed to R6::R6Class().

    @@ -229,22 +261,22 @@ structure. You can assign the submodules as regular attributes.

    Examples

    -
    if (torch_is_installed()) { -model <- nn_module( - initialize = function() { - self$conv1 <- nn_conv2d(1, 20, 5) - self$conv2 <- nn_conv2d(20, 20, 5) - }, - forward = function(input) { - input <- self$conv1(input) - input <- nnf_relu(input) - input <- self$conv2(input) - input <- nnf_relu(input) - input - } -) - -} +
    if (torch_is_installed()) { +model <- nn_module( + initialize = function() { + self$conv1 <- nn_conv2d(1, 20, 5) + self$conv2 <- nn_conv2d(20, 20, 5) + }, + forward = function(input) { + input <- self$conv1(input) + input <- nnf_relu(input) + input <- self$conv2(input) + input <- nnf_relu(input) + input + } +) + +}
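A hedged usage sketch for the generator defined above: calling model() creates an instance, and the instance itself is callable. The input shape (one 28x28 single-channel image) is an assumption for illustration.

if (torch_is_installed()) {
  net <- model()                      # instantiate the nn_module generator
  input <- torch_randn(1, 1, 28, 28)  # assumed batch of one 28x28 grayscale image
  output <- net(input)                # runs forward(); result is 1 x 20 x 20 x 20
}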
    @@ -147,6 +159,9 @@ nn_module methods." />
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -160,7 +175,24 @@ nn_module methods." />
  • - +
  • Reference
  • @@ -199,7 +231,7 @@ modules it contains are properly registered, and will be visible by all nn_module methods.

    -
    nn_module_list(modules = list())
    +
    nn_module_list(modules = list())

    Arguments

    @@ -212,20 +244,20 @@ modules it contains are properly registered, and will be visible by all

    Examples

    -
    if (torch_is_installed()) { - -my_module <- nn_module( - initialize = function() { - self$linears <- nn_module_list(lapply(1:10, function(x) nn_linear(10, 10))) - }, - forward = function(x) { - for (i in 1:length(self$linears)) - x <- self$linears[[i]](x) - x - } -) - -} +
    if (torch_is_installed()) { + +my_module <- nn_module( + initialize = function() { + self$linears <- nn_module_list(lapply(1:10, function(x) nn_linear(10, 10))) + }, + forward = function(x) { + for (i in 1:length(self$linears)) + x <- self$linears[[i]](x) + x + } +) + +}
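A short usage sketch of my_module from above; the batch of 2 ten-dimensional inputs is an assumed toy value.

if (torch_is_installed()) {
  m <- my_module()         # instantiate
  x <- torch_randn(2, 10)  # 2 samples, 10 features
  m(x)                     # x flows through all ten nn_linear layers in order
}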
    + + + + + +
    reduction

    (string, optional): Specifies the reduction to apply to the output: +'none' | 'mean' | 'sum'. 'none': no reduction will be applied, +'mean': the sum of the output will be divided by the number of +elements in the output, 'sum': the output will be summed. Note: size_average +and reduce are in the process of being deprecated, and in the meantime, +specifying either of those two args will override reduction. Default: 'mean'

    + +

    Details

    + +

    $$ + \ell(x, y) = L = \{l_1,\dots,l_N\}^\top, \quad +l_n = \left( x_n - y_n \right)^2, +$$

    +

    where \(N\) is the batch size. If reduction is not 'none' +(default 'mean'), then:

    +

    $$ + \ell(x, y) = + \begin{array}{ll} +\mbox{mean}(L), & \mbox{if reduction} = \mbox{'mean';}\\ +\mbox{sum}(L), & \mbox{if reduction} = \mbox{'sum'.} +\end{array} +$$

    +

    \(x\) and \(y\) are tensors of arbitrary shapes with a total +of \(n\) elements each.

    +

    The mean operation still operates over all the elements, and divides by \(n\). +The division by \(n\) can be avoided if one sets reduction = 'sum'.

    +

    Shape

    + + + +
      +
    • Input: \((N, *)\) where \(*\) means, any number of additional +dimensions

    • +
    • Target: \((N, *)\), same shape as the input

    • +
    + + +

    Examples

    +
    if (torch_is_installed()) { +loss <- nn_mse_loss() +input <- torch_randn(3, 5, requires_grad=TRUE) +target <- torch_randn(3, 5) +output <- loss(input, target) +output$backward() + +} +
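As a sanity check on the reduction semantics (a sketch reusing input and target from the example above, not part of the generated page):

if (torch_is_installed()) {
  loss_sum <- nn_mse_loss(reduction = "sum")
  loss_sum(input, target)         # sum of squared errors
  torch_sum((input - target)^2)   # same value, computed by hand
  torch_mean((input - target)^2)  # matches the default reduction = 'mean'
}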
    + + + + + +
    + + +
    +

    Site built with pkgdown 1.6.1.

    +
    + +
    + + + + + + + + + diff --git a/reference/nn_multi_margin_loss.html b/reference/nn_multi_margin_loss.html new file mode 100644 index 0000000000000000000000000000000000000000..4015af99f2c861bb8a79beee059e30f7202b4338 --- /dev/null +++ b/reference/nn_multi_margin_loss.html @@ -0,0 +1,310 @@ + + + + + + + + +Multi margin loss — nn_multi_margin_loss • torch + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    +
    + + + + +
    + +
    +
    + + +
    +

    Creates a criterion that optimizes a multi-class classification hinge +loss (margin-based loss) between input \(x\) (a 2D mini-batch Tensor) and +output \(y\) (which is a 1D tensor of target class indices, +\(0 \leq y \leq \mbox{x.size}(1)-1\)):

    +
    + +
    nn_multi_margin_loss(p = 1, margin = 1, weight = NULL, reduction = "mean")
    + +

    Arguments

    + + + + + + + + + + + + + + + + + + +
    p

    (int, optional): Has a default value of \(1\). \(1\) and \(2\) +are the only supported values.

    margin

    (float, optional): Has a default value of \(1\).

    weight

    (Tensor, optional): a manual rescaling weight given to each +class. If given, it has to be a Tensor of size C. Otherwise, it is +treated as if having all ones.

    reduction

    (string, optional): Specifies the reduction to apply to the output: +'none' | 'mean' | 'sum'. 'none': no reduction will be applied, +'mean': the sum of the output will be divided by the number of +elements in the output, 'sum': the output will be summed. Note: size_average +and reduce are in the process of being deprecated, and in the meantime, +specifying either of those two args will override reduction. Default: 'mean'

    + +

    Details

    + +

For each mini-batch sample, the loss in terms of the 1D input \(x\) and scalar +output \(y\) is: +$$ + \mbox{loss}(x, y) = \frac{\sum_i \max(0, \mbox{margin} - x[y] + x[i])^p}{\mbox{x.size}(0)} +$$

    +

where \(i \in \left\{0, \; \cdots , \; \mbox{x.size}(0) - 1\right\}\) +and \(i \neq y\).

    +

    Optionally, you can give non-equal weighting on the classes by passing +a 1D weight tensor into the constructor. +The loss function then becomes:

    +

$$ + \mbox{loss}(x, y) = \frac{\sum_i \max(0, w[y] * (\mbox{margin} - x[y] + x[i]))^p}{\mbox{x.size}(0)} +$$
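This page ships without an Examples section; a minimal sketch with assumed toy shapes (3 samples, 5 classes; targets are integer class indices):

if (torch_is_installed()) {
  loss <- nn_multi_margin_loss()
  x <- torch_randn(3, 5)                               # scores for 3 samples, 5 classes
  y <- torch_tensor(c(1, 2, 3), dtype = torch_long())  # one target class per sample
  loss(x, y)
}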

    + +
    + +
    + + +
    + + +
    +

    Site built with pkgdown 1.6.1.

    +
    + +
    +
    + + + + + + + + diff --git a/reference/nn_multihead_attention.html b/reference/nn_multihead_attention.html index 277b3d21d4a7afbe00c9b7b659dd302ee091ac9a..025ae5847ee78bfe55fe25c6042c7f8a7780ec68 100644 --- a/reference/nn_multihead_attention.html +++ b/reference/nn_multihead_attention.html @@ -38,6 +38,8 @@ + + + + + + @@ -74,7 +86,7 @@ See reference: Attention Is All You Need" /> torch - 0.0.3 + 0.1.0 @@ -147,6 +159,9 @@ See reference: Attention Is All You Need" />
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -160,7 +175,24 @@ See reference: Attention Is All You Need" />
  • - +
  • Reference
  • @@ -199,16 +231,16 @@ from different representation subspaces. See reference: Attention Is All You Need

    -
    nn_multihead_attention(
    -  embed_dim,
    -  num_heads,
    -  dropout = 0,
    -  bias = TRUE,
    -  add_bias_kv = FALSE,
    -  add_zero_attn = FALSE,
    -  kdim = NULL,
    -  vdim = NULL
    -)
    +
    nn_multihead_attention(
    +  embed_dim,
    +  num_heads,
    +  dropout = 0,
    +  bias = TRUE,
    +  add_bias_kv = FALSE,
    +  add_zero_attn = FALSE,
    +  kdim = NULL,
    +  vdim = NULL
    +)

    Arguments

    @@ -290,15 +322,15 @@ L is the target sequence length, S is the source sequence length.

    Examples

    -
    if (torch_is_installed()) { -if (FALSE) { -multihead_attn = nn_multihead_attention(embed_dim, num_heads) -out <- multihead_attn(query, key, value) -attn_output <- out[[1]] -attn_output_weights <- out[[2]] -} - -} +
    if (torch_is_installed()) { +if (FALSE) { +multihead_attn = nn_multihead_attention(embed_dim, num_heads) +out <- multihead_attn(query, key, value) +attn_output <- out[[1]] +attn_output_weights <- out[[2]] +} + +}
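The shipped example is wrapped in if (FALSE) with undefined inputs; here is a runnable sketch under assumed toy dimensions (embed_dim = 16, num_heads = 2), with inputs in the (seq_len, batch, embed_dim) layout described above:

if (torch_is_installed()) {
  multihead_attn <- nn_multihead_attention(embed_dim = 16, num_heads = 2)
  query <- torch_randn(5, 3, 16)   # (target length L, batch N, embed dim E)
  key   <- torch_randn(7, 3, 16)   # (source length S, batch N, embed dim E)
  value <- torch_randn(7, 3, 16)
  out <- multihead_attn(query, key, value)
  attn_output <- out[[1]]          # shape (5, 3, 16)
  attn_output_weights <- out[[2]]  # shape (3, 5, 7)
}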
    + + + + + +
    reduction

    (string, optional): Specifies the reduction to apply to the output: +'none' | 'mean' | 'sum'. 'none': no reduction will be applied, +'mean': the sum of the output will be divided by the number of +elements in the output, 'sum': the output will be summed. Note: size_average +and reduce are in the process of being deprecated, and in the meantime, +specifying either of those two args will override reduction. Default: 'mean'

    + +

    Details

    + +

    $$ + \mbox{loss}(x, y) = \sum_{ij}\frac{\max(0, 1 - (x[y[j]] - x[i]))}{\mbox{x.size}(0)} +$$

    +

where \(x \in \left\{0, \; \cdots , \; \mbox{x.size}(0) - 1\right\}\), +\(y \in \left\{0, \; \cdots , \; \mbox{y.size}(0) - 1\right\}\), +\(0 \leq y[j] \leq \mbox{x.size}(0)-1\), +and \(i \neq y[j]\) for all \(i\) and \(j\). +\(y\) and \(x\) must have the same size.

    +

    The criterion only considers a contiguous block of non-negative targets that +starts at the front. +This allows for different samples to have variable amounts of target classes.

    +

    Shape

    + + + +
      +
    • Input: \((C)\) or \((N, C)\) where N is the batch size and C +is the number of classes.

    • +
    • Target: \((C)\) or \((N, C)\), label targets padded by -1 ensuring same shape as the input.

    • +
    • Output: scalar. If reduction is 'none', then \((N)\).

    • +
    + + +

    Examples

    +
    if (torch_is_installed()) { +loss <- nn_multilabel_margin_loss() +x <- torch_tensor(c(0.1, 0.2, 0.4, 0.8))$view(c(1,4)) +# for target y, only consider labels 4 and 1, not after label -1 +y <- torch_tensor(c(4, 1, -1, 2), dtype = torch_long())$view(c(1,4)) +loss(x, y) + +} +
    #> torch_tensor +#> 0.85 +#> [ CPUFloatType{} ]
    + + + + + +
    + + +
    +

    Site built with pkgdown 1.6.1.

    +
    + +
    + + + + + + + + + diff --git a/reference/nn_multilabel_soft_margin_loss.html b/reference/nn_multilabel_soft_margin_loss.html new file mode 100644 index 0000000000000000000000000000000000000000..f2648b4eb54514373be0fb00e7d330105551da0e --- /dev/null +++ b/reference/nn_multilabel_soft_margin_loss.html @@ -0,0 +1,303 @@ + + + + + + + + +Multi label soft margin loss — nn_multilabel_soft_margin_loss • torch + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    +
    + + + + +
    + +
    +
    + + +
    +

    Creates a criterion that optimizes a multi-label one-versus-all +loss based on max-entropy, between input \(x\) and target \(y\) of size +\((N, C)\).

    +
    + +
    nn_multilabel_soft_margin_loss(weight = NULL, reduction = "mean")
    + +

    Arguments

    + + + + + + + + + + +
    weight

    (Tensor, optional): a manual rescaling weight given to each +class. If given, it has to be a Tensor of size C. Otherwise, it is +treated as if having all ones.

    reduction

    (string, optional): Specifies the reduction to apply to the output: +'none' | 'mean' | 'sum'. 'none': no reduction will be applied, +'mean': the sum of the output will be divided by the number of +elements in the output, 'sum': the output will be summed. Note: size_average +and reduce are in the process of being deprecated, and in the meantime, +specifying either of those two args will override reduction. Default: 'mean'

    + +

    Details

    + +

    For each sample in the minibatch:

    +

    $$ + loss(x, y) = - \frac{1}{C} * \sum_i y[i] * \log((1 + \exp(-x[i]))^{-1}) ++ (1-y[i]) * \log\left(\frac{\exp(-x[i])}{(1 + \exp(-x[i]))}\right) +$$

    +

    where \(i \in \left\{0, \; \cdots , \; \mbox{x.nElement}() - 1\right\}\), +\(y[i] \in \left\{0, \; 1\right\}\).

    +

    Shape

    + + + +
      +
    • Input: \((N, C)\) where N is the batch size and C is the number of classes.

    • +
    • Target: \((N, C)\), label targets padded by -1 ensuring same shape as the input.

    • +
    • Output: scalar. If reduction is 'none', then \((N)\).

    • +
    + + +
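This page has no Examples section; a minimal sketch with assumed shapes, where targets are multi-hot 0/1 tensors of the same shape as the input:

if (torch_is_installed()) {
  loss <- nn_multilabel_soft_margin_loss()
  x <- torch_randn(3, 4, requires_grad = TRUE)  # N = 3 samples, C = 4 labels
  y <- torch_round(torch_rand(3, 4))            # random multi-hot 0/1 targets
  output <- loss(x, y)
  output$backward()
}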
    + +
    + + +
    + + +
    +

    Site built with pkgdown 1.6.1.

    +
    + +
    +
    + + + + + + + + diff --git a/reference/nn_nll_loss.html b/reference/nn_nll_loss.html new file mode 100644 index 0000000000000000000000000000000000000000..721f0d687ba8a96148a0648d5094a18567bb7a57 --- /dev/null +++ b/reference/nn_nll_loss.html @@ -0,0 +1,368 @@ + + + + + + + + +Nll loss — nn_nll_loss • torch + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    +
    + + + + +
    + +
    +
    + + +
    +

The negative log likelihood loss. It is useful for training a classification +problem with C classes.

    +
    + +
    nn_nll_loss(weight = NULL, ignore_index = -100, reduction = "mean")
    + +

    Arguments

    + + + + + + + + + + + + + + +
    weight

    (Tensor, optional): a manual rescaling weight given to each +class. If given, it has to be a Tensor of size C. Otherwise, it is +treated as if having all ones.

    ignore_index

    (int, optional): Specifies a target value that is ignored +and does not contribute to the input gradient.

    reduction

    (string, optional): Specifies the reduction to apply to the output: +'none' | 'mean' | 'sum'. 'none': no reduction will +be applied, 'mean': the weighted mean of the output is taken, +'sum': the output will be summed. Note: size_average +and reduce are in the process of being deprecated, and in +the meantime, specifying either of those two args will override +reduction. Default: 'mean'

    + +

    Details

    + +

    If provided, the optional argument weight should be a 1D Tensor assigning +weight to each of the classes. This is particularly useful when you have an +unbalanced training set.

    +

    The input given through a forward call is expected to contain +log-probabilities of each class. input has to be a Tensor of size either +\((minibatch, C)\) or \((minibatch, C, d_1, d_2, ..., d_K)\) +with \(K \geq 1\) for the K-dimensional case (described later).

    +

    Obtaining log-probabilities in a neural network is easily achieved by +adding a LogSoftmax layer in the last layer of your network.

    +

    You may use CrossEntropyLoss instead, if you prefer not to add an extra +layer.

    +

The target that this loss expects should be a class index in the range \([1, C]\) +where C = number of classes (class indices are 1-based in R torch); if ignore_index is specified, this loss also accepts +this class index (this index may not necessarily be in the class range).

    +

    The unreduced (i.e. with reduction set to 'none') loss can be described as:

    +

    $$ +\ell(x, y) = L = \{l_1,\dots,l_N\}^\top, \quad +l_n = - w_{y_n} x_{n,y_n}, \quad +w_{c} = \mbox{weight}[c] \cdot \mbox{1}\{c \not= \mbox{ignore\_index}\}, +$$

    +

    where \(x\) is the input, \(y\) is the target, \(w\) is the weight, and +\(N\) is the batch size. If reduction is not 'none' +(default 'mean'), then

    +

    $$ +\ell(x, y) = \begin{array}{ll} +\sum_{n=1}^N \frac{1}{\sum_{n=1}^N w_{y_n}} l_n, & + \mbox{if reduction} = \mbox{'mean';}\\ +\sum_{n=1}^N l_n, & + \mbox{if reduction} = \mbox{'sum'.} +\end{array} +$$

    +

    Can also be used for higher dimension inputs, such as 2D images, by providing +an input of size \((minibatch, C, d_1, d_2, ..., d_K)\) with \(K \geq 1\), +where \(K\) is the number of dimensions, and a target of appropriate shape +(see below). In the case of images, it computes NLL loss per-pixel.

    +

    Shape

    + + + +
      +
    • Input: \((N, C)\) where C = number of classes, or +\((N, C, d_1, d_2, ..., d_K)\) with \(K \geq 1\) +in the case of K-dimensional loss.

    • +
• Target: \((N)\) where each value is \(1 \leq \mbox{targets}[i] \leq C\), or +\((N, d_1, d_2, ..., d_K)\) with \(K \geq 1\) in the case of +K-dimensional loss.

    • +
    • Output: scalar.

    • +
    + +

    If reduction is 'none', then the same size as the target: \((N)\), or +\((N, d_1, d_2, ..., d_K)\) with \(K \geq 1\) in the case +of K-dimensional loss.

    + +

    Examples

    +
if (torch_is_installed()) { +m <- nn_log_softmax(dim=2) +loss <- nn_nll_loss() +# input is of size N x C = 3 x 5 +input <- torch_randn(3, 5, requires_grad=TRUE) +# each element in target has to be a class index in 1:C (R torch is 1-based) +target <- torch_tensor(c(2, 1, 5), dtype = torch_long()) +output <- loss(m(input), target) +output$backward() + +# 2D loss example (used, for example, with image inputs) +N <- 5 +C <- 4 +loss <- nn_nll_loss() +# input is of size N x C x height x width +data <- torch_randn(N, 16, 10, 10) +conv <- nn_conv2d(16, C, c(3, 3)) +# softmax over the class dimension (dim 2 in the N x C x H x W layout) +m <- nn_log_softmax(dim=2) +# each element in target has to be a class index in 1:C (R torch is 1-based) +target <- torch_empty(N, 8, 8, dtype=torch_long())$random_(1, C) +output <- loss(m(conv(data)), target) +output$backward() + +} +
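A hedged cross-check reusing input and target from the first part of the example: log-softmax followed by NLL is exactly the cross-entropy loss.

if (torch_is_installed()) {
  nnf_cross_entropy(input, target)  # should match loss(m(input), target) above
}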
    +
    + +
    + + +
    + + +
    +

    Site built with pkgdown 1.6.1.

    +
    + +
    +
    + + + + + + + + diff --git a/reference/nn_pairwise_distance.html b/reference/nn_pairwise_distance.html new file mode 100644 index 0000000000000000000000000000000000000000..6d92168e3d0cff1818bd647c283b3a9e759f0a80 --- /dev/null +++ b/reference/nn_pairwise_distance.html @@ -0,0 +1,305 @@ + + + + + + + + +Pairwise distance — nn_pairwise_distance • torch + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    +
    + + + + +
    + +
    +
    + + +
    +

    Computes the batchwise pairwise distance between vectors \(v_1\), \(v_2\) +using the p-norm:

    +
    + +
    nn_pairwise_distance(p = 2, eps = 1e-06, keepdim = FALSE)
    + +

    Arguments

    + + + + + + + + + + + + + + +
    p

    (real): the norm degree. Default: 2

    eps

    (float, optional): Small value to avoid division by zero. +Default: 1e-6

    keepdim

    (bool, optional): Determines whether or not to keep the vector dimension. +Default: FALSE

    + +

    Details

    + +

    $$ + \Vert x \Vert _p = \left( \sum_{i=1}^n \vert x_i \vert ^ p \right) ^ {1/p}. +$$

    +

    Shape

    + + + +
      +
    • Input1: \((N, D)\) where D = vector dimension

    • +
    • Input2: \((N, D)\), same shape as the Input1

    • +
    • Output: \((N)\). If keepdim is TRUE, then \((N, 1)\).

    • +
    + + +

    Examples

    +
    if (torch_is_installed()) { +pdist <- nn_pairwise_distance(p=2) +input1 <- torch_randn(100, 128) +input2 <- torch_randn(100, 128) +output <- pdist(input1, input2) + +} +
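A follow-up on keepdim, reusing the inputs above: with keepdim = TRUE the reduced vector dimension is retained, so the output is (N, 1) instead of (N).

if (torch_is_installed()) {
  pdist_keep <- nn_pairwise_distance(p = 2, keepdim = TRUE)
  pdist_keep(input1, input2)$size()  # 100 1
}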
    +
    + +
    + + +
    + + +
    +

    Site built with pkgdown 1.6.1.

    +
    + +
    +
    + + + + + + + + diff --git a/reference/nn_parameter.html b/reference/nn_parameter.html new file mode 100644 index 0000000000000000000000000000000000000000..c1597c2df61cff30aebe307e20bb82ce9f028184 --- /dev/null +++ b/reference/nn_parameter.html @@ -0,0 +1,274 @@ + + + + + + + + +Creates an nn_parameter — nn_parameter • torch + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    +
    + + + + +
    + +
    +
    + + +
    +

Indicates to nn_module that x is a parameter.

    +
    + +
    nn_parameter(x, requires_grad = TRUE)
    + +

    Arguments

    + + + + + + + + + + +
    x

    the tensor that you want to indicate as parameter

    requires_grad

    whether this parameter should have +requires_grad = TRUE

    + + +
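A minimal sketch of the intended use: wrapping a tensor in nn_parameter() inside an nn_module's initialize() registers it, so it appears among the module's parameters and is updated by optimizers. The hand-rolled linear layer below is an illustrative assumption, not library code.

if (torch_is_installed()) {
  my_linear <- nn_module(
    initialize = function(in_features, out_features) {
      self$w <- nn_parameter(torch_randn(in_features, out_features))
      self$b <- nn_parameter(torch_zeros(out_features))
    },
    forward = function(x) {
      torch_mm(x, self$w) + self$b
    }
  )
  m <- my_linear(4, 3)
  names(m$parameters)  # "w" "b" -- registered automatically
}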
    + +
    + + +
    + + +
    +

    Site built with pkgdown 1.6.1.

    +
    + +
    +
    + + + + + + + + diff --git a/reference/nn_poisson_nll_loss.html b/reference/nn_poisson_nll_loss.html new file mode 100644 index 0000000000000000000000000000000000000000..3d2fe77a30f19d213443f913d7c937067ce6c340 --- /dev/null +++ b/reference/nn_poisson_nll_loss.html @@ -0,0 +1,330 @@ + + + + + + + + +Poisson NLL loss — nn_poisson_nll_loss • torch + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    +
    + + + + +
    + +
    +
    + + +
    +

Negative log likelihood loss with a Poisson distribution of the target. +The loss can be described as:

    +
    + +
    nn_poisson_nll_loss(
    +  log_input = TRUE,
    +  full = FALSE,
    +  eps = 1e-08,
    +  reduction = "mean"
    +)
    + +

    Arguments

    + + + + + + + + + + + + + + + + + + +
    log_input

    (bool, optional): if TRUE the loss is computed as +\(\exp(\mbox{input}) - \mbox{target}*\mbox{input}\), if FALSE the loss is +\(\mbox{input} - \mbox{target}*\log(\mbox{input}+\mbox{eps})\).

    full

(bool, optional): whether to compute the full loss, i.e., to add the +Stirling approximation term +\(\mbox{target}*\log(\mbox{target}) - \mbox{target} + 0.5 * \log(2\pi\mbox{target})\).

    eps

    (float, optional): Small value to avoid evaluation of \(\log(0)\) when +log_input = FALSE. Default: 1e-8

    reduction

    (string, optional): Specifies the reduction to apply to the output: +'none' | 'mean' | 'sum'. 'none': no reduction will be applied, +'mean': the sum of the output will be divided by the number of +elements in the output, 'sum': the output will be summed. Note: size_average +and reduce are in the process of being deprecated, and in the meantime, +specifying either of those two args will override reduction. Default: 'mean'

    + +

    Details

    + +

$$ +\mbox{target} \sim \mathrm{Poisson}(\mbox{input}) \\ +\mbox{loss}(\mbox{input}, \mbox{target}) = \mbox{input} - \mbox{target} * \log(\mbox{input}) ++ \log(\mbox{target!}) +$$

    +

The last term can be omitted or approximated with the Stirling formula. The +approximation is used for target values greater than 1. For targets less than or +equal to 1, zeros are added to the loss.

    +

    Shape

    + + + +
      +
    • Input: \((N, *)\) where \(*\) means, any number of additional +dimensions

    • +
    • Target: \((N, *)\), same shape as the input

    • +
    • Output: scalar by default. If reduction is 'none', then \((N, *)\), +the same shape as the input

    • +
    + + +

    Examples

    +
    if (torch_is_installed()) { +loss <- nn_poisson_nll_loss() +log_input <- torch_randn(5, 2, requires_grad=TRUE) +target <- torch_randn(5, 2) +output <- loss(log_input, target) +output$backward() + +} +
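A hedged follow-up showing log_input = FALSE, where the loss expects raw positive rates and eps guards against log(0); the pseudo-count targets below are illustrative assumptions:

if (torch_is_installed()) {
  loss_raw <- nn_poisson_nll_loss(log_input = FALSE)
  rate <- torch_exp(torch_randn(5, 2))                  # strictly positive rates
  counts <- torch_round(torch_exp(torch_randn(5, 2)))   # non-negative pseudo-counts
  loss_raw(rate, counts)
}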
    +
    + +
    + + +
    + + +
    +

    Site built with pkgdown 1.6.1.

    +
    + +
    +
    + + + + + + + + diff --git a/reference/nn_prelu.html b/reference/nn_prelu.html index 0002809059d8ec7e5cc5b151d6739c1472ec9c61..a7d3c7e75204946e280dd353650a8327bdf4b541 100644 --- a/reference/nn_prelu.html +++ b/reference/nn_prelu.html @@ -38,6 +38,8 @@ + + + + + + @@ -84,7 +96,7 @@ $$" /> torch - 0.0.3 + 0.1.0 @@ -157,6 +169,9 @@ $$" />
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -170,7 +185,24 @@ $$" />
  • - +
  • Reference
  • @@ -219,7 +251,7 @@ ax, & \mbox{ otherwise } $$

    -
    nn_prelu(num_parameters = 1, init = 0.25)
    +
    nn_prelu(num_parameters = 1, init = 0.25)

    Arguments

    @@ -266,12 +298,12 @@ dimensions

    Examples

    -
    if (torch_is_installed()) { -m <- nn_prelu() -input <- torch_randn(2) -output <- m(input) +
    if (torch_is_installed()) { +m <- nn_prelu() +input <- torch_randn(2) +output <- m(input) -} +}
    @@ -146,6 +158,9 @@ $$\mbox{ReLU}(x) = (x)^+ = \max(0, x)$$" />
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -159,7 +174,24 @@ $$\mbox{ReLU}(x) = (x)^+ = \max(0, x)$$" />
  • - +
  • Reference
  • @@ -197,7 +229,7 @@ $$\mbox{ReLU}(x) = (x)^+ = \max(0, x)$$" /> $$\mbox{ReLU}(x) = (x)^+ = \max(0, x)$$

    -
    nn_relu(inplace = FALSE)
    +
    nn_relu(inplace = FALSE)

    Arguments

    @@ -220,15 +252,15 @@ dimensions

    Examples

    -
    if (torch_is_installed()) { -m <- nn_relu() -input <- torch_randn(2) -m(input) +
    if (torch_is_installed()) { +m <- nn_relu() +input <- torch_randn(2) +m(input) -} +}
    #> torch_tensor -#> 0.1347 -#> 0.1303 +#> 0.1047 +#> 0.5221 #> [ CPUFloatType{2} ]
    @@ -145,6 +157,9 @@
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -158,7 +173,24 @@
  • - +
  • Reference
  • @@ -195,7 +227,7 @@

    Applies the element-wise function:

    -
    nn_relu6(inplace = FALSE)
    +
    nn_relu6(inplace = FALSE)

    Arguments

    @@ -223,12 +255,12 @@ dimensions

    Examples

    -
    if (torch_is_installed()) { -m <- nn_relu6() -input <- torch_randn(2) -output <- m(input) +
    if (torch_is_installed()) { +m <- nn_relu6() +input <- torch_randn(2) +output <- m(input) -} +}
    @@ -146,6 +158,9 @@ to an input sequence." />
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -159,7 +174,24 @@ to an input sequence." />
  • - +
  • Reference
  • @@ -197,17 +229,17 @@ to an input sequence." /> to an input sequence.

    -
    nn_rnn(
    -  input_size,
    -  hidden_size,
    -  num_layers = 1,
    -  nonlinearity = NULL,
    -  bias = TRUE,
    -  batch_first = FALSE,
    -  dropout = 0,
    -  bidirectional = FALSE,
    -  ...
    -)
    +
    nn_rnn(
    +  input_size,
    +  hidden_size,
    +  num_layers = 1,
    +  nonlinearity = NULL,
    +  bias = TRUE,
    +  batch_first = FALSE,
    +  dropout = 0,
    +  bidirectional = FALSE,
    +  ...
    +)

    Arguments

    @@ -345,108 +377,108 @@ of shape (hidden_size)

    where \(k = \frac{1}{\mbox{hidden\_size}}\)

    Examples

    -
    if (torch_is_installed()) { -rnn <- nn_rnn(10, 20, 2) -input <- torch_randn(5, 3, 10) -h0 <- torch_randn(2, 3, 20) -rnn(input, h0) +
    if (torch_is_installed()) { +rnn <- nn_rnn(10, 20, 2) +input <- torch_randn(5, 3, 10) +h0 <- torch_randn(2, 3, 20) +rnn(input, h0) -} +}
    #> [[1]] #> torch_tensor #> (1,.,.) = -#> Columns 1 to 9 0.0589 0.6076 0.5733 0.4325 -0.7804 -0.5884 -0.7990 -0.5201 -0.5453 -#> -0.3423 -0.1028 0.8203 -0.4395 -0.8638 -0.2453 -0.7419 -0.5734 -0.9229 -#> -0.0001 0.2485 -0.1473 0.5797 -0.1626 -0.1213 -0.3884 -0.5131 0.2432 +#> Columns 1 to 9 0.7161 0.2524 -0.2267 -0.2711 -0.2845 -0.4067 0.5843 -0.1285 -0.5132 +#> 0.6516 0.7333 0.6177 0.2089 -0.5074 0.5751 0.5784 0.6827 0.0714 +#> 0.2538 0.6949 -0.0763 0.5675 -0.3406 -0.1142 0.4442 -0.0423 -0.0378 #> -#> Columns 10 to 18 0.2049 0.6141 0.0159 -0.5202 -0.0390 0.8628 0.0559 -0.6653 0.5929 -#> 0.1484 -0.3955 0.4318 -0.6923 0.6768 0.9427 -0.5935 -0.1124 0.8008 -#> -0.1115 0.3922 -0.0407 0.7891 -0.1390 0.3705 -0.2486 0.8745 -0.3595 +#> Columns 10 to 18 -0.1102 0.6036 0.1749 -0.0608 -0.4173 -0.0102 -0.2940 0.4289 -0.1320 +#> -0.3589 -0.2381 0.0580 0.7796 0.0998 -0.2888 -0.1735 -0.8531 -0.4652 +#> 0.1831 0.0378 -0.6822 -0.2816 0.1889 -0.2580 -0.0010 -0.2612 0.1988 #> -#> Columns 19 to 20 0.0777 -0.9009 -#> -0.0200 -0.2583 -#> -0.1381 -0.4595 +#> Columns 19 to 20 0.3093 -0.4167 +#> 0.0837 0.0514 +#> 0.7636 0.3214 #> #> (2,.,.) = -#> Columns 1 to 9 -0.5747 -0.2374 0.3985 0.6116 -0.7816 0.0973 -0.2544 -0.2252 -0.2584 -#> -0.2011 -0.3030 0.4559 0.2132 -0.6755 0.1609 -0.8239 -0.2612 -0.6095 -#> -0.5134 0.3075 -0.2427 0.1875 -0.3368 0.1697 -0.3024 -0.5115 0.0946 +#> Columns 1 to 9 0.5985 0.0873 0.5781 -0.1434 -0.1528 0.1490 0.2079 0.7476 -0.1045 +#> 0.3875 0.0819 0.5025 0.3552 -0.2228 0.5654 -0.0206 0.6165 -0.0171 +#> 0.4864 0.6804 0.0500 0.3966 -0.0198 -0.0722 -0.0076 0.4736 -0.2741 #> -#> Columns 10 to 18 0.5029 -0.4663 -0.6517 -0.3916 -0.3694 -0.3079 0.0697 0.4641 0.4321 -#> 0.8177 0.0184 -0.5879 -0.3717 -0.0954 0.1382 -0.1993 0.5621 -0.1939 -#> 0.3419 -0.0541 0.0665 0.0764 0.6821 0.0040 0.0504 0.6075 0.0703 +#> Columns 10 to 18 -0.1348 0.2238 0.0753 -0.3945 0.1603 0.2999 0.2501 -0.5067 -0.6350 +#> -0.3108 -0.3115 -0.2025 0.3937 0.4892 -0.2319 -0.2614 -0.6291 0.0740 +#> -0.5959 -0.0322 -0.2894 0.1512 0.3782 -0.1010 0.1482 -0.3825 0.4464 #> -#> Columns 19 to 20 -0.1393 -0.0169 -#> -0.2283 0.0531 -#> -0.0904 0.2892 +#> Columns 19 to 20 0.2921 -0.1500 +#> 0.6855 -0.0422 +#> 0.9088 -0.0266 #> #> (3,.,.) = -#> Columns 1 to 9 0.4901 0.5564 0.4157 -0.1239 -0.3616 -0.3069 -0.7718 0.2739 0.1213 -#> 0.4148 0.1776 0.2237 0.0323 -0.5166 -0.3952 -0.6464 0.1560 0.1433 -#> 0.3964 0.0038 0.3253 0.0715 0.1306 -0.0327 -0.2969 0.1245 -0.0192 +#> Columns 1 to 9 0.6381 0.1431 0.4164 0.3592 0.0609 0.3689 0.1772 0.4760 0.0470 +#> 0.4289 -0.1033 0.4358 0.1910 -0.0969 0.5367 0.1665 0.4100 0.0255 +#> 0.2304 0.1687 0.5151 0.1998 0.2694 0.3913 0.2137 -0.1146 -0.1561 #> -#> Columns 10 to 18 0.7138 -0.0042 0.1386 -0.0459 -0.4418 0.2562 -0.1983 0.0256 0.4770 -#> 0.7310 0.4867 -0.2056 -0.4410 -0.1978 0.4279 -0.1295 0.2951 0.4745 -#> 0.6958 0.5029 0.0787 -0.2960 0.3043 0.3281 -0.3449 0.3081 0.4197 +#> Columns 10 to 18 -0.4619 -0.2263 -0.2680 0.3368 0.2228 0.1836 0.2800 -0.6207 -0.2572 +#> -0.1175 -0.2322 -0.6100 0.0511 0.2566 -0.2520 -0.2023 -0.6280 -0.1511 +#> -0.3149 -0.2550 -0.2557 0.1514 -0.2146 0.2289 0.2341 -0.7046 -0.1510 #> -#> Columns 19 to 20 0.1745 -0.1350 -#> 0.0668 -0.4015 -#> 0.5455 0.3373 +#> Columns 19 to 20 0.4382 0.3621 +#> 0.4396 0.0057 +#> 0.6035 -0.3589 #> #> (4,.,.) 
= -#> Columns 1 to 9 -0.1242 -0.1825 0.0703 0.3368 -0.1879 -0.6188 -0.3606 0.2400 -0.4979 -#> -0.2285 -0.1238 0.2816 0.7158 -0.3795 -0.1800 -0.0903 0.0951 -0.4055 -#> -0.4161 0.1562 0.1320 0.5917 -0.2003 0.5784 -0.1302 -0.1113 -0.7354 +#> Columns 1 to 9 0.2884 -0.0291 0.1229 0.0041 -0.1166 0.3444 0.0175 0.3235 0.0658 +#> 0.6147 0.4537 0.1856 0.1567 -0.3019 0.1505 0.2564 0.5269 0.1842 +#> 0.4272 0.2199 0.5595 0.2342 -0.1921 -0.1427 0.3725 0.0805 0.0195 #> -#> Columns 10 to 18 0.6470 0.0346 -0.6063 -0.0664 0.3145 0.3909 -0.3163 0.0040 0.6242 -#> 0.3401 -0.2107 -0.5455 -0.4844 -0.0769 0.0620 -0.1112 0.5133 0.6606 -#> 0.4108 -0.3172 -0.3713 -0.5535 0.3277 0.0108 -0.0800 0.5410 0.1190 +#> Columns 10 to 18 -0.0812 -0.0068 -0.1565 -0.0358 -0.0224 -0.2966 -0.2252 -0.4043 0.0966 +#> -0.3598 -0.1854 -0.3976 0.0953 0.1748 -0.5111 0.1526 -0.4405 -0.2781 +#> -0.3753 0.0663 -0.1238 -0.0727 -0.0397 -0.0635 0.0157 -0.0993 -0.1252 #> -#> Columns 19 to 20 -0.1511 -0.0390 -#> 0.2056 0.1060 -#> 0.1649 0.4299 +#> Columns 19 to 20 0.5735 -0.1105 +#> 0.7309 0.1156 +#> 0.7796 0.3005 #> #> (5,.,.) = -#> Columns 1 to 9 -0.0957 0.3769 0.1599 0.2209 -0.5452 -0.2272 -0.5200 -0.0675 -0.3636 -#> 0.3314 0.3243 0.5617 0.3386 -0.1621 0.4163 -0.6332 -0.0253 -0.0027 -#> 0.1690 0.3272 0.5195 -0.1727 -0.1203 -0.0701 -0.5596 -0.2452 0.1866 +#> Columns 1 to 9 0.6705 0.3787 0.4285 0.3407 0.0861 0.0279 0.3407 0.3227 -0.1617 +#> 0.5895 0.1572 0.6036 0.4525 0.0566 0.3992 0.5467 0.0065 -0.2110 +#> 0.1297 -0.0524 0.5491 0.2352 0.2431 0.4329 -0.1639 0.5197 -0.1087 #> -#> Columns 10 to 18 0.7646 0.0595 -0.1395 -0.1890 0.1511 0.3273 -0.2851 0.1429 0.4489 -#> 0.7122 0.0548 -0.1359 -0.2292 -0.1363 -0.1930 -0.3282 0.5265 0.3602 -#> 0.7579 0.1487 0.1720 -0.0712 -0.0081 0.3472 -0.1236 0.3269 0.3627 +#> Columns 10 to 18 -0.1388 -0.0327 -0.5949 0.0944 -0.1133 -0.1225 0.1684 -0.6087 -0.1145 +#> -0.3481 -0.1070 -0.5201 0.3679 0.1345 0.1734 0.1243 -0.5208 0.0556 +#> -0.4741 -0.3434 -0.3230 -0.2729 0.3473 0.0649 0.2237 -0.2233 -0.0649 #> -#> Columns 19 to 20 -0.3324 0.0199 -#> 0.4319 0.0719 -#> 0.0541 -0.1996 +#> Columns 19 to 20 0.6030 0.2479 +#> 0.6858 -0.1467 +#> 0.6513 -0.2089 #> [ CPUFloatType{5,3,20} ] #> #> [[2]] #> torch_tensor #> (1,.,.) = -#> Columns 1 to 9 -0.0859 0.2441 -0.3785 0.4773 -0.6194 0.7348 -0.2389 0.3932 -0.5751 -#> -0.3461 -0.4299 -0.5382 -0.0556 0.2770 -0.4997 0.3906 -0.4267 -0.2962 -#> 0.0901 -0.0487 -0.2221 0.7402 -0.3091 0.1068 0.2309 0.0844 -0.8371 +#> Columns 1 to 9 0.0992 -0.6436 -0.3197 0.6220 0.2256 -0.7712 0.2246 -0.1414 0.3241 +#> 0.3420 -0.6408 -0.3408 0.5502 -0.2351 -0.0798 0.3823 -0.7235 0.1726 +#> -0.1093 -0.3336 -0.2989 0.4091 0.1061 0.5667 -0.4299 -0.3630 0.3574 #> -#> Columns 10 to 18 -0.7465 -0.0237 -0.5216 0.2091 0.7810 0.0142 -0.0300 0.0860 -0.4565 -#> 0.4484 -0.2549 0.6987 -0.4158 -0.6085 -0.0974 0.0892 -0.3520 0.0667 -#> -0.1097 -0.0399 0.2584 -0.1982 0.0520 -0.0103 -0.0936 0.3060 0.0546 +#> Columns 10 to 18 0.2762 -0.1739 -0.1027 0.3692 -0.6185 -0.1532 -0.2170 0.0237 -0.4784 +#> 0.6071 -0.0503 -0.7492 0.4626 -0.3258 0.1729 -0.4063 0.0940 -0.1114 +#> 0.0344 -0.3348 0.5308 -0.3231 0.3965 -0.0367 -0.0665 -0.2350 0.4546 #> -#> Columns 19 to 20 0.1902 0.0056 -#> 0.3507 0.1104 -#> -0.0194 0.3957 +#> Columns 19 to 20 0.5460 -0.0594 +#> 0.1078 0.4941 +#> -0.2752 0.2571 #> #> (2,.,.) 
= -#> Columns 1 to 9 -0.0957 0.3769 0.1599 0.2209 -0.5452 -0.2272 -0.5200 -0.0675 -0.3636 -#> 0.3314 0.3243 0.5617 0.3386 -0.1621 0.4163 -0.6332 -0.0253 -0.0027 -#> 0.1690 0.3272 0.5195 -0.1727 -0.1203 -0.0701 -0.5596 -0.2452 0.1866 +#> Columns 1 to 9 0.6705 0.3787 0.4285 0.3407 0.0861 0.0279 0.3407 0.3227 -0.1617 +#> 0.5895 0.1572 0.6036 0.4525 0.0566 0.3992 0.5467 0.0065 -0.2110 +#> 0.1297 -0.0524 0.5491 0.2352 0.2431 0.4329 -0.1639 0.5197 -0.1087 #> -#> Columns 10 to 18 0.7646 0.0595 -0.1395 -0.1890 0.1511 0.3273 -0.2851 0.1429 0.4489 -#> 0.7122 0.0548 -0.1359 -0.2292 -0.1363 -0.1930 -0.3282 0.5265 0.3602 -#> 0.7579 0.1487 0.1720 -0.0712 -0.0081 0.3472 -0.1236 0.3269 0.3627 +#> Columns 10 to 18 -0.1388 -0.0327 -0.5949 0.0944 -0.1133 -0.1225 0.1684 -0.6087 -0.1145 +#> -0.3481 -0.1070 -0.5201 0.3679 0.1345 0.1734 0.1243 -0.5208 0.0556 +#> -0.4741 -0.3434 -0.3230 -0.2729 0.3473 0.0649 0.2237 -0.2233 -0.0649 #> -#> Columns 19 to 20 -0.3324 0.0199 -#> 0.4319 0.0719 -#> 0.0541 -0.1996 +#> Columns 19 to 20 0.6030 0.2479 +#> 0.6858 -0.1467 +#> 0.6513 -0.2089 #> [ CPUFloatType{2,3,20} ] #>
    diff --git a/reference/nn_rrelu.html b/reference/nn_rrelu.html index 366c47be2d9b48be5db032ea09b0a0ca4e645f42..8c4c3937a5b7259ff467b0d8177a696c04fb8a59 100644 --- a/reference/nn_rrelu.html +++ b/reference/nn_rrelu.html @@ -38,6 +38,8 @@ + + + + + + @@ -73,7 +85,7 @@ as described in the paper:" /> torch - 0.0.3 + 0.1.0 @@ -146,6 +158,9 @@ as described in the paper:" />
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -159,7 +174,24 @@ as described in the paper:" />
  • - +
  • Reference
  • @@ -197,7 +229,7 @@ as described in the paper:" /> as described in the paper:

    -
    nn_rrelu(lower = 1/8, upper = 1/3, inplace = FALSE)
    +
    nn_rrelu(lower = 1/8, upper = 1/3, inplace = FALSE)

    Arguments

    @@ -243,15 +275,15 @@ dimensions

    Examples

    -
    if (torch_is_installed()) { -m <- nn_rrelu(0.1, 0.3) -input <- torch_randn(2) -m(input) +
    if (torch_is_installed()) { +m <- nn_rrelu(0.1, 0.3) +input <- torch_randn(2) +m(input) -} +}
    #> torch_tensor -#> 0.5388 -#> 1.3588 +#> 1.9403 +#> 0.6835 #> [ CPUFloatType{2} ]
    @@ -145,6 +157,9 @@
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -158,7 +173,24 @@
  • - +
  • Reference
  • @@ -195,7 +227,7 @@

    Applied element-wise, as:

    -
    nn_selu(inplace = FALSE)
    +
    nn_selu(inplace = FALSE)

    Arguments

    @@ -227,12 +259,12 @@ dimensions

    Examples

    -
    if (torch_is_installed()) { -m <- nn_selu() -input <- torch_randn(2) -output <- m(input) +
    if (torch_is_installed()) { +m <- nn_selu() +input <- torch_randn(2) +output <- m(input) -} +}
    @@ -147,6 +159,9 @@ See examples." />
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -160,7 +175,24 @@ See examples." />
  • - +
  • Reference
  • @@ -199,7 +231,7 @@ Modules will be added to it in the order they are passed in the constructor. See examples.

    -
    nn_sequential(..., name = NULL)
    +
    nn_sequential(..., name = NULL)

    Arguments

    @@ -216,18 +248,18 @@ See examples.

    Examples

    -
    if (torch_is_installed()) { - -model <- nn_sequential( - nn_conv2d(1, 20, 5), - nn_relu(), - nn_conv2d(20, 64, 5), - nn_relu() -) -input <- torch_randn(32, 1, 28, 28) -output <- model(input) - -} +
    if (torch_is_installed()) { + +model <- nn_sequential( + nn_conv2d(1, 20, 5), + nn_relu(), + nn_conv2d(20, 64, 5), + nn_relu() +) +input <- torch_randn(32, 1, 28, 28) +output <- model(input) + +}
    @@ -145,6 +157,9 @@
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -158,7 +173,24 @@
  • - +
  • Reference
  • @@ -195,7 +227,7 @@

    Applies the element-wise function:

    -
    nn_sigmoid()
    +
    nn_sigmoid()

    Details

    @@ -215,12 +247,12 @@ dimensions

    Examples

    -
    if (torch_is_installed()) { -m <- nn_sigmoid() -input <- torch_randn(2) -output <- m(input) +
    if (torch_is_installed()) { +m <- nn_sigmoid() +input <- torch_randn(2) +output <- m(input) -} +}
    + + + + + +
    reduction

    (string, optional): Specifies the reduction to apply to the output: +'none' | 'mean' | 'sum'. 'none': no reduction will be applied, +'mean': the sum of the output will be divided by the number of +elements in the output, 'sum': the output will be summed. Note: size_average +and reduce are in the process of being deprecated, and in the meantime, +specifying either of those two args will override reduction. Default: 'mean'

    + +

    Details

    + +

    $$ + \mbox{loss}(x, y) = \frac{1}{n} \sum_{i} z_{i} +$$

    +

    where \(z_{i}\) is given by:

    +

    $$ + z_{i} = + \begin{array}{ll} +0.5 (x_i - y_i)^2, & \mbox{if } |x_i - y_i| < 1 \\ +|x_i - y_i| - 0.5, & \mbox{otherwise } +\end{array} +$$

    +

\(x\) and \(y\) are tensors of arbitrary shapes with a total of \(n\) elements each. +The sum operation still operates over all the elements, and divides by \(n\). +The division by \(n\) can be avoided if one sets reduction = 'sum'.

    +

    Shape

    + + + +
      +
    • Input: \((N, *)\) where \(*\) means, any number of additional +dimensions

    • +
    • Target: \((N, *)\), same shape as the input

    • +
    • Output: scalar. If reduction is 'none', then +\((N, *)\), same shape as the input

    • +
    + + + + + + + +
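There is no Examples section on this page; assuming it documents nn_smooth_l1_loss (the piecewise formula above is the smooth L1 / Huber form), a minimal sketch:

if (torch_is_installed()) {
  loss <- nn_smooth_l1_loss()
  input <- torch_randn(3, 5, requires_grad = TRUE)
  target <- torch_randn(3, 5)
  output <- loss(input, target)
  output$backward()
}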
    + + +
    +

    Site built with pkgdown 1.6.1.

    +
    + +
    + + + + + + + + + diff --git a/reference/nn_soft_margin_loss.html b/reference/nn_soft_margin_loss.html new file mode 100644 index 0000000000000000000000000000000000000000..46246b9877d2c7cc5f30d1d8bb469cb9695907b4 --- /dev/null +++ b/reference/nn_soft_margin_loss.html @@ -0,0 +1,294 @@ + + + + + + + + +Soft margin loss — nn_soft_margin_loss • torch + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    +
    + + + + +
    + +
    +
    + + +
    +

    Creates a criterion that optimizes a two-class classification +logistic loss between input tensor \(x\) and target tensor \(y\) +(containing 1 or -1).

    +
    + +
    nn_soft_margin_loss(reduction = "mean")
    + +

    Arguments

    + + + + + + +
    reduction

    (string, optional): Specifies the reduction to apply to the output: +'none' | 'mean' | 'sum'. 'none': no reduction will be applied, +'mean': the sum of the output will be divided by the number of +elements in the output, 'sum': the output will be summed. Note: size_average +and reduce are in the process of being deprecated, and in the meantime, +specifying either of those two args will override reduction. Default: 'mean'

    + +

    Details

    + +

    $$ + \mbox{loss}(x, y) = \sum_i \frac{\log(1 + \exp(-y[i]*x[i]))}{\mbox{x.nelement}()} +$$

    +

    Shape

    + + + +
      +
    • Input: \((*)\) where \(*\) means, any number of additional +dimensions

    • +
    • Target: \((*)\), same shape as the input

    • +
    • Output: scalar. If reduction is 'none', then same shape as the input

    • +
    + + +
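A minimal sketch with assumed shapes; as required above, targets contain only 1 and -1:

if (torch_is_installed()) {
  loss <- nn_soft_margin_loss()
  input <- torch_randn(3, 5, requires_grad = TRUE)
  target <- torch_sign(torch_randn(3, 5))  # random +/-1 targets (0 has ~zero probability)
  output <- loss(input, target)
  output$backward()
}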
    + +
    + + +
    + + +
    +

    Site built with pkgdown 1.6.1.

    +
    + +
    +
    + + + + + + + + diff --git a/reference/nn_softmax.html b/reference/nn_softmax.html index 74bfe04e3daf5d08971988bdbba9aef42188c49e..dc85e788b2e080106c50e17943daa895aee8f7a9 100644 --- a/reference/nn_softmax.html +++ b/reference/nn_softmax.html @@ -38,6 +38,8 @@ + + + + + + @@ -75,7 +87,7 @@ Softmax is defined as:" /> torch - 0.0.3 + 0.1.0 @@ -148,6 +160,9 @@ Softmax is defined as:" />
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -161,7 +176,24 @@ Softmax is defined as:" />
  • - +
  • Reference
  • @@ -201,7 +233,7 @@ lie in the range [0,1] and sum to 1. Softmax is defined as:

    -
    nn_softmax(dim)
    +
    nn_softmax(dim)

    Arguments

    @@ -242,12 +274,12 @@ dimensions

    Examples

    -
    if (torch_is_installed()) { -m <- nn_softmax(1) -input <- torch_randn(2, 3) -output <- m(input) +
    if (torch_is_installed()) { +m <- nn_softmax(1) +input <- torch_randn(2, 3) +output <- m(input) -} +}
    @@ -147,6 +159,9 @@ apply Softmax to each location \((Channels, h_i, w_j)\)" />
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -160,7 +175,24 @@ apply Softmax to each location \((Channels, h_i, w_j)\)" />
  • - +
  • Reference
  • @@ -199,7 +231,7 @@ When given an image of Channels x Height x Width, it will apply Softmax to each location \((Channels, h_i, w_j)\)

    -
    nn_softmax2d()
    +
    nn_softmax2d()

    Value

    @@ -217,12 +249,12 @@ values in the range [0, 1]

    Examples

    -
    if (torch_is_installed()) { -m <- nn_softmax2d() -input <- torch_randn(2, 3, 12, 13) -output <- m(input) +
    if (torch_is_installed()) { +m <- nn_softmax2d() +input <- torch_randn(2, 3, 12, 13) +output <- m(input) -} +}
    @@ -148,6 +160,9 @@ Softmin is defined as:" />
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -161,7 +176,24 @@ Softmin is defined as:" />
  • - +
  • Reference
  • @@ -201,7 +233,7 @@ lie in the range [0, 1] and sum to 1. Softmin is defined as:

    -
    nn_softmin(dim)
    +
    nn_softmin(dim)

    Arguments

    @@ -234,12 +266,12 @@ dimensions

    Examples

    -
    if (torch_is_installed()) { -m <- nn_softmin(dim = 1) -input <- torch_randn(2, 2) -output <- m(input) +
    if (torch_is_installed()) { +m <- nn_softmin(dim = 1) +input <- torch_randn(2, 2) +output <- m(input) -} +}
    @@ -148,6 +160,9 @@ $$" />
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -161,7 +176,24 @@ $$" />
  • - +
  • Reference
  • @@ -201,7 +233,7 @@ $$ $$

    -
    nn_softplus(beta = 1, threshold = 20)
    +
    nn_softplus(beta = 1, threshold = 20)

    Arguments

    @@ -234,12 +266,12 @@ dimensions

    Examples

    -
    if (torch_is_installed()) { -m <- nn_softplus() -input <- torch_randn(2) -output <- m(input) +
    if (torch_is_installed()) { +m <- nn_softplus() +input <- torch_randn(2) +output <- m(input) -} +}
    @@ -145,6 +157,9 @@
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -158,7 +173,24 @@
  • - +
  • Reference
  • @@ -195,7 +227,7 @@

    Applies the soft shrinkage function elementwise:

    -
    nn_softshrink(lambd = 0.5)
    +
    nn_softshrink(lambd = 0.5)

    Arguments

    @@ -229,12 +261,12 @@ dimensions

    Examples

    -
    if (torch_is_installed()) { -m <- nn_softshrink() -input <- torch_randn(2) -output <- m(input) +
    if (torch_is_installed()) { +m <- nn_softshrink() +input <- torch_randn(2) +output <- m(input) -} +}
    @@ -148,6 +160,9 @@ $$" />
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -161,7 +176,24 @@ $$" />
  • - +
  • Reference
  • @@ -201,7 +233,7 @@ $$ $$

    -
    nn_softsign()
    +
    nn_softsign()

    Shape

    @@ -216,12 +248,12 @@ dimensions

    Examples

    -
    if (torch_is_installed()) { -m <- nn_softsign() -input <- torch_randn(2) -output <- m(input) +
    if (torch_is_installed()) { +m <- nn_softsign() +input <- torch_randn(2) +output <- m(input) -} +}
    @@ -145,6 +157,9 @@
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -158,7 +173,24 @@
  • - +
  • Reference
  • @@ -195,7 +227,7 @@

    Applies the element-wise function:

    -
    nn_tanh()
    +
    nn_tanh()

    Details

    @@ -215,12 +247,12 @@ dimensions

    Examples

    -
    if (torch_is_installed()) { -m <- nn_tanh() -input <- torch_randn(2) -output <- m(input) +
    if (torch_is_installed()) { +m <- nn_tanh() +input <- torch_randn(2) +output <- m(input) -} +}
    @@ -145,6 +157,9 @@
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -158,7 +173,24 @@
  • - +
  • Reference
  • @@ -195,7 +227,7 @@

    Applies the element-wise function:

    -
    nn_tanhshrink()
    +
    nn_tanhshrink()

    Details

    @@ -215,12 +247,12 @@ dimensions

    Examples

    -
    if (torch_is_installed()) { -m <- nn_tanhshrink() -input <- torch_randn(2) -output <- m(input) +
    if (torch_is_installed()) { +m <- nn_tanhshrink() +input <- torch_randn(2) +output <- m(input) -} +}
    @@ -145,6 +157,9 @@
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -158,7 +173,24 @@
  • - +
  • Reference
  • @@ -195,7 +227,7 @@

    Thresholds each element of the input Tensor.

    -
    nn_threshold(threshold, value, inplace = FALSE)
    +
    nn_threshold(threshold, value, inplace = FALSE)

    Arguments

    @@ -237,12 +269,12 @@ dimensions

    Examples

    -
    if (torch_is_installed()) { -m <- nn_threshold(0.1, 20) -input <- torch_randn(2) -output <- m(input) +
    if (torch_is_installed()) { +m <- nn_threshold(0.1, 20) +input <- torch_randn(2) +output <- m(input) -} +}
    + + + + + + + + + + + + + + + + + + + + + +
    margin

    (float, optional): Default: \(1\).

    p

    (int, optional): The norm degree for pairwise distance. Default: \(2\).

    eps

(float, optional): small constant added for numerical stability, to avoid NaNs

    swap

    (bool, optional): The distance swap is described in detail in the paper +Learning shallow convolutional feature descriptors with triplet losses by +V. Balntas, E. Riba et al. Default: FALSE.

    reduction

    (string, optional): Specifies the reduction to apply to the output: +'none' | 'mean' | 'sum'. 'none': no reduction will be applied, +'mean': the sum of the output will be divided by the number of +elements in the output, 'sum': the output will be summed. Note: size_average +and reduce are in the process of being deprecated, and in the meantime, +specifying either of those two args will override reduction. Default: 'mean'

    + +

    Details

    + +

    The distance swap is described in detail in the paper +Learning shallow convolutional feature descriptors with triplet losses by +V. Balntas, E. Riba et al.

    +

    The loss function for each sample in the mini-batch is:

    +

    $$ + L(a, p, n) = \max \{d(a_i, p_i) - d(a_i, n_i) + {\rm margin}, 0\} +$$

    +

    where

    +

$$ + d(x_i, y_i) = \left\Vert {\bf x}_i - {\bf y}_i \right\Vert_p +$$

    +

    See also nn_triplet_margin_with_distance_loss(), which computes the +triplet margin loss for input tensors using a custom distance function.

    +

    Shape

    + + + +
      +
    • Input: \((N, D)\) where \(D\) is the vector dimension.

    • +
    • Output: A Tensor of shape \((N)\) if reduction is 'none', or a scalar +otherwise.

    • +
    + + +

    Examples

    +
    if (torch_is_installed()) { +triplet_loss <- nn_triplet_margin_loss(margin = 1, p = 2) +anchor <- torch_randn(100, 128, requires_grad=TRUE) +positive <- torch_randn(100, 128, requires_grad=TRUE) +negative <- torch_randn(100, 128, requires_grad=TRUE) +output <- triplet_loss(anchor, positive, negative) +output$backward() + +} +
    + + + + + +
    + + +
    +

    Site built with pkgdown 1.6.1.

    +
    + +
    + + + + + + + + + diff --git a/reference/nn_triplet_margin_with_distance_loss.html b/reference/nn_triplet_margin_with_distance_loss.html new file mode 100644 index 0000000000000000000000000000000000000000..039a69bbb2baa9a930e3a246125debed56f987b7 --- /dev/null +++ b/reference/nn_triplet_margin_with_distance_loss.html @@ -0,0 +1,382 @@ + + + + + + + + +Triplet margin with distance loss — nn_triplet_margin_with_distance_loss • torch + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    +
    + + + + +
    + +
    +
    + + +
    +

    Creates a criterion that measures the triplet loss given input +tensors \(a\), \(p\), and \(n\) (representing anchor, +positive, and negative examples, respectively), and a nonnegative, +real-valued function ("distance function") used to compute the relationship +between the anchor and positive example ("positive distance") and the +anchor and negative example ("negative distance").

    +
    + +
    nn_triplet_margin_with_distance_loss(
    +  distance_function = NULL,
    +  margin = 1,
    +  swap = FALSE,
    +  reduction = "mean"
    +)
    + +

    Arguments

    + + + + + + + + + + + + + + + + + + +
    distance_function

(callable, optional): A nonnegative, real-valued function that +quantifies the closeness of two tensors. If not specified, +nn_pairwise_distance() will be used. Default: NULL

    margin

    (float, optional): A non-negative margin representing the minimum difference +between the positive and negative distances required for the loss to be 0. Larger +margins penalize cases where the negative examples are not distant enough from the +anchors, relative to the positives. Default: \(1\).

    swap

    (bool, optional): Whether to use the distance swap described in the paper +Learning shallow convolutional feature descriptors with triplet losses by +V. Balntas, E. Riba et al. If TRUE, and if the positive example is closer to the +negative example than the anchor is, swaps the positive example and the anchor in +the loss computation. Default: FALSE.

    reduction

    (string, optional): Specifies the (optional) reduction to apply to the output: +'none' | 'mean' | 'sum'. 'none': no reduction will be applied, +'mean': the sum of the output will be divided by the number of +elements in the output, 'sum': the output will be summed. Default: 'mean'

    + +

    Details

    + +

    The unreduced loss (i.e., with reduction set to 'none') +can be described as:

    +

    $$ + \ell(a, p, n) = L = \{l_1,\dots,l_N\}^\top, \quad +l_i = \max \{d(a_i, p_i) - d(a_i, n_i) + {\rm margin}, 0\} +$$

    +

    where \(N\) is the batch size; \(d\) is a nonnegative, real-valued function +quantifying the closeness of two tensors, referred to as the distance_function; +and \(margin\) is a non-negative margin representing the minimum difference +between the positive and negative distances that is required for the loss to +be 0. The input tensors have \(N\) elements each and can be of any shape +that the distance function can handle. +If reduction is not 'none' +(default 'mean'), then:

    +

$$ +\ell(x, y) = +\begin{array}{ll} +\mbox{mean}(L), & \mbox{if reduction} = \mbox{'mean';}\\ + \mbox{sum}(L), & \mbox{if reduction} = \mbox{'sum'.} +\end{array} +$$

    +

    See also nn_triplet_margin_loss(), which computes the triplet +loss for input tensors using the \(l_p\) distance as the distance function.

    +

    Shape

    + + + +
      +
    • Input: \((N, *)\) where \(*\) represents any number of additional dimensions +as supported by the distance function.

    • +
    • Output: A Tensor of shape \((N)\) if reduction is 'none', or a scalar +otherwise.

    • +
    + + +

    Examples

    +
if (torch_is_installed()) { +# Initialize embeddings (embedding indices are 1-based in R torch) +embedding <- nn_embedding(1000, 128) +anchor_ids <- torch_randint(1, 1000, 1, dtype = torch_long()) +positive_ids <- torch_randint(1, 1000, 1, dtype = torch_long()) +negative_ids <- torch_randint(1, 1000, 1, dtype = torch_long()) +anchor <- embedding(anchor_ids) +positive <- embedding(positive_ids) +negative <- embedding(negative_ids) + +# Built-in Distance Function +triplet_loss <- nn_triplet_margin_with_distance_loss( + distance_function=nn_pairwise_distance() +) +output <- triplet_loss(anchor, positive, negative) + +# Custom Distance Function +l_infinity <- function(x1, x2) { + # max over the feature dimension (dim 2 for (N, D) inputs) + torch_max(torch_abs(x1 - x2), dim = 2)[[1]] +} + +triplet_loss <- nn_triplet_margin_with_distance_loss( + distance_function=l_infinity, margin=1.5 +) +output <- triplet_loss(anchor, positive, negative) + +# Custom Distance Function (Lambda) +triplet_loss <- nn_triplet_margin_with_distance_loss( + distance_function = function(x, y) { + 1 - nnf_cosine_similarity(x, y) + } +) + +output <- triplet_loss(anchor, positive, negative) + +} +
    +
    + +
    + + +
    + + +
    +

    Site built with pkgdown 1.6.1.

    +
    + +
    +
    + + + + + + + + diff --git a/reference/nn_utils_rnn_pack_padded_sequence.html b/reference/nn_utils_rnn_pack_padded_sequence.html index f63ff359ed15ab29db3d4ea093b5e81dab174767..ac6ac95021b5204d90bf0ef63d7ab0ee9c73ab64 100644 --- a/reference/nn_utils_rnn_pack_padded_sequence.html +++ b/reference/nn_utils_rnn_pack_padded_sequence.html @@ -38,6 +38,8 @@ + + + + + + @@ -75,7 +87,7 @@ TRUE, B x T x * input is expected." /> torch - 0.0.3 + 0.1.0 @@ -148,6 +160,9 @@ TRUE, B x T x * input is expected." />
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -161,7 +176,24 @@ TRUE, B x T x * input is expected." />
  • - +
  • Reference
  • @@ -201,12 +233,12 @@ longest sequence (equal to lengths[1]), B is the batch TRUE, B x T x * input is expected.

    -
    nn_utils_rnn_pack_padded_sequence(
    -  input,
    -  lengths,
    -  batch_first = FALSE,
    -  enforce_sorted = TRUE
    -)
    +
    nn_utils_rnn_pack_padded_sequence(
    +  input,
    +  lengths,
    +  batch_first = FALSE,
    +  enforce_sorted = TRUE
    +)

    Arguments

    diff --git a/reference/nn_utils_rnn_pack_sequence.html b/reference/nn_utils_rnn_pack_sequence.html index 5609d4356fd838af9b8f7cf316d4823d7f769e18..18882847aa9e57ca200e393dbd131d49a30569ca 100644 --- a/reference/nn_utils_rnn_pack_sequence.html +++ b/reference/nn_utils_rnn_pack_sequence.html @@ -38,6 +38,8 @@ + + + + + + @@ -74,7 +86,7 @@ including zero." /> torch - 0.0.3 + 0.1.0 @@ -147,6 +159,9 @@ including zero." />
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -160,7 +175,24 @@ including zero." />
  • - +
  • Reference
  • @@ -199,7 +231,7 @@ the length of a sequence and nn_utils_rnn_pack_sequence(sequences, enforce_sorted = TRUE) +
    nn_utils_rnn_pack_sequence(sequences, enforce_sorted = TRUE)

    Arguments

    @@ -226,14 +258,14 @@ is TRUE, the sequences should be sorted in the order of decreasing enforce_sorted = TRUE is only necessary for ONNX export.

    Examples

    -
    if (torch_is_installed()) { -x <- torch_tensor(c(1,2,3), dtype = torch_long()) -y <- torch_tensor(c(4, 5), dtype = torch_long()) -z <- torch_tensor(c(6), dtype = torch_long()) +
    if (torch_is_installed()) { +x <- torch_tensor(c(1,2,3), dtype = torch_long()) +y <- torch_tensor(c(4, 5), dtype = torch_long()) +z <- torch_tensor(c(6), dtype = torch_long()) -p <- nn_utils_rnn_pack_sequence(list(x, y, z)) +p <- nn_utils_rnn_pack_sequence(list(x, y, z)) -} +}
    @@ -145,6 +157,9 @@
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -158,7 +173,24 @@
  • - +
  • Reference
  • @@ -195,12 +227,12 @@

    It is an inverse operation to nn_utils_rnn_pack_padded_sequence().

    -
    nn_utils_rnn_pad_packed_sequence(
    -  sequence,
    -  batch_first = FALSE,
    -  padding_value = 0,
    -  total_length = NULL
    -)
    +
    nn_utils_rnn_pad_packed_sequence(
    +  sequence,
    +  batch_first = FALSE,
    +  padding_value = 0,
    +  total_length = NULL
    +)

    Arguments

    @@ -246,15 +278,15 @@ the data will be transposed into B x T x * format.

A list with two elements: the padded sequence tensor, and a tensor with the length of each sequence in the batch.

    Examples

if (torch_is_installed()) {
seq <- torch_tensor(rbind(c(1, 2, 0), c(3, 0, 0), c(4, 5, 6)))
lens <- c(2, 1, 3)
packed <- nn_utils_rnn_pack_padded_sequence(
  seq, lens, batch_first = TRUE, enforce_sorted = FALSE
)
packed
nn_utils_rnn_pad_packed_sequence(packed, batch_first = TRUE)
}
    #> [[1]] #> torch_tensor #> 1 2 0 diff --git a/reference/nn_utils_rnn_pad_sequence.html b/reference/nn_utils_rnn_pad_sequence.html index cc3e8b10e2ec8ff167e2571357eeb3995318de4c..d11d4c8566a00abf6323c7debc6efb6ecfd91d53 100644 --- a/reference/nn_utils_rnn_pad_sequence.html +++ b/reference/nn_utils_rnn_pad_sequence.html @@ -38,6 +38,8 @@ + + + + + + @@ -75,7 +87,7 @@ otherwise." /> torch - 0.0.3 + 0.1.0
    @@ -148,6 +160,9 @@ otherwise." />
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -161,7 +176,24 @@ otherwise." />
  • - +
  • Reference
  • @@ -201,7 +233,7 @@ sequences with size L x * and if batch_first is False, and T otherwise.

    -
    nn_utils_rnn_pad_sequence(sequences, batch_first = FALSE, padding_value = 0)
    +
    nn_utils_rnn_pad_sequence(sequences, batch_first = FALSE, padding_value = 0)

    Arguments

    @@ -238,13 +270,13 @@ where T is the length of the longest sequence. This function assume trailing dimensions and type of all the Tensors in sequences are same.

    Examples

if (torch_is_installed()) {
a <- torch_ones(25, 300)
b <- torch_ones(22, 300)
c <- torch_ones(15, 300)
nn_utils_rnn_pad_sequence(list(a, b, c))$size()
}
    #> [1] 25 3 300
    @@ -146,6 +158,9 @@ several input planes." />
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -159,7 +174,24 @@ several input planes." />
  • - +
  • Reference
  • @@ -197,7 +229,7 @@ several input planes." /> several input planes.

    -
    nnf_adaptive_avg_pool1d(input, output_size)
    +
    nnf_adaptive_avg_pool1d(input, output_size)
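A minimal sketch (added for illustration; the input shape is an assumption): adaptive pooling maps any input length to a fixed output length.

if (torch_is_installed()) {
x <- torch_randn(1, 4, 16)                         # (N, C, L)
nnf_adaptive_avg_pool1d(x, output_size = 8)$size() # (1, 4, 8)
}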

    Arguments

    diff --git a/reference/nnf_adaptive_avg_pool2d.html b/reference/nnf_adaptive_avg_pool2d.html index e2fbfbb5ce50d55d5a8e4ef48a599e2c3e7eb463..a3de332f6ae34bc19f3993401999b80f3d1fdb25 100644 --- a/reference/nnf_adaptive_avg_pool2d.html +++ b/reference/nnf_adaptive_avg_pool2d.html @@ -38,6 +38,8 @@ + + + + + + @@ -73,7 +85,7 @@ several input planes." /> torch - 0.0.3 + 0.1.0 @@ -146,6 +158,9 @@ several input planes." />
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -159,7 +174,24 @@ several input planes." />
  • - +
  • Reference
  • @@ -197,7 +229,7 @@ several input planes." /> several input planes.

    -
    nnf_adaptive_avg_pool2d(input, output_size)
    +
    nnf_adaptive_avg_pool2d(input, output_size)

    Arguments

    diff --git a/reference/nnf_adaptive_avg_pool3d.html b/reference/nnf_adaptive_avg_pool3d.html index e919dbe4ec6b1fdf0d6c0f498b93bb7655cd4f56..0ac63d9d28cd9f01920c2729ef6e7706c12830cb 100644 --- a/reference/nnf_adaptive_avg_pool3d.html +++ b/reference/nnf_adaptive_avg_pool3d.html @@ -38,6 +38,8 @@ + + + + + + @@ -73,7 +85,7 @@ several input planes." /> torch - 0.0.3 + 0.1.0 @@ -146,6 +158,9 @@ several input planes." />
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -159,7 +174,24 @@ several input planes." />
  • - +
  • Reference
  • @@ -197,7 +229,7 @@ several input planes." /> several input planes.

    -
    nnf_adaptive_avg_pool3d(input, output_size)
    +
    nnf_adaptive_avg_pool3d(input, output_size)

    Arguments

    diff --git a/reference/nnf_adaptive_max_pool1d.html b/reference/nnf_adaptive_max_pool1d.html index 19b488653c75760223de0a51bd5432dd7826a303..2c5d34870e08c132c244d0d96057e682b4a225fe 100644 --- a/reference/nnf_adaptive_max_pool1d.html +++ b/reference/nnf_adaptive_max_pool1d.html @@ -38,6 +38,8 @@ + + + + + + @@ -73,7 +85,7 @@ several input planes." /> torch - 0.0.3 + 0.1.0 @@ -146,6 +158,9 @@ several input planes." />
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -159,7 +174,24 @@ several input planes." />
  • - +
  • Reference
  • @@ -197,7 +229,7 @@ several input planes." /> several input planes.

    -
    nnf_adaptive_max_pool1d(input, output_size, return_indices = FALSE)
    +
    nnf_adaptive_max_pool1d(input, output_size, return_indices = FALSE)

    Arguments

    diff --git a/reference/nnf_adaptive_max_pool2d.html b/reference/nnf_adaptive_max_pool2d.html index 6e389c087fdcae182fc3db06898757ecc0763fed..a23dcfdf122d73f37d3316de3f7d45206c66ad27 100644 --- a/reference/nnf_adaptive_max_pool2d.html +++ b/reference/nnf_adaptive_max_pool2d.html @@ -38,6 +38,8 @@ + + + + + + @@ -73,7 +85,7 @@ several input planes." /> torch - 0.0.3 + 0.1.0 @@ -146,6 +158,9 @@ several input planes." />
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -159,7 +174,24 @@ several input planes." />
  • - +
  • Reference
  • @@ -197,7 +229,7 @@ several input planes." /> several input planes.

    -
    nnf_adaptive_max_pool2d(input, output_size, return_indices = FALSE)
    +
    nnf_adaptive_max_pool2d(input, output_size, return_indices = FALSE)

    Arguments

    diff --git a/reference/nnf_adaptive_max_pool3d.html b/reference/nnf_adaptive_max_pool3d.html index 66721efd9e86794bb0efcdfdd1e119e9851c7e87..5c1fd3ef90a449409a59945dc2e40b723addae92 100644 --- a/reference/nnf_adaptive_max_pool3d.html +++ b/reference/nnf_adaptive_max_pool3d.html @@ -38,6 +38,8 @@ + + + + + + @@ -73,7 +85,7 @@ several input planes." /> torch - 0.0.3 + 0.1.0 @@ -146,6 +158,9 @@ several input planes." />
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -159,7 +174,24 @@ several input planes." />
  • - +
  • Reference
  • @@ -197,7 +229,7 @@ several input planes." /> several input planes.

    -
    nnf_adaptive_max_pool3d(input, output_size, return_indices = FALSE)
    +
    nnf_adaptive_max_pool3d(input, output_size, return_indices = FALSE)

    Arguments

    diff --git a/reference/nnf_affine_grid.html b/reference/nnf_affine_grid.html index 3ba7b5443fa5e0c46af130d67a3ab1f42eb49109..af14e52de2979e7c4b9339c43495691d1b6bd8ff 100644 --- a/reference/nnf_affine_grid.html +++ b/reference/nnf_affine_grid.html @@ -38,6 +38,8 @@ + + + + + + @@ -73,7 +85,7 @@ affine matrices theta." /> torch - 0.0.3 + 0.1.0 @@ -146,6 +158,9 @@ affine matrices theta." />
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -159,7 +174,24 @@ affine matrices theta." />
  • - +
  • Reference
  • @@ -197,7 +229,7 @@ affine matrices theta." /> affine matrices theta.

    -
    nnf_affine_grid(theta, size, align_corners = FALSE)
    +
    nnf_affine_grid(theta, size, align_corners = FALSE)

    Arguments

    diff --git a/reference/nnf_alpha_dropout.html b/reference/nnf_alpha_dropout.html index 00d715796e62289e81d24dd4ea6b9f9534c4d113..d7f8dba9ec6319b6b4239234b19a18f3bf17b3eb 100644 --- a/reference/nnf_alpha_dropout.html +++ b/reference/nnf_alpha_dropout.html @@ -38,6 +38,8 @@ + + @@ -55,6 +57,16 @@ + + + + @@ -72,7 +84,7 @@ torch - 0.0.3 + 0.1.0 @@ -145,6 +157,9 @@
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -158,7 +173,24 @@
  • - +
  • Reference
  • @@ -195,7 +227,7 @@

    Applies alpha dropout to the input.

    -
    nnf_alpha_dropout(input, p = 0.5, training = FALSE, inplace = FALSE)
    +
    nnf_alpha_dropout(input, p = 0.5, training = FALSE, inplace = FALSE)

    Arguments

    diff --git a/reference/nnf_avg_pool1d.html b/reference/nnf_avg_pool1d.html index cc914d070aed48d18bc064a67a1d11d68cdf678d..54b4d8acacde6c013e6f411f6f74a59880683c86 100644 --- a/reference/nnf_avg_pool1d.html +++ b/reference/nnf_avg_pool1d.html @@ -38,6 +38,8 @@ + + + + + + @@ -73,7 +85,7 @@ input planes." /> torch - 0.0.3 + 0.1.0 @@ -146,6 +158,9 @@ input planes." />
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -159,7 +174,24 @@ input planes." />
  • - +
  • Reference
  • @@ -197,14 +229,14 @@ input planes." /> input planes.

    -
    nnf_avg_pool1d(
    -  input,
    -  kernel_size,
    -  stride = NULL,
    -  padding = 0,
    -  ceil_mode = FALSE,
    -  count_include_pad = TRUE
    -)
    +
    nnf_avg_pool1d(
    +  input,
    +  kernel_size,
    +  stride = NULL,
    +  padding = 0,
    +  ceil_mode = FALSE,
    +  count_include_pad = TRUE
    +)
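A minimal usage sketch (illustrative; shapes assumed, not from the original page):

if (torch_is_installed()) {
x <- torch_randn(1, 1, 8)                      # (N, C, L)
nnf_avg_pool1d(x, kernel_size = 2, stride = 2) # halves the temporal length
}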

    Arguments

    diff --git a/reference/nnf_avg_pool2d.html b/reference/nnf_avg_pool2d.html index 934d38fa89e8860406abd53eb65c09ecb706aa83..be16992f39f32c55b04d21e0a82b6e38f909e04f 100644 --- a/reference/nnf_avg_pool2d.html +++ b/reference/nnf_avg_pool2d.html @@ -38,6 +38,8 @@ + + + + + + @@ -74,7 +86,7 @@ input planes." /> torch - 0.0.3 + 0.1.0 @@ -147,6 +159,9 @@ input planes." />
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -160,7 +175,24 @@ input planes." />
  • - +
  • Reference
  • @@ -199,15 +231,15 @@ input planes." /> input planes.

    -
    nnf_avg_pool2d(
    -  input,
    -  kernel_size,
    -  stride = NULL,
    -  padding = 0,
    -  ceil_mode = FALSE,
    -  count_include_pad = TRUE,
    -  divisor_override = NULL
    -)
    +
    nnf_avg_pool2d(
    +  input,
    +  kernel_size,
    +  stride = NULL,
    +  padding = 0,
    +  ceil_mode = FALSE,
    +  count_include_pad = TRUE,
    +  divisor_override = NULL
    +)

    Arguments

    diff --git a/reference/nnf_avg_pool3d.html b/reference/nnf_avg_pool3d.html index c064049a46aecf112afb65256bea4a5247f82923..0ec141a799b4e7936924397948e099076fb04a23 100644 --- a/reference/nnf_avg_pool3d.html +++ b/reference/nnf_avg_pool3d.html @@ -38,6 +38,8 @@ + + + + @@ -74,7 +86,7 @@ size \(sT * sH * sW\) steps. The number of output features is equal to torch - 0.0.3 + 0.1.0 @@ -147,6 +159,9 @@ size \(sT * sH * sW\) steps. The number of output features is equal to
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -160,7 +175,24 @@ size \(sT * sH * sW\) steps. The number of output features is equal to
  • - +
  • Reference
  • @@ -199,15 +231,15 @@ size \(sT * sH * sW\) steps. The number of output features is equal to \(\lfloor \frac{ \mbox{input planes} }{sT} \rfloor\).

    -
    nnf_avg_pool3d(
    -  input,
    -  kernel_size,
    -  stride = NULL,
    -  padding = 0,
    -  ceil_mode = FALSE,
    -  count_include_pad = TRUE,
    -  divisor_override = NULL
    -)
    +
    nnf_avg_pool3d(
    +  input,
    +  kernel_size,
    +  stride = NULL,
    +  padding = 0,
    +  ceil_mode = FALSE,
    +  count_include_pad = TRUE,
    +  divisor_override = NULL
    +)

    Arguments

    diff --git a/reference/nnf_batch_norm.html b/reference/nnf_batch_norm.html index b691600350ef65b4cb732e3e7d67c4155d80962d..5747729f55c217fa7fefe246027ebff388557c5c 100644 --- a/reference/nnf_batch_norm.html +++ b/reference/nnf_batch_norm.html @@ -38,6 +38,8 @@ + + @@ -55,6 +57,16 @@ + + + + @@ -72,7 +84,7 @@ torch - 0.0.3 + 0.1.0 @@ -145,6 +157,9 @@
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -158,7 +173,24 @@
  • - +
  • Reference
  • @@ -195,16 +227,16 @@

    Applies Batch Normalization for each channel across a batch of data.

    -
    nnf_batch_norm(
    -  input,
    -  running_mean,
    -  running_var,
    -  weight = NULL,
    -  bias = NULL,
    -  training = FALSE,
    -  momentum = 0.1,
    -  eps = 1e-05
    -)
    +
    nnf_batch_norm(
    +  input,
    +  running_mean,
    +  running_var,
    +  weight = NULL,
    +  bias = NULL,
    +  training = FALSE,
    +  momentum = 0.1,
    +  eps = 1e-05
    +)
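A minimal sketch (an illustrative addition; the shapes and statistics are assumptions): batch norm needs one running mean and variance per channel.

if (torch_is_installed()) {
x <- torch_randn(4, 3, 8, 8)        # (N, C, H, W)
out <- nnf_batch_norm(
  x,
  running_mean = torch_zeros(3),    # one statistic per channel
  running_var = torch_ones(3),
  training = TRUE
)
out$size()
}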

    Arguments

    diff --git a/reference/nnf_bilinear.html b/reference/nnf_bilinear.html index 72032616ecfaea323a40aba71635c5fe09deb4a5..9b93c9d7f7ed22feb313bb49777c2d9cd3110bb5 100644 --- a/reference/nnf_bilinear.html +++ b/reference/nnf_bilinear.html @@ -38,6 +38,8 @@ + + + + @@ -73,7 +85,7 @@ torch - 0.0.3 + 0.1.0 @@ -146,6 +158,9 @@
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -159,7 +174,24 @@
  • - +
  • Reference
  • @@ -197,7 +229,7 @@ \(y = x_1 A x_2 + b\)

    -
    nnf_bilinear(input1, input2, weight, bias = NULL)
    +
    nnf_bilinear(input1, input2, weight, bias = NULL)
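A minimal sketch (illustrative; shapes assumed): the weight carries one matrix per output feature.

if (torch_is_installed()) {
x1 <- torch_randn(4, 5)             # (N, in1_features)
x2 <- torch_randn(4, 6)             # (N, in2_features)
w  <- torch_randn(3, 5, 6)          # (out_features, in1_features, in2_features)
nnf_bilinear(x1, x2, w)$size()      # (4, 3)
}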

    Arguments

    diff --git a/reference/nnf_binary_cross_entropy.html b/reference/nnf_binary_cross_entropy.html index d4b1c2b7eebab354330e4f2ad812a1062444eecd..feaeada11a724868b1a43c3b606a503c388e9bcc 100644 --- a/reference/nnf_binary_cross_entropy.html +++ b/reference/nnf_binary_cross_entropy.html @@ -38,6 +38,8 @@ + + + + + + @@ -73,7 +85,7 @@ between the target and the output." /> torch - 0.0.3 + 0.1.0 @@ -146,6 +158,9 @@ between the target and the output." />
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -159,7 +174,24 @@ between the target and the output." />
  • - +
  • Reference
  • @@ -197,12 +229,12 @@ between the target and the output." /> between the target and the output.

    -
    nnf_binary_cross_entropy(
    -  input,
    -  target,
    -  weight = NULL,
    -  reduction = c("mean", "sum", "none")
    -)
    +
    nnf_binary_cross_entropy(
    +  input,
    +  target,
    +  weight = NULL,
    +  reduction = c("mean", "sum", "none")
    +)
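A minimal usage sketch (added for illustration; values assumed): the input must already be probabilities in (0, 1).

if (torch_is_installed()) {
input  <- torch_sigmoid(torch_randn(3))   # probabilities in (0, 1)
target <- torch_tensor(c(1, 0, 1))
nnf_binary_cross_entropy(input, target)
}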

    Arguments

    diff --git a/reference/nnf_binary_cross_entropy_with_logits.html b/reference/nnf_binary_cross_entropy_with_logits.html index 7aedf2ba0eb1cf53b77140b020ad0b716a4d72da..9d70a3b10f27cb6f0acfdae941f4bb1ec2f2856f 100644 --- a/reference/nnf_binary_cross_entropy_with_logits.html +++ b/reference/nnf_binary_cross_entropy_with_logits.html @@ -38,6 +38,8 @@ + + + + + + @@ -73,7 +85,7 @@ logits." /> torch - 0.0.3 + 0.1.0 @@ -146,6 +158,9 @@ logits." />
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -159,7 +174,24 @@ logits." />
  • - +
  • Reference
  • @@ -197,13 +229,13 @@ logits." /> logits.

    -
    nnf_binary_cross_entropy_with_logits(
    -  input,
    -  target,
    -  weight = NULL,
    -  reduction = c("mean", "sum", "none"),
    -  pos_weight = NULL
    -)
    +
    nnf_binary_cross_entropy_with_logits(
    +  input,
    +  target,
    +  weight = NULL,
    +  reduction = c("mean", "sum", "none"),
    +  pos_weight = NULL
    +)

    Arguments

    diff --git a/reference/nnf_celu.html b/reference/nnf_celu.html index 71ae09d124c266ad9421154e77ed7980499d1dc0..97feb36a3b19d72e501834e1454f478ddf789f9f 100644 --- a/reference/nnf_celu.html +++ b/reference/nnf_celu.html @@ -38,6 +38,8 @@ + + @@ -55,6 +57,16 @@ + + + + @@ -72,7 +84,7 @@ torch - 0.0.3 + 0.1.0 @@ -145,6 +157,9 @@
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -158,7 +173,24 @@
  • - +
  • Reference
  • @@ -195,9 +227,9 @@

    Applies element-wise, \(CELU(x) = max(0,x) + min(0, \alpha * (exp(x \alpha) - 1))\).

    -
    nnf_celu(input, alpha = 1, inplace = FALSE)
    +    
    nnf_celu(input, alpha = 1, inplace = FALSE)
     
    -nnf_celu_(input, alpha = 1)
    +nnf_celu_(input, alpha = 1)
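A minimal sketch mirroring the elu example elsewhere on this site (input values assumed):

if (torch_is_installed()) {
x <- torch_randn(2, 2)
y <- nnf_celu(x, alpha = 1)
nnf_celu_(x, alpha = 1)   # the trailing underscore marks the in-place variant
torch_equal(x, y)
}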

    Arguments

    diff --git a/reference/nnf_conv1d.html b/reference/nnf_conv1d.html index 5d76c0be8c7a71264971871caa69377ecbb50275..74ba59b5dfc1b726d0151ca5c975f7731696a3e8 100644 --- a/reference/nnf_conv1d.html +++ b/reference/nnf_conv1d.html @@ -38,6 +38,8 @@ + + + + + + @@ -73,7 +85,7 @@ planes." /> torch - 0.0.3 + 0.1.0 @@ -146,6 +158,9 @@ planes." />
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -159,7 +174,24 @@ planes." />
  • - +
  • Reference
  • @@ -197,15 +229,15 @@ planes." /> planes.

    -
    nnf_conv1d(
    -  input,
    -  weight,
    -  bias = NULL,
    -  stride = 1,
    -  padding = 0,
    -  dilation = 1,
    -  groups = 1
    -)
    +
    nnf_conv1d(
    +  input,
    +  weight,
    +  bias = NULL,
    +  stride = 1,
    +  padding = 0,
    +  dilation = 1,
    +  groups = 1
    +)

    Arguments

    diff --git a/reference/nnf_conv2d.html b/reference/nnf_conv2d.html index e53d6faefa4a94ab8c0cd2cb25303c38e2ac08f7..25c0e381444fae1d8db33ce97f85f4c9cab7a0b4 100644 --- a/reference/nnf_conv2d.html +++ b/reference/nnf_conv2d.html @@ -38,6 +38,8 @@ + + + + + + @@ -73,7 +85,7 @@ planes." /> torch - 0.0.3 + 0.1.0 @@ -146,6 +158,9 @@ planes." />
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -159,7 +174,24 @@ planes." />
  • - +
  • Reference
  • @@ -197,15 +229,15 @@ planes." /> planes.

    -
    nnf_conv2d(
    -  input,
    -  weight,
    -  bias = NULL,
    -  stride = 1,
    -  padding = 0,
    -  dilation = 1,
    -  groups = 1
    -)
    +
    nnf_conv2d(
    +  input,
    +  weight,
    +  bias = NULL,
    +  stride = 1,
    +  padding = 0,
    +  dilation = 1,
    +  groups = 1
    +)
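A minimal usage sketch (an illustrative addition; shapes assumed, not from the original page):

if (torch_is_installed()) {
x <- torch_randn(1, 3, 8, 8)          # (N, C_in, H, W)
w <- torch_randn(6, 3, 3, 3)          # (C_out, C_in, kH, kW)
nnf_conv2d(x, w, padding = 1)$size()  # (1, 6, 8, 8): padding = 1 preserves H and W
}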

    Arguments

    diff --git a/reference/nnf_conv3d.html b/reference/nnf_conv3d.html index 9a872f5b9530dbc952824b5599f67863c8f99cec..748e74624afd678518e39529937e5d4d00ea10dd 100644 --- a/reference/nnf_conv3d.html +++ b/reference/nnf_conv3d.html @@ -38,6 +38,8 @@ + + + + + + @@ -73,7 +85,7 @@ planes." /> torch - 0.0.3 + 0.1.0 @@ -146,6 +158,9 @@ planes." />
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -159,7 +174,24 @@ planes." />
  • - +
  • Reference
  • @@ -197,15 +229,15 @@ planes." /> planes.

    -
    nnf_conv3d(
    -  input,
    -  weight,
    -  bias = NULL,
    -  stride = 1,
    -  padding = 0,
    -  dilation = 1,
    -  groups = 1
    -)
    +
    nnf_conv3d(
    +  input,
    +  weight,
    +  bias = NULL,
    +  stride = 1,
    +  padding = 0,
    +  dilation = 1,
    +  groups = 1
    +)

    Arguments

    diff --git a/reference/nnf_conv_tbc.html b/reference/nnf_conv_tbc.html index 8c08e93eb105aee39ba1fd9f6f001e92ab754370..0bb107c8812e6c270d51fb3b8ad54872aad28cdc 100644 --- a/reference/nnf_conv_tbc.html +++ b/reference/nnf_conv_tbc.html @@ -38,6 +38,8 @@ + + + + + + @@ -73,7 +85,7 @@ Input and output dimensions are (Time, Batch, Channels) - hence TBC." /> torch - 0.0.3 + 0.1.0 @@ -146,6 +158,9 @@ Input and output dimensions are (Time, Batch, Channels) - hence TBC." />
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -159,7 +174,24 @@ Input and output dimensions are (Time, Batch, Channels) - hence TBC." />
  • - +
  • Reference
  • @@ -197,7 +229,7 @@ Input and output dimensions are (Time, Batch, Channels) - hence TBC." /> Input and output dimensions are (Time, Batch, Channels) - hence TBC.

    -
    nnf_conv_tbc(input, weight, bias, pad = 0)
    +
    nnf_conv_tbc(input, weight, bias, pad = 0)

    Arguments

    diff --git a/reference/nnf_conv_transpose1d.html b/reference/nnf_conv_transpose1d.html index c199dc33628f6dccc2dd55f8c8ec1c5825575b2a..6ecf3429a07e80ef8dbd984724da6ea3fe838615 100644 --- a/reference/nnf_conv_transpose1d.html +++ b/reference/nnf_conv_transpose1d.html @@ -38,6 +38,8 @@ + + + + @@ -73,7 +85,7 @@ composed of several input planes, sometimes also called "deconvolution" torch - 0.0.3 + 0.1.0 @@ -146,6 +158,9 @@ composed of several input planes, sometimes also called "deconvolution"
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -159,7 +174,24 @@ composed of several input planes, sometimes also called "deconvolution"
  • - +
  • Reference
  • @@ -197,16 +229,16 @@ composed of several input planes, sometimes also called "deconvolution" composed of several input planes, sometimes also called "deconvolution".

    -
    nnf_conv_transpose1d(
    -  input,
    -  weight,
    -  bias = NULL,
    -  stride = 1,
    -  padding = 0,
    -  output_padding = 0,
    -  groups = 1,
    -  dilation = 1
    -)
    +
    nnf_conv_transpose1d(
    +  input,
    +  weight,
    +  bias = NULL,
    +  stride = 1,
    +  padding = 0,
    +  output_padding = 0,
    +  groups = 1,
    +  dilation = 1
    +)

    Arguments

    diff --git a/reference/nnf_conv_transpose2d.html b/reference/nnf_conv_transpose2d.html index 368764e354d9aabfc61da6c833812c90a07e63f6..5b5ca70e0348c322a4ee640770ed98ab1b960e1e 100644 --- a/reference/nnf_conv_transpose2d.html +++ b/reference/nnf_conv_transpose2d.html @@ -38,6 +38,8 @@ + + + + @@ -73,7 +85,7 @@ composed of several input planes, sometimes also called "deconvolution" torch - 0.0.3 + 0.1.0 @@ -146,6 +158,9 @@ composed of several input planes, sometimes also called "deconvolution"
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -159,7 +174,24 @@ composed of several input planes, sometimes also called "deconvolution"
  • - +
  • Reference
  • @@ -197,16 +229,16 @@ composed of several input planes, sometimes also called "deconvolution" composed of several input planes, sometimes also called "deconvolution".

    -
    nnf_conv_transpose2d(
    -  input,
    -  weight,
    -  bias = NULL,
    -  stride = 1,
    -  padding = 0,
    -  output_padding = 0,
    -  groups = 1,
    -  dilation = 1
    -)
    +
    nnf_conv_transpose2d(
    +  input,
    +  weight,
    +  bias = NULL,
    +  stride = 1,
    +  padding = 0,
    +  output_padding = 0,
    +  groups = 1,
    +  dilation = 1
    +)

    Arguments

    diff --git a/reference/nnf_conv_transpose3d.html b/reference/nnf_conv_transpose3d.html index c18b0b1242574b0b53ebb899fb1c13fd83ce4884..b7a2ff1347d6a7054270c11445640c5966937bc4 100644 --- a/reference/nnf_conv_transpose3d.html +++ b/reference/nnf_conv_transpose3d.html @@ -38,6 +38,8 @@ + + + + @@ -73,7 +85,7 @@ composed of several input planes, sometimes also called "deconvolution" torch - 0.0.3 + 0.1.0 @@ -146,6 +158,9 @@ composed of several input planes, sometimes also called "deconvolution"
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -159,7 +174,24 @@ composed of several input planes, sometimes also called "deconvolution"
  • - +
  • Reference
  • @@ -197,16 +229,16 @@ composed of several input planes, sometimes also called "deconvolution" composed of several input planes, sometimes also called "deconvolution"

    -
    nnf_conv_transpose3d(
    -  input,
    -  weight,
    -  bias = NULL,
    -  stride = 1,
    -  padding = 0,
    -  output_padding = 0,
    -  groups = 1,
    -  dilation = 1
    -)
    +
    nnf_conv_transpose3d(
    +  input,
    +  weight,
    +  bias = NULL,
    +  stride = 1,
    +  padding = 0,
    +  output_padding = 0,
    +  groups = 1,
    +  dilation = 1
    +)

    Arguments

    diff --git a/reference/nnf_cosine_embedding_loss.html b/reference/nnf_cosine_embedding_loss.html index f812a34b379e0c106d8ca3748b99ee56b1ce3aad..eb8566f890989ef35e4b56e9ea0c040a40b286e8 100644 --- a/reference/nnf_cosine_embedding_loss.html +++ b/reference/nnf_cosine_embedding_loss.html @@ -38,6 +38,8 @@ + + + + + + @@ -75,7 +87,7 @@ for learning nonlinear embeddings or semi-supervised learning." /> torch - 0.0.3 + 0.1.0 @@ -148,6 +160,9 @@ for learning nonlinear embeddings or semi-supervised learning." />
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -161,7 +176,24 @@ for learning nonlinear embeddings or semi-supervised learning." />
  • - +
  • Reference
  • @@ -201,13 +233,13 @@ are similar or dissimilar, using the cosine distance, and is typically used for learning nonlinear embeddings or semi-supervised learning.

    -
    nnf_cosine_embedding_loss(
    -  input1,
    -  input2,
    -  target,
    -  margin = 0,
    -  reduction = c("mean", "sum", "none")
    -)
    +
    nnf_cosine_embedding_loss(
    +  input1,
    +  input2,
    +  target,
    +  margin = 0,
    +  reduction = c("mean", "sum", "none")
    +)

    Arguments

    diff --git a/reference/nnf_cosine_similarity.html b/reference/nnf_cosine_similarity.html index 32359212d4cc5de23383c8c654627fd8971a3f9f..9e229f8b6fc29e430ec01e14a4c6a567802df055 100644 --- a/reference/nnf_cosine_similarity.html +++ b/reference/nnf_cosine_similarity.html @@ -38,6 +38,8 @@ + + @@ -55,6 +57,16 @@ + + + + @@ -72,7 +84,7 @@ torch - 0.0.3 + 0.1.0 @@ -145,6 +157,9 @@
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -158,7 +173,24 @@
  • - +
  • Reference
  • @@ -195,7 +227,7 @@

    Returns cosine similarity between x1 and x2, computed along dim.

    -
    nnf_cosine_similarity(x1, x2, dim = 1, eps = 1e-08)
    +
    nnf_cosine_similarity(x1, x2, dim = 1, eps = 1e-08)
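A minimal sketch (illustrative; shapes assumed). Note that dims are 1-based in R torch, so comparing rows feature-wise uses dim = 2.

if (torch_is_installed()) {
x1 <- torch_randn(3, 5)
x2 <- torch_randn(3, 5)
nnf_cosine_similarity(x1, x2, dim = 2)  # one similarity per row
}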

    Arguments

    diff --git a/reference/nnf_cross_entropy.html b/reference/nnf_cross_entropy.html index fec6ab54d2785e08f30d0d4a82b4443466fc7261..7a98136562a600145a981bf5c80de0fbb9b95ec2 100644 --- a/reference/nnf_cross_entropy.html +++ b/reference/nnf_cross_entropy.html @@ -38,6 +38,8 @@ + + + + + + @@ -73,7 +85,7 @@ function." /> torch - 0.0.3 + 0.1.0 @@ -146,6 +158,9 @@ function." />
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -159,7 +174,24 @@ function." />
  • - +
  • Reference
  • @@ -197,13 +229,13 @@ function." /> function.

    -
    nnf_cross_entropy(
    -  input,
    -  target,
    -  weight = NULL,
    -  ignore_index = -100,
    -  reduction = c("mean", "sum", "none")
    -)
    +
    nnf_cross_entropy(
    +  input,
    +  target,
    +  weight = NULL,
    +  ignore_index = -100,
    +  reduction = c("mean", "sum", "none")
    +)
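A minimal usage sketch (added for illustration; shapes and labels are assumptions). The class labels here are 1-based, which matches R torch's indexing convention.

if (torch_is_installed()) {
input  <- torch_randn(3, 5)                               # (N, classes) raw scores
target <- torch_tensor(c(1, 3, 5), dtype = torch_long())  # 1-based class labels
nnf_cross_entropy(input, target)
}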

    Arguments

    diff --git a/reference/nnf_ctc_loss.html b/reference/nnf_ctc_loss.html index 78800fb870356b59f4928cf496fd11c1b124d661..cb4912410b923d8a5bbe7ec67451f2fa8efec265 100644 --- a/reference/nnf_ctc_loss.html +++ b/reference/nnf_ctc_loss.html @@ -38,6 +38,8 @@ + + @@ -55,6 +57,16 @@ + + + + @@ -72,7 +84,7 @@ torch - 0.0.3 + 0.1.0 @@ -145,6 +157,9 @@
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -158,7 +173,24 @@
  • - +
  • Reference
  • @@ -195,15 +227,15 @@

    The Connectionist Temporal Classification loss.

    -
    nnf_ctc_loss(
    -  log_probs,
    -  targets,
    -  input_lengths,
    -  target_lengths,
    -  blank = 0,
    -  reduction = c("mean", "sum", "none"),
    -  zero_infinity = FALSE
    -)
    +
    nnf_ctc_loss(
    +  log_probs,
    +  targets,
    +  input_lengths,
    +  target_lengths,
    +  blank = 0,
    +  reduction = c("mean", "sum", "none"),
    +  zero_infinity = FALSE
    +)

    Arguments

    diff --git a/reference/nnf_dropout.html b/reference/nnf_dropout.html index dac8fcc91f822158847f84b09d6c355b72a2d4fb..20360a00aca26d39e89153f7ad3d1c8e4abdcbaf 100644 --- a/reference/nnf_dropout.html +++ b/reference/nnf_dropout.html @@ -38,6 +38,8 @@ + + + + + + @@ -74,7 +86,7 @@ distribution." /> torch - 0.0.3 + 0.1.0 @@ -147,6 +159,9 @@ distribution." />
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -160,7 +175,24 @@ distribution." />
  • - +
  • Reference
  • @@ -199,7 +231,7 @@ tensor with probability p using samples from a Bernoulli distribution.

    -
    nnf_dropout(input, p = 0.5, training = TRUE, inplace = FALSE)
    +
    nnf_dropout(input, p = 0.5, training = TRUE, inplace = FALSE)
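A minimal sketch (illustrative; input assumed):

if (torch_is_installed()) {
x <- torch_ones(2, 4)
# roughly half the entries are zeroed; survivors are scaled by 1 / (1 - p)
nnf_dropout(x, p = 0.5, training = TRUE)
}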

    Arguments

    diff --git a/reference/nnf_dropout2d.html b/reference/nnf_dropout2d.html index 0a008f3b084a289a3db4e0a00c3cd97486e636fb..b197efd599d29dfda350d984fc62022eb55b4616 100644 --- a/reference/nnf_dropout2d.html +++ b/reference/nnf_dropout2d.html @@ -38,6 +38,8 @@ + + + + + + @@ -76,7 +88,7 @@ probability p using samples from a Bernoulli distribution." /> torch - 0.0.3 + 0.1.0 @@ -149,6 +161,9 @@ probability p using samples from a Bernoulli distribution." />
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -162,7 +177,24 @@ probability p using samples from a Bernoulli distribution." />
  • - +
  • Reference
  • @@ -203,7 +235,7 @@ Each channel will be zeroed out independently on every forward call with probability p using samples from a Bernoulli distribution.

    -
    nnf_dropout2d(input, p = 0.5, training = TRUE, inplace = FALSE)
    +
    nnf_dropout2d(input, p = 0.5, training = TRUE, inplace = FALSE)

    Arguments

    diff --git a/reference/nnf_dropout3d.html b/reference/nnf_dropout3d.html index 417bbb3fab9e56f43740da938435d87ad387e50d..90948421f2b54170b02230cfffcc0466c92027fa 100644 --- a/reference/nnf_dropout3d.html +++ b/reference/nnf_dropout3d.html @@ -38,6 +38,8 @@ + + + + + + @@ -76,7 +88,7 @@ probability p using samples from a Bernoulli distribution." /> torch - 0.0.3 + 0.1.0 @@ -149,6 +161,9 @@ probability p using samples from a Bernoulli distribution." />
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -162,7 +177,24 @@ probability p using samples from a Bernoulli distribution." />
  • - +
  • Reference
  • @@ -203,7 +235,7 @@ Each channel will be zeroed out independently on every forward call with probability p using samples from a Bernoulli distribution.

    -
    nnf_dropout3d(input, p = 0.5, training = TRUE, inplace = FALSE)
    +
    nnf_dropout3d(input, p = 0.5, training = TRUE, inplace = FALSE)

    Arguments

    diff --git a/reference/nnf_elu.html b/reference/nnf_elu.html index 6f7d757ee1395b9dfbf92aeb3bf79ccde7d7933b..8739683cdda79a02474a1393359d36b7c5cbf175 100644 --- a/reference/nnf_elu.html +++ b/reference/nnf_elu.html @@ -38,6 +38,8 @@ + + + + + + @@ -73,7 +85,7 @@ $$ELU(x) = max(0,x) + min(0, \alpha * (exp(x) - 1))$$." /> torch - 0.0.3 + 0.1.0 @@ -146,6 +158,9 @@ $$ELU(x) = max(0,x) + min(0, \alpha * (exp(x) - 1))$$." />
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -159,7 +174,24 @@ $$ELU(x) = max(0,x) + min(0, \alpha * (exp(x) - 1))$$." />
  • - +
  • Reference
  • @@ -197,9 +229,9 @@ $$ELU(x) = max(0,x) + min(0, \alpha * (exp(x) - 1))$$." /> $$ELU(x) = max(0,x) + min(0, \alpha * (exp(x) - 1))$$.

    -
    nnf_elu(input, alpha = 1, inplace = FALSE)
    +    
    nnf_elu(input, alpha = 1, inplace = FALSE)
     
    -nnf_elu_(input, alpha = 1)
    +nnf_elu_(input, alpha = 1)

    Arguments

    @@ -221,13 +253,13 @@ dimensions

    Examples

if (torch_is_installed()) {
x <- torch_randn(2, 2)
y <- nnf_elu(x, alpha = 1)
# the trailing underscore marks the in-place variant: x is modified directly
nnf_elu_(x, alpha = 1)
torch_equal(x, y)
}
    #> [1] TRUE
    @@ -145,6 +157,9 @@
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -158,7 +173,24 @@
  • - +
  • Reference
  • @@ -195,15 +227,15 @@

A simple lookup table that retrieves embeddings from a dictionary of fixed size.

    -
    nnf_embedding(
    -  input,
    -  weight,
    -  padding_idx = NULL,
    -  max_norm = NULL,
    -  norm_type = 2,
    -  scale_grad_by_freq = FALSE,
    -  sparse = FALSE
    -)
    +
    nnf_embedding(
    +  input,
    +  weight,
    +  padding_idx = NULL,
    +  max_norm = NULL,
    +  norm_type = 2,
    +  scale_grad_by_freq = FALSE,
    +  sparse = FALSE
    +)
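A minimal usage sketch (an illustrative addition; shapes assumed). Indices are 1-based in R torch, so they select rows 1 through 10 of the weight matrix.

if (torch_is_installed()) {
weight <- torch_randn(10, 3)          # 10 embeddings of dimension 3
idx <- torch_tensor(c(1, 2, 5), dtype = torch_long())
nnf_embedding(idx, weight)$size()     # (3, 3)
}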

    Arguments

    diff --git a/reference/nnf_embedding_bag.html b/reference/nnf_embedding_bag.html index fb3d738e25a34540946d1486e20fc0463da19fef..006f65ba93ae7fb8e3825e011302cb88c6a5b806 100644 --- a/reference/nnf_embedding_bag.html +++ b/reference/nnf_embedding_bag.html @@ -38,6 +38,8 @@ + + + + + + @@ -73,7 +85,7 @@ intermediate embeddings." /> torch - 0.0.3 + 0.1.0 @@ -146,6 +158,9 @@ intermediate embeddings." />
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -159,7 +174,24 @@ intermediate embeddings." />
  • - +
  • Reference
  • @@ -197,18 +229,18 @@ intermediate embeddings." /> intermediate embeddings.

    -
    nnf_embedding_bag(
    -  input,
    -  weight,
    -  offsets = NULL,
    -  max_norm = NULL,
    -  norm_type = 2,
    -  scale_grad_by_freq = FALSE,
    -  mode = "mean",
    -  sparse = FALSE,
    -  per_sample_weights = NULL,
    -  include_last_offset = FALSE
    -)
    +
    nnf_embedding_bag(
    +  input,
    +  weight,
    +  offsets = NULL,
    +  max_norm = NULL,
    +  norm_type = 2,
    +  scale_grad_by_freq = FALSE,
    +  mode = "mean",
    +  sparse = FALSE,
    +  per_sample_weights = NULL,
    +  include_last_offset = FALSE
    +)

    Arguments

    diff --git a/reference/nnf_fold.html b/reference/nnf_fold.html index 6ac9488780e7f176d1d754256380933d48a4f6ab..9c46846db2379a164358f2d1c973617aec94a9ee 100644 --- a/reference/nnf_fold.html +++ b/reference/nnf_fold.html @@ -38,6 +38,8 @@ + + + + + + @@ -73,7 +85,7 @@ tensor." /> torch - 0.0.3 + 0.1.0 @@ -146,6 +158,9 @@ tensor." />
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -159,7 +174,24 @@ tensor." />
  • - +
  • Reference
  • @@ -197,14 +229,14 @@ tensor." /> tensor.

    -
    nnf_fold(
    -  input,
    -  output_size,
    -  kernel_size,
    -  dilation = 1,
    -  padding = 0,
    -  stride = 1
    -)
    +
    nnf_fold(
    +  input,
    +  output_size,
    +  kernel_size,
    +  dilation = 1,
    +  padding = 0,
    +  stride = 1
    +)

    Arguments

    diff --git a/reference/nnf_fractional_max_pool2d.html b/reference/nnf_fractional_max_pool2d.html index a7afbdf5b6b9cd0e3889ecd671f0b0cba6dd2bf2..866ffe5fbd86e407c9290c5e1b09d1bfd1a938bb 100644 --- a/reference/nnf_fractional_max_pool2d.html +++ b/reference/nnf_fractional_max_pool2d.html @@ -38,6 +38,8 @@ + + @@ -55,6 +57,16 @@ + + + + @@ -72,7 +84,7 @@ torch - 0.0.3 + 0.1.0 @@ -145,6 +157,9 @@
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -158,7 +173,24 @@
  • - +
  • Reference
  • @@ -195,14 +227,14 @@

    Applies 2D fractional max pooling over an input signal composed of several input planes.

    -
    nnf_fractional_max_pool2d(
    -  input,
    -  kernel_size,
    -  output_size = NULL,
    -  output_ratio = NULL,
    -  return_indices = FALSE,
    -  random_samples = NULL
    -)
    +
    nnf_fractional_max_pool2d(
    +  input,
    +  kernel_size,
    +  output_size = NULL,
    +  output_ratio = NULL,
    +  return_indices = FALSE,
    +  random_samples = NULL
    +)

    Arguments

    diff --git a/reference/nnf_fractional_max_pool3d.html b/reference/nnf_fractional_max_pool3d.html index 09dfbc31b162667c3f87c2438e6925bf88c444bc..a67f4fef38276e950d75434a0f8e592acd1296d4 100644 --- a/reference/nnf_fractional_max_pool3d.html +++ b/reference/nnf_fractional_max_pool3d.html @@ -38,6 +38,8 @@ + + @@ -55,6 +57,16 @@ + + + + @@ -72,7 +84,7 @@ torch - 0.0.3 + 0.1.0 @@ -145,6 +157,9 @@
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -158,7 +173,24 @@
  • - +
  • Reference
  • @@ -195,14 +227,14 @@

    Applies 3D fractional max pooling over an input signal composed of several input planes.

    -
    nnf_fractional_max_pool3d(
    -  input,
    -  kernel_size,
    -  output_size = NULL,
    -  output_ratio = NULL,
    -  return_indices = FALSE,
    -  random_samples = NULL
    -)
    +
    nnf_fractional_max_pool3d(
    +  input,
    +  kernel_size,
    +  output_size = NULL,
    +  output_ratio = NULL,
    +  return_indices = FALSE,
    +  random_samples = NULL
    +)

    Arguments

    diff --git a/reference/nnf_gelu.html b/reference/nnf_gelu.html index 78acd4728ad375b7d5b8b64243bebce0d55a1293..745816e4442d475e2fecb7b8e0e8224882cc64af 100644 --- a/reference/nnf_gelu.html +++ b/reference/nnf_gelu.html @@ -38,6 +38,8 @@ + + @@ -55,6 +57,16 @@ + + + + @@ -72,7 +84,7 @@ torch - 0.0.3 + 0.1.0 @@ -145,6 +157,9 @@
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -158,7 +173,24 @@
  • - +
  • Reference
  • @@ -195,7 +227,7 @@

Applies the Gaussian Error Linear Unit (GELU) function element-wise.

    -
    nnf_gelu(input)
    +
    nnf_gelu(input)

    Arguments

    diff --git a/reference/nnf_glu.html b/reference/nnf_glu.html index b9bba4760fd62848a997e0ce70d093711bb90718..c4cb306ebb1eeb14ee7c5bbcd7c63565df702a90 100644 --- a/reference/nnf_glu.html +++ b/reference/nnf_glu.html @@ -38,6 +38,8 @@ + + @@ -55,6 +57,16 @@ + + + + @@ -72,7 +84,7 @@ torch - 0.0.3 + 0.1.0 @@ -145,6 +157,9 @@
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -158,7 +173,24 @@
  • - +
  • Reference
  • @@ -195,7 +227,7 @@

The gated linear unit. Computes \(GLU(a, b) = a \otimes \sigma(b)\), where the input is split in half along dim to form a and b.

    -
    nnf_glu(input, dim = -1)
    +
    nnf_glu(input, dim = -1)
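A minimal sketch (illustrative; shape assumed): the split dimension must have even size.

if (torch_is_installed()) {
x <- torch_randn(2, 6)
# the last dimension (6) is split into two halves of 3
nnf_glu(x, dim = -1)$size()  # (2, 3)
}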

    Arguments

    diff --git a/reference/nnf_grid_sample.html b/reference/nnf_grid_sample.html index c1de2a986eb54b89a719355d3eb743bd5fb02493..68f299d6666df7037f52e2d8075a44a412595f70 100644 --- a/reference/nnf_grid_sample.html +++ b/reference/nnf_grid_sample.html @@ -38,6 +38,8 @@ + + + + + + @@ -73,7 +85,7 @@ output using input values and pixel locations from grid." /> torch - 0.0.3 + 0.1.0 @@ -146,6 +158,9 @@ output using input values and pixel locations from grid." />
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -159,7 +174,24 @@ output using input values and pixel locations from grid." />
  • - +
  • Reference
  • @@ -197,13 +229,13 @@ output using input values and pixel locations from grid." /> output using input values and pixel locations from grid.

    -
    nnf_grid_sample(
    -  input,
    -  grid,
    -  mode = c("bilinear", "nearest"),
    -  padding_mode = c("zeros", "border", "reflection"),
    -  align_corners = FALSE
    -)
    +
    nnf_grid_sample(
    +  input,
    +  grid,
    +  mode = c("bilinear", "nearest"),
    +  padding_mode = c("zeros", "border", "reflection"),
    +  align_corners = FALSE
    +)

    Arguments

    diff --git a/reference/nnf_group_norm.html b/reference/nnf_group_norm.html index 16e20f9e1ad26c4efa125d78788de6fab95b2a94..bfdddcb5624e3a82abb1d41eed4a0d5a3990f165 100644 --- a/reference/nnf_group_norm.html +++ b/reference/nnf_group_norm.html @@ -38,6 +38,8 @@ + + @@ -55,6 +57,16 @@ + + + + @@ -72,7 +84,7 @@ torch - 0.0.3 + 0.1.0 @@ -145,6 +157,9 @@
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -158,7 +173,24 @@
  • - +
  • Reference
  • @@ -195,7 +227,7 @@

Applies Group Normalization over the channel dimension of a mini-batch of inputs, splitting the channels into groups.

    -
    nnf_group_norm(input, num_groups, weight = NULL, bias = NULL, eps = 1e-05)
    +
    nnf_group_norm(input, num_groups, weight = NULL, bias = NULL, eps = 1e-05)
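A minimal usage sketch (added for illustration; shape assumed): num_groups must divide the channel count.

if (torch_is_installed()) {
x <- torch_randn(2, 6, 4, 4)   # 6 channels split into 3 groups of 2
nnf_group_norm(x, num_groups = 3)$size()
}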

    Arguments

    diff --git a/reference/nnf_gumbel_softmax.html b/reference/nnf_gumbel_softmax.html index dabf6530357f007707fb948a8920820222b85376..fae2649d879f630f49b29fbcf45ff2d0624cd1d6 100644 --- a/reference/nnf_gumbel_softmax.html +++ b/reference/nnf_gumbel_softmax.html @@ -38,6 +38,8 @@ + + + + + + @@ -73,7 +85,7 @@ optionally discretizes." /> torch - 0.0.3 + 0.1.0 @@ -146,6 +158,9 @@ optionally discretizes." />
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -159,7 +174,24 @@ optionally discretizes." />
  • - +
  • Reference
  • @@ -197,7 +229,7 @@ optionally discretizes." /> optionally discretizes.

    -
    nnf_gumbel_softmax(logits, tau = 1, hard = FALSE, dim = -1)
    +
    nnf_gumbel_softmax(logits, tau = 1, hard = FALSE, dim = -1)

    Arguments

    diff --git a/reference/nnf_hardshrink.html b/reference/nnf_hardshrink.html index 633de3e9334c129b346d8a9b3c4cd540950b65de..684b9cc6c36991529b6c216b7b6a140fb545a63c 100644 --- a/reference/nnf_hardshrink.html +++ b/reference/nnf_hardshrink.html @@ -38,6 +38,8 @@ + + @@ -55,6 +57,16 @@ + + + + @@ -72,7 +84,7 @@ torch - 0.0.3 + 0.1.0 @@ -145,6 +157,9 @@
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -158,7 +173,24 @@
  • - +
  • Reference
  • @@ -195,7 +227,7 @@

    Applies the hard shrinkage function element-wise

    -
    nnf_hardshrink(input, lambd = 0.5)
    +
    nnf_hardshrink(input, lambd = 0.5)

    Arguments

    diff --git a/reference/nnf_hardsigmoid.html b/reference/nnf_hardsigmoid.html index 9c3a747596644594bc4a25549534c39ae7335de5..334babfd8b6d1e3daf926112b9e5591409870c68 100644 --- a/reference/nnf_hardsigmoid.html +++ b/reference/nnf_hardsigmoid.html @@ -38,6 +38,8 @@ + + @@ -55,6 +57,16 @@ + + + + @@ -72,7 +84,7 @@ torch - 0.0.3 + 0.1.0 @@ -145,6 +157,9 @@
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -158,7 +173,24 @@
  • - +
  • Reference
  • @@ -195,7 +227,7 @@

    Applies the element-wise function \(\mbox{Hardsigmoid}(x) = \frac{ReLU6(x + 3)}{6}\)

    -
    nnf_hardsigmoid(input, inplace = FALSE)
    +
    nnf_hardsigmoid(input, inplace = FALSE)

    Arguments

    diff --git a/reference/nnf_hardswish.html b/reference/nnf_hardswish.html index 3d969fa2a09b4f4e5e511e4a5ec4e14d8c949db2..4251a6d54ba829e659db2e80825b3f36e1553933 100644 --- a/reference/nnf_hardswish.html +++ b/reference/nnf_hardswish.html @@ -38,6 +38,8 @@ + + + + + + @@ -73,7 +85,7 @@ Searching for MobileNetV3." /> torch - 0.0.3 + 0.1.0 @@ -146,6 +158,9 @@ Searching for MobileNetV3." />
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -159,7 +174,24 @@ Searching for MobileNetV3." />
  • - +
  • Reference
  • @@ -197,7 +229,7 @@ Searching for MobileNetV3." /> Searching for MobileNetV3.

    -
    nnf_hardswish(input, inplace = FALSE)
    +
    nnf_hardswish(input, inplace = FALSE)

    Arguments

    diff --git a/reference/nnf_hardtanh.html b/reference/nnf_hardtanh.html index cc6e8c7089de4158e083159674c09eb7bd2ba678..44359ae1a16d91c500a3dffa67f46ee2a5489a57 100644 --- a/reference/nnf_hardtanh.html +++ b/reference/nnf_hardtanh.html @@ -38,6 +38,8 @@ + + @@ -55,6 +57,16 @@ + + + + @@ -72,7 +84,7 @@ torch - 0.0.3 + 0.1.0 @@ -145,6 +157,9 @@
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -158,7 +173,24 @@
  • - +
  • Reference
  • @@ -195,9 +227,9 @@

    Applies the HardTanh function element-wise.

    -
    nnf_hardtanh(input, min_val = -1, max_val = 1, inplace = FALSE)
    +    
    nnf_hardtanh(input, min_val = -1, max_val = 1, inplace = FALSE)
     
    -nnf_hardtanh_(input, min_val = -1, max_val = 1)
    +nnf_hardtanh_(input, min_val = -1, max_val = 1)

    Arguments

    diff --git a/reference/nnf_hinge_embedding_loss.html b/reference/nnf_hinge_embedding_loss.html index e7c6a0e6e6c180c49cb324a7ae4acf8e606aa195..3728e6301d3b65770a27a9d0b7b642b6a7de95ef 100644 --- a/reference/nnf_hinge_embedding_loss.html +++ b/reference/nnf_hinge_embedding_loss.html @@ -38,6 +38,8 @@ + + + + + + @@ -75,7 +87,7 @@ embeddings or semi-supervised learning." /> torch - 0.0.3 + 0.1.0 @@ -148,6 +160,9 @@ embeddings or semi-supervised learning." />
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -161,7 +176,24 @@ embeddings or semi-supervised learning." />
  • - +
  • Reference
  • @@ -201,7 +233,7 @@ using the L1 pairwise distance as xx , and is typically used for learning nonlin embeddings or semi-supervised learning.

    -
    nnf_hinge_embedding_loss(input, target, margin = 1, reduction = "mean")
    +
    nnf_hinge_embedding_loss(input, target, margin = 1, reduction = "mean")

    Arguments

    diff --git a/reference/nnf_instance_norm.html b/reference/nnf_instance_norm.html index 3b30820f5aa5d294b177cd6e24fb61840ec9ad41..c5236cf5c8ec9feb6129ca1f341290522c993274 100644 --- a/reference/nnf_instance_norm.html +++ b/reference/nnf_instance_norm.html @@ -38,6 +38,8 @@ + + + + + + @@ -73,7 +85,7 @@ batch." /> torch - 0.0.3 + 0.1.0 @@ -146,6 +158,9 @@ batch." />
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -159,7 +174,24 @@ batch." />
  • - +
  • Reference
  • @@ -197,16 +229,16 @@ batch." /> batch.

    -
    nnf_instance_norm(
    -  input,
    -  running_mean = NULL,
    -  running_var = NULL,
    -  weight = NULL,
    -  bias = NULL,
    -  use_input_stats = TRUE,
    -  momentum = 0.1,
    -  eps = 1e-05
    -)
    +
    nnf_instance_norm(
    +  input,
    +  running_mean = NULL,
    +  running_var = NULL,
    +  weight = NULL,
    +  bias = NULL,
    +  use_input_stats = TRUE,
    +  momentum = 0.1,
    +  eps = 1e-05
    +)

    Arguments

    diff --git a/reference/nnf_interpolate.html b/reference/nnf_interpolate.html index 2606ca3be1737d593a8799e5855beee58f0da01f..b0ffa4f631bd50cdc714cb5123548a5e5dcd3f0a 100644 --- a/reference/nnf_interpolate.html +++ b/reference/nnf_interpolate.html @@ -38,6 +38,8 @@ + + + + + + @@ -73,7 +85,7 @@ scale_factor" /> torch - 0.0.3 + 0.1.0 @@ -146,6 +158,9 @@ scale_factor" />
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -159,7 +174,24 @@ scale_factor" />
  • - +
  • Reference
  • @@ -197,14 +229,14 @@ scale_factor" /> scale_factor

    -
    nnf_interpolate(
    -  input,
    -  size = NULL,
    -  scale_factor = NULL,
    -  mode = "nearest",
    -  align_corners = FALSE,
    -  recompute_scale_factor = NULL
    -)
    +
    nnf_interpolate(
    +  input,
    +  size = NULL,
    +  scale_factor = NULL,
    +  mode = "nearest",
    +  align_corners = FALSE,
    +  recompute_scale_factor = NULL
    +)

    Arguments

    diff --git a/reference/nnf_kl_div.html b/reference/nnf_kl_div.html index d90a75f01950c05519cbb2d3e2593dba04f2df03..c67ba8d488c6e72855cca720e44bc4b8d852427a 100644 --- a/reference/nnf_kl_div.html +++ b/reference/nnf_kl_div.html @@ -38,6 +38,8 @@ + + @@ -55,6 +57,16 @@ + + + + @@ -72,7 +84,7 @@ torch - 0.0.3 + 0.1.0 @@ -145,6 +157,9 @@
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -158,7 +173,24 @@
  • - +
  • Reference
  • @@ -195,7 +227,7 @@

The Kullback-Leibler divergence loss.

    -
    nnf_kl_div(input, target, reduction = "mean")
    +
    nnf_kl_div(input, target, reduction = "mean")

    Arguments

    diff --git a/reference/nnf_l1_loss.html b/reference/nnf_l1_loss.html index 790dee1b25112d3abb3d5779c0821f1fe7f3226d..6b22f0821788dc9696e04631299ce42dcae0f534 100644 --- a/reference/nnf_l1_loss.html +++ b/reference/nnf_l1_loss.html @@ -38,6 +38,8 @@ + + @@ -55,6 +57,16 @@ + + + + @@ -72,7 +84,7 @@ torch - 0.0.3 + 0.1.0 @@ -145,6 +157,9 @@
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -158,7 +173,24 @@
  • - +
  • Reference
  • @@ -195,7 +227,7 @@

    Function that takes the mean element-wise absolute value difference.

    -
    nnf_l1_loss(input, target, reduction = "mean")
    +
    nnf_l1_loss(input, target, reduction = "mean")

    Arguments

    diff --git a/reference/nnf_layer_norm.html b/reference/nnf_layer_norm.html index b9ba41fab5a41faae6f3805f23e100975b6511cd..843b04bfbf864ee85b47e7fcbe6ae2d53a3fd221 100644 --- a/reference/nnf_layer_norm.html +++ b/reference/nnf_layer_norm.html @@ -38,6 +38,8 @@ + + @@ -55,6 +57,16 @@ + + + + @@ -72,7 +84,7 @@ torch - 0.0.3 + 0.1.0 @@ -145,6 +157,9 @@
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -158,7 +173,24 @@
  • - +
  • Reference
  • @@ -195,13 +227,13 @@

Applies Layer Normalization over the last dimensions of the input, as specified by normalized_shape.

    -
    nnf_layer_norm(
    -  input,
    -  normalized_shape,
    -  weight = NULL,
    -  bias = NULL,
    -  eps = 1e-05
    -)
    +
    nnf_layer_norm(
    +  input,
    +  normalized_shape,
    +  weight = NULL,
    +  bias = NULL,
    +  eps = 1e-05
    +)
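A minimal sketch (an illustrative addition; shape assumed):

if (torch_is_installed()) {
x <- torch_randn(2, 3, 4)
# normalize over the last two dimensions
nnf_layer_norm(x, normalized_shape = c(3, 4))$size()
}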

    Arguments

    diff --git a/reference/nnf_leaky_relu.html b/reference/nnf_leaky_relu.html index f5bcbf2540119ed121a7e7ef773fc8076fc7feea..37113825c5d9c14e8231b86069f14a625e054fb4 100644 --- a/reference/nnf_leaky_relu.html +++ b/reference/nnf_leaky_relu.html @@ -38,6 +38,8 @@ + + + + @@ -73,7 +85,7 @@ torch - 0.0.3 + 0.1.0 @@ -146,6 +158,9 @@
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -159,7 +174,24 @@
  • - +
  • Reference
  • @@ -197,7 +229,7 @@ \(LeakyReLU(x) = max(0, x) + negative_slope * min(0, x)\)

    -
    nnf_leaky_relu(input, negative_slope = 0.01, inplace = FALSE)
    +
    nnf_leaky_relu(input, negative_slope = 0.01, inplace = FALSE)

    Arguments

    diff --git a/reference/nnf_linear.html b/reference/nnf_linear.html index 45d222cf24ca2062b45dc55d51ed473bf87ab012..1578f3a06097351875437a6efb0b7060fa63c8bf 100644 --- a/reference/nnf_linear.html +++ b/reference/nnf_linear.html @@ -38,6 +38,8 @@ + + @@ -55,6 +57,16 @@ + + + + @@ -72,7 +84,7 @@ torch - 0.0.3 + 0.1.0 @@ -145,6 +157,9 @@
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -158,7 +173,24 @@
  • - +
  • Reference
  • @@ -195,7 +227,7 @@

    Applies a linear transformation to the incoming data: \(y = xA^T + b\).

    -
    nnf_linear(input, weight, bias = NULL)
    +
    nnf_linear(input, weight, bias = NULL)
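A minimal usage sketch (illustrative; shapes assumed, not from the original page):

if (torch_is_installed()) {
x <- torch_randn(2, 4)          # (N, in_features)
w <- torch_randn(3, 4)          # (out_features, in_features)
b <- torch_randn(3)
nnf_linear(x, w, b)$size()      # (2, 3)
}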

    Arguments

    diff --git a/reference/nnf_local_response_norm.html b/reference/nnf_local_response_norm.html index 612a9ee528a415957a74af154118a17e273d9517..935ff78558b8942d2888ef1a75e47de936086b56 100644 --- a/reference/nnf_local_response_norm.html +++ b/reference/nnf_local_response_norm.html @@ -38,6 +38,8 @@ + + + + + + @@ -74,7 +86,7 @@ Applies normalization across channels." /> torch - 0.0.3 + 0.1.0 @@ -147,6 +159,9 @@ Applies normalization across channels." />
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -160,7 +175,24 @@ Applies normalization across channels." />
  • - +
  • Reference
  • @@ -199,7 +231,7 @@ several input planes, where channels occupy the second dimension. Applies normalization across channels.

    -
    nnf_local_response_norm(input, size, alpha = 1e-04, beta = 0.75, k = 1)
    +
    nnf_local_response_norm(input, size, alpha = 1e-04, beta = 0.75, k = 1)

    Arguments

    diff --git a/reference/nnf_log_softmax.html b/reference/nnf_log_softmax.html index f29faf0e33d702bbd95ac785460a104da764a52b..8787934ba61dae780026703d3b8f5b6d8673b04d 100644 --- a/reference/nnf_log_softmax.html +++ b/reference/nnf_log_softmax.html @@ -38,6 +38,8 @@ + + @@ -55,6 +57,16 @@ + + + + @@ -72,7 +84,7 @@ torch - 0.0.3 + 0.1.0 @@ -145,6 +157,9 @@
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -158,7 +173,24 @@
  • - +
  • Reference
  • @@ -195,7 +227,7 @@

    Applies a softmax followed by a logarithm.

    -
    nnf_log_softmax(input, dim = NULL, dtype = NULL)
    +
    nnf_log_softmax(input, dim = NULL, dtype = NULL)
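A minimal sketch (added for illustration; shape assumed): exponentiating the result recovers probabilities that sum to 1 along dim.

if (torch_is_installed()) {
x <- torch_randn(2, 5)
out <- torch_exp(nnf_log_softmax(x, dim = 2))
out$sum(dim = 2)   # each row sums to 1
}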

    Arguments

    diff --git a/reference/nnf_logsigmoid.html b/reference/nnf_logsigmoid.html index 5c2b14712c9e45efb3348278b38f497943ed0af7..4d382827fb373ccd06928eb238d6b265347fa5d4 100644 --- a/reference/nnf_logsigmoid.html +++ b/reference/nnf_logsigmoid.html @@ -38,6 +38,8 @@ + + @@ -55,6 +57,16 @@ + + + + @@ -72,7 +84,7 @@ torch - 0.0.3 + 0.1.0 @@ -145,6 +157,9 @@
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -158,7 +173,24 @@
  • - +
  • Reference
  • @@ -195,7 +227,7 @@

    Applies element-wise \(LogSigmoid(x_i) = log(\frac{1}{1 + exp(-x_i)})\)

    -
    nnf_logsigmoid(input)
    +
    nnf_logsigmoid(input)

    Arguments

    diff --git a/reference/nnf_lp_pool1d.html b/reference/nnf_lp_pool1d.html index 70abd1fc226766b65d5852fddfa31732f549fc45..a15b026494b464f9d834aabf4b29daf4e5eaa405 100644 --- a/reference/nnf_lp_pool1d.html +++ b/reference/nnf_lp_pool1d.html @@ -38,6 +38,8 @@ + + + + + + @@ -74,7 +86,7 @@ zero, the gradient is set to zero as well." /> torch - 0.0.3 + 0.1.0 @@ -147,6 +159,9 @@ zero, the gradient is set to zero as well." />
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -160,7 +175,24 @@ zero, the gradient is set to zero as well." />
  • - +
  • Reference
  • @@ -199,7 +231,7 @@ several input planes. If the sum of all inputs to the power of p is zero, the gradient is set to zero as well.

    -
    nnf_lp_pool1d(input, norm_type, kernel_size, stride = NULL, ceil_mode = FALSE)
    +
    nnf_lp_pool1d(input, norm_type, kernel_size, stride = NULL, ceil_mode = FALSE)

    Arguments

    diff --git a/reference/nnf_lp_pool2d.html b/reference/nnf_lp_pool2d.html index d8f53a49dd5e18174d1f9ccd4a2ba931f8bc9028..0215ea2cdb2c7ced3a278760a55935a273ddda6a 100644 --- a/reference/nnf_lp_pool2d.html +++ b/reference/nnf_lp_pool2d.html @@ -38,6 +38,8 @@ + + + + + + @@ -74,7 +86,7 @@ zero, the gradient is set to zero as well." /> torch - 0.0.3 + 0.1.0 @@ -147,6 +159,9 @@ zero, the gradient is set to zero as well." />
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -160,7 +175,24 @@ zero, the gradient is set to zero as well." />
  • - +
  • Reference
  • @@ -199,7 +231,7 @@ several input planes. If the sum of all inputs to the power of p is zero, the gradient is set to zero as well.

    -
    nnf_lp_pool2d(input, norm_type, kernel_size, stride = NULL, ceil_mode = FALSE)
    +
    nnf_lp_pool2d(input, norm_type, kernel_size, stride = NULL, ceil_mode = FALSE)

    Arguments

    diff --git a/reference/nnf_margin_ranking_loss.html b/reference/nnf_margin_ranking_loss.html index 02a37006266f37bdec4f9fb63b2369cdeff24039..0ac1cc43cb7573f2134008b47167db123412c33f 100644 --- a/reference/nnf_margin_ranking_loss.html +++ b/reference/nnf_margin_ranking_loss.html @@ -38,6 +38,8 @@ + + + + + + @@ -73,7 +85,7 @@ mini-batch Tensors, and a label 1D mini-batch tensor y (containing 1 or -1)." /> torch - 0.0.3 + 0.1.0 @@ -146,6 +158,9 @@ mini-batch Tensors, and a label 1D mini-batch tensor y (containing 1 or -1)." />
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -159,7 +174,24 @@ mini-batch Tensors, and a label 1D mini-batch tensor y (containing 1 or -1)." />
  • - +
  • Reference
  • @@ -197,7 +229,7 @@ mini-batch Tensors, and a label 1D mini-batch tensor y (containing 1 or -1)." /> mini-batch Tensors, and a label 1D mini-batch tensor y (containing 1 or -1).

    -
    nnf_margin_ranking_loss(input1, input2, target, margin = 0, reduction = "mean")
    +
    nnf_margin_ranking_loss(input1, input2, target, margin = 0, reduction = "mean")

    Arguments

    diff --git a/reference/nnf_max_pool1d.html b/reference/nnf_max_pool1d.html index 0663c6e52165761f26f855b36b8eb1a930f6cedb..e18b04c1b726fd7f58a2a74d69baaef77996746a 100644 --- a/reference/nnf_max_pool1d.html +++ b/reference/nnf_max_pool1d.html @@ -38,6 +38,8 @@ + + + + + + @@ -73,7 +85,7 @@ planes." /> torch - 0.0.3 + 0.1.0 @@ -146,6 +158,9 @@ planes." />
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -159,7 +174,24 @@ planes." />
  • - +
  • Reference
  • @@ -197,15 +229,15 @@ planes." /> planes.

    -
    nnf_max_pool1d(
    -  input,
    -  kernel_size,
    -  stride = NULL,
    -  padding = 0,
    -  dilation = 1,
    -  ceil_mode = FALSE,
    -  return_indices = FALSE
    -)
    +
    nnf_max_pool1d(
    +  input,
    +  kernel_size,
    +  stride = NULL,
    +  padding = 0,
    +  dilation = 1,
    +  ceil_mode = FALSE,
    +  return_indices = FALSE
    +)

    Arguments

    diff --git a/reference/nnf_max_pool2d.html b/reference/nnf_max_pool2d.html index f7dafcb091493869816e01003c6834cc0d8758e6..bc4dab03ec9d53d15d4db7a2cf8b740d95585dcc 100644 --- a/reference/nnf_max_pool2d.html +++ b/reference/nnf_max_pool2d.html @@ -38,6 +38,8 @@ + + + + + + @@ -73,7 +85,7 @@ planes." /> torch - 0.0.3 + 0.1.0 @@ -146,6 +158,9 @@ planes." />
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -159,7 +174,24 @@ planes." />
  • - +
  • Reference
  • @@ -197,15 +229,15 @@ planes." /> planes.

    -
    nnf_max_pool2d(
    -  input,
    -  kernel_size,
    -  stride = kernel_size,
    -  padding = 0,
    -  dilation = 1,
    -  ceil_mode = FALSE,
    -  return_indices = FALSE
    -)
    +
    nnf_max_pool2d(
    +  input,
    +  kernel_size,
    +  stride = kernel_size,
    +  padding = 0,
    +  dilation = 1,
    +  ceil_mode = FALSE,
    +  return_indices = FALSE
    +)
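A minimal usage sketch (illustrative; shape assumed): with the default stride equal to kernel_size, the spatial dimensions are halved here.

if (torch_is_installed()) {
x <- torch_randn(1, 1, 4, 4)
nnf_max_pool2d(x, kernel_size = 2)$size()  # (1, 1, 2, 2)
}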

    Arguments

    diff --git a/reference/nnf_max_pool3d.html b/reference/nnf_max_pool3d.html index 840ffcf2e5643c2255f91dadd882a29c62887deb..0c0307e9afa494ccdfab6ef91e35b0977797db04 100644 --- a/reference/nnf_max_pool3d.html +++ b/reference/nnf_max_pool3d.html @@ -38,6 +38,8 @@ + + + + + + @@ -73,7 +85,7 @@ planes." /> torch - 0.0.3 + 0.1.0 @@ -146,6 +158,9 @@ planes." />
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -159,7 +174,24 @@ planes." />
  • - +
  • Reference
  • @@ -197,15 +229,15 @@ planes." /> planes.

    -
    nnf_max_pool3d(
    -  input,
    -  kernel_size,
    -  stride = NULL,
    -  padding = 0,
    -  dilation = 1,
    -  ceil_mode = FALSE,
    -  return_indices = FALSE
    -)
    +
    nnf_max_pool3d(
    +  input,
    +  kernel_size,
    +  stride = NULL,
    +  padding = 0,
    +  dilation = 1,
    +  ceil_mode = FALSE,
    +  return_indices = FALSE
    +)

    Arguments

diff --git a/reference/nnf_max_unpool1d.html b/reference/nnf_max_unpool1d.html

Computes a partial inverse of MaxPool1d.

nnf_max_unpool1d(
  input,
  indices,
  kernel_size,
  stride = NULL,
  padding = 0,
  output_size = NULL
)
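A short usage sketch (editor-added; it assumes that pooling with return_indices = TRUE yields a list holding the pooled values and their indices):

library(torch)
x <- torch_randn(1, 1, 8)
pooled <- nnf_max_pool1d(x, kernel_size = 2, return_indices = TRUE)
# restore the original length; non-maximum positions are filled with zeros
nnf_max_unpool1d(pooled[[1]], pooled[[2]], kernel_size = 2)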

    Arguments

diff --git a/reference/nnf_max_unpool2d.html b/reference/nnf_max_unpool2d.html

Computes a partial inverse of MaxPool2d.

nnf_max_unpool2d(
  input,
  indices,
  kernel_size,
  stride = NULL,
  padding = 0,
  output_size = NULL
)

    Arguments

diff --git a/reference/nnf_max_unpool3d.html b/reference/nnf_max_unpool3d.html

Computes a partial inverse of MaxPool3d.

nnf_max_unpool3d(
  input,
  indices,
  kernel_size,
  stride = NULL,
  padding = 0,
  output_size = NULL
)

    Arguments

diff --git a/reference/nnf_mse_loss.html b/reference/nnf_mse_loss.html

Measures the element-wise mean squared error.

nnf_mse_loss(input, target, reduction = "mean")
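A short usage sketch (editor-added; shapes are arbitrary):

library(torch)
input <- torch_randn(3, 5, requires_grad = TRUE)
target <- torch_randn(3, 5)
loss <- nnf_mse_loss(input, target)   # scalar, since reduction = "mean"
loss$backward()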

    Arguments

diff --git a/reference/nnf_multi_head_attention_forward.html b/reference/nnf_multi_head_attention_forward.html

Allows the model to jointly attend to information from different representation subspaces. See reference: Attention Is All You Need

nnf_multi_head_attention_forward(
  query,
  key,
  value,
  embed_dim_to_check,
  num_heads,
  in_proj_weight,
  in_proj_bias,
  bias_k,
  bias_v,
  add_zero_attn,
  dropout_p,
  out_proj_weight,
  out_proj_bias,
  training = TRUE,
  key_padding_mask = NULL,
  need_weights = TRUE,
  attn_mask = NULL,
  use_separate_proj_weight = FALSE,
  q_proj_weight = NULL,
  k_proj_weight = NULL,
  v_proj_weight = NULL,
  static_k = NULL,
  static_v = NULL
)

    Arguments

diff --git a/reference/nnf_multi_margin_loss.html b/reference/nnf_multi_margin_loss.html

Creates a criterion that optimizes a multi-class classification hinge loss (margin-based loss) between input x (a 2D mini-batch Tensor) and output y (which is a 1D tensor of target class indices, 0 <= y <= x$size(2) - 1).

nnf_multi_margin_loss(
  input,
  target,
  p = 1,
  margin = 1,
  weight = NULL,
  reduction = "mean"
)

    Arguments

diff --git a/reference/nnf_multilabel_margin_loss.html b/reference/nnf_multilabel_margin_loss.html

Creates a criterion that optimizes a multi-class multi-classification hinge loss (margin-based loss) between input x (a 2D mini-batch Tensor) and output y (which is a 2D Tensor of target class indices).

nnf_multilabel_margin_loss(input, target, reduction = "mean")

    Arguments

diff --git a/reference/nnf_multilabel_soft_margin_loss.html b/reference/nnf_multilabel_soft_margin_loss.html

Creates a criterion that optimizes a multi-label one-versus-all loss based on max-entropy, between input x and target y of size (N, C).

nnf_multilabel_soft_margin_loss(input, target, weight, reduction = "mean")

    Arguments

diff --git a/reference/nnf_nll_loss.html b/reference/nnf_nll_loss.html

The negative log likelihood loss.

nnf_nll_loss(
  input,
  target,
  weight = NULL,
  ignore_index = -100,
  reduction = "mean"
)
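A short usage sketch (editor-added; the use of nnf_log_softmax() for the log-probabilities and 1-based targets, consistent with R indexing, are both assumptions, not taken from this page):

library(torch)
input <- nnf_log_softmax(torch_randn(3, 5), dim = 2)  # log-probabilities over 5 classes
target <- torch_tensor(c(1, 2, 3), dtype = torch_long())
nnf_nll_loss(input, target)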

    Arguments

diff --git a/reference/nnf_normalize.html b/reference/nnf_normalize.html

Performs \(L_p\) normalization of inputs over specified dimension.

nnf_normalize(input, p = 2, dim = 1, eps = 1e-12, out = NULL)

    Arguments

diff --git a/reference/nnf_one_hot.html b/reference/nnf_one_hot.html

Takes an integer tensor with index values and returns a tensor of shape (*, num_classes) that is zero everywhere except where the index of the last dimension matches the corresponding value of the input tensor, in which case it will be 1.

nnf_one_hot(tensor, num_classes = -1)
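A short usage sketch (editor-added; it assumes 0-based class values, as in the underlying libtorch operator):

library(torch)
x <- torch_tensor(c(0, 1, 2), dtype = torch_long())
nnf_one_hot(x, num_classes = 3)  # 3 x 3 matrix with a single 1 per row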

    Arguments

diff --git a/reference/nnf_pad.html b/reference/nnf_pad.html

Pads tensor.

nnf_pad(input, pad, mode = "constant", value = 0)
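A short usage sketch (editor-added):

library(torch)
x <- torch_ones(1, 1, 3)
# pad the last dimension by one element on each side
nnf_pad(x, pad = c(1, 1), mode = "constant", value = 0)  # shape (1, 1, 5)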

    Arguments

diff --git a/reference/nnf_pairwise_distance.html b/reference/nnf_pairwise_distance.html

Computes the batchwise pairwise distance between vectors using the p-norm.

nnf_pairwise_distance(x1, x2, p = 2, eps = 1e-06, keepdim = FALSE)
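A short usage sketch (editor-added; shapes are arbitrary):

library(torch)
x1 <- torch_randn(4, 8)
x2 <- torch_randn(4, 8)
nnf_pairwise_distance(x1, x2, p = 2)  # one distance per row, shape (4)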

    Arguments

diff --git a/reference/nnf_pdist.html b/reference/nnf_pdist.html

Computes the p-norm distance between every pair of row vectors in the input. This is identical to the upper triangular portion, excluding the diagonal, of the full pairwise distance matrix; it is faster if the rows are contiguous.

nnf_pdist(input, p = 2)

    Arguments

diff --git a/reference/nnf_pixel_shuffle.html b/reference/nnf_pixel_shuffle.html

Rearranges elements in a tensor of shape \((*, C \times r^2, H, W)\) to a tensor of shape \((*, C, H \times r, W \times r)\).

nnf_pixel_shuffle(input, upscale_factor)

    Arguments

diff --git a/reference/nnf_poisson_nll_loss.html b/reference/nnf_poisson_nll_loss.html

Poisson negative log likelihood loss.

nnf_poisson_nll_loss(
  input,
  target,
  log_input = TRUE,
  full = FALSE,
  eps = 1e-08,
  reduction = "mean"
)

    Arguments

diff --git a/reference/nnf_prelu.html b/reference/nnf_prelu.html

Applies element-wise the function \(PReLU(x) = max(0,x) + weight * min(0,x)\) where weight is a learnable parameter.

nnf_prelu(input, weight)

    Arguments

diff --git a/reference/nnf_relu.html b/reference/nnf_relu.html

Applies the rectified linear unit function element-wise.

nnf_relu(input, inplace = FALSE)

nnf_relu_(input)

    Arguments

diff --git a/reference/nnf_relu6.html b/reference/nnf_relu6.html

Applies the element-wise function \(ReLU6(x) = min(max(0,x), 6)\).

nnf_relu6(input, inplace = FALSE)

    Arguments

diff --git a/reference/nnf_rrelu.html b/reference/nnf_rrelu.html

Randomized leaky ReLU.

nnf_rrelu(input, lower = 1/8, upper = 1/3, training = FALSE, inplace = FALSE)

nnf_rrelu_(input, lower = 1/8, upper = 1/3, training = FALSE)

    Arguments

diff --git a/reference/nnf_selu.html b/reference/nnf_selu.html

Applies element-wise, \(SELU(x) = scale * (max(0,x) + min(0, \alpha * (exp(x) - 1)))\), with \(\alpha=1.6732632423543772848170429916717\) and \(scale=1.0507009873554804934193349852946\).

nnf_selu(input, inplace = FALSE)

nnf_selu_(input)

    Arguments


    Examples

if (torch_is_installed()) {
  x <- torch_randn(2, 2)
  y <- nnf_selu(x)
  nnf_selu_(x)
  torch_equal(x, y)
}
    #> [1] TRUE
diff --git a/reference/nnf_sigmoid.html b/reference/nnf_sigmoid.html

Applies element-wise \(Sigmoid(x_i) = \frac{1}{1 + exp(-x_i)}\)

nnf_sigmoid(input)

    Arguments

diff --git a/reference/nnf_smooth_l1_loss.html b/reference/nnf_smooth_l1_loss.html

Function that uses a squared term if the absolute element-wise error falls below 1 and an L1 term otherwise.

nnf_smooth_l1_loss(input, target, reduction = "mean")

    Arguments

diff --git a/reference/nnf_soft_margin_loss.html b/reference/nnf_soft_margin_loss.html

Creates a criterion that optimizes a two-class classification logistic loss between input tensor x and target tensor y (containing 1 or -1).

nnf_soft_margin_loss(input, target, reduction = "mean")

    Arguments

diff --git a/reference/nnf_softmax.html b/reference/nnf_softmax.html

Applies a softmax function.

nnf_softmax(input, dim, dtype = NULL)
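A short usage sketch (editor-added):

library(torch)
x <- torch_randn(2, 3)
p <- nnf_softmax(x, dim = 2)
torch_sum(p, dim = 2)  # each row sums to 1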

    Arguments

diff --git a/reference/nnf_softmin.html b/reference/nnf_softmin.html

Applies a softmin function.

nnf_softmin(input, dim, dtype = NULL)

    Arguments

diff --git a/reference/nnf_softplus.html b/reference/nnf_softplus.html

Applies element-wise, the function \(Softplus(x) = 1/\beta * log(1 + exp(\beta * x))\).

nnf_softplus(input, beta = 1, threshold = 20)

    Arguments

diff --git a/reference/nnf_softshrink.html b/reference/nnf_softshrink.html

Applies the soft shrinkage function elementwise.

nnf_softshrink(input, lambd = 0.5)

    Arguments

diff --git a/reference/nnf_softsign.html b/reference/nnf_softsign.html

Applies element-wise, the function \(SoftSign(x) = x/(1 + |x|)\)

nnf_softsign(input)

    Arguments

diff --git a/reference/nnf_tanhshrink.html b/reference/nnf_tanhshrink.html

Applies element-wise, \(Tanhshrink(x) = x - Tanh(x)\)

nnf_tanhshrink(input)

    Arguments

diff --git a/reference/nnf_threshold.html b/reference/nnf_threshold.html

Thresholds each element of the input Tensor.

nnf_threshold(input, threshold, value, inplace = FALSE)

nnf_threshold_(input, threshold, value)

    Arguments

diff --git a/reference/nnf_triplet_margin_loss.html b/reference/nnf_triplet_margin_loss.html

Creates a criterion that measures the triplet loss given input tensors. A triplet is composed of a, p and n (i.e., anchor, positive examples and negative examples respectively). The shapes of all input tensors should be (N, D).

nnf_triplet_margin_loss(
  anchor,
  positive,
  negative,
  margin = 1,
  p = 2,
  eps = 1e-06,
  swap = FALSE,
  reduction = "mean"
)

    Arguments

diff --git a/reference/nnf_triplet_margin_with_distance_loss.html b/reference/nnf_triplet_margin_with_distance_loss.html
new file mode 100644

Triplet margin with distance loss — nnf_triplet_margin_with_distance_loss

nnf_triplet_margin_with_distance_loss(
  anchor,
  positive,
  negative,
  distance_function = NULL,
  margin = 1,
  swap = FALSE,
  reduction = "mean"
)

    Arguments

    anchor

    the anchor input tensor

    positive

    the positive input tensor

    negative

    the negative input tensor

    distance_function

(callable, optional): A nonnegative, real-valued function that quantifies the closeness of two tensors. If not specified, nn_pairwise_distance() will be used. Default: NULL

    margin

    Default: 1.

    swap

The distance swap is described in detail in the paper Learning shallow convolutional feature descriptors with triplet losses by V. Balntas, E. Riba et al. Default: FALSE.

    reduction

(string, optional) – Specifies the reduction to apply to the output: 'none' | 'mean' | 'sum'. 'none': no reduction will be applied, 'mean': the sum of the output will be divided by the number of elements in the output, 'sum': the output will be summed. Default: 'mean'
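A short usage sketch (editor-added; the use of nnf_cosine_similarity() to build the custom distance is an assumption, not taken from this page):

library(torch)
anchor <- torch_randn(32, 128, requires_grad = TRUE)
positive <- torch_randn(32, 128, requires_grad = TRUE)
negative <- torch_randn(32, 128, requires_grad = TRUE)
# a custom distance: 1 - cosine similarity
cosine_distance <- function(x, y) 1 - nnf_cosine_similarity(x, y)
loss <- nnf_triplet_margin_with_distance_loss(
  anchor, positive, negative,
  distance_function = cosine_distance,
  margin = 0.5
)
loss$backward()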

diff --git a/reference/nnf_unfold.html b/reference/nnf_unfold.html

Extracts sliding local blocks from a batched input tensor.

nnf_unfold(input, kernel_size, dilation = 1, padding = 0, stride = 1)
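A short usage sketch (editor-added):

library(torch)
x <- torch_randn(1, 1, 4, 4)
# every 2x2 patch becomes a column: 1 * 2 * 2 rows, 3 * 3 = 9 patches
nnf_unfold(x, kernel_size = c(2, 2))$shape  # (1, 4, 9)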

    Arguments

diff --git a/reference/optim_adam.html b/reference/optim_adam.html

It has been proposed in Adam: A Method for Stochastic Optimization.

optim_adam(
  params,
  lr = 0.001,
  betas = c(0.9, 0.999),
  eps = 1e-08,
  weight_decay = 0,
  amsgrad = FALSE
)

    Arguments

amsgrad: whether to use the AMSGrad variant of this algorithm, from the paper On the Convergence of Adam and Beyond.

    Examples

if (torch_is_installed()) {
  if (FALSE) {
    optimizer <- optim_adam(model$parameters(), lr=0.1)
    optimizer$zero_grad()
    loss_fn(model(input), target)$backward()
    optimizer$step()
  }
}
diff --git a/reference/optim_required.html b/reference/optim_required.html

export

optim_required()
diff --git a/reference/optim_sgd.html b/reference/optim_sgd.html

Implements stochastic gradient descent (optionally with momentum). Nesterov momentum is based on the formula from On the importance of initialization and momentum in deep learning.

optim_sgd(
  params,
  lr = optim_required(),
  momentum = 0,
  dampening = 0,
  weight_decay = 0,
  nesterov = FALSE
)

    Arguments

The Nesterov version is analogously modified.

    Examples

if (torch_is_installed()) {
  if (FALSE) {
    optimizer <- optim_sgd(model$parameters(), lr=0.1, momentum=0.9)
    optimizer$zero_grad()
    loss_fn(model(input), target)$backward()
    optimizer$step()
  }
}
diff --git a/reference/pipe.html b/reference/pipe.html

See magrittr::%>% for details.

lhs %>% rhs
diff --git a/reference/tensor_dataset.html b/reference/tensor_dataset.html

Each sample will be retrieved by indexing tensors along the first dimension.

tensor_dataset(...)
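A short usage sketch (editor-added; indexing the dataset with [ is an assumption about the dataset API, not taken from this page):

library(torch)
x <- torch_randn(100, 10)
y <- torch_randn(100)
ds <- tensor_dataset(x, y)
ds[1]  # a list holding the first row of x and the first element of y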

    Arguments

diff --git a/reference/torch_abs.html b/reference/torch_abs.html

Abs

torch_abs(self)

    Arguments

$$ out_i = |input_i| $$

    Examples

if (torch_is_installed()) {
  torch_abs(torch_tensor(c(-1, -2, 3)))
}
#> torch_tensor
#>  1
#>  2
#>  3
#> [ CPUFloatType{3} ]

diff --git a/reference/torch_acos.html b/reference/torch_acos.html
Acos

torch_acos(self)

    Arguments

$$ out_i = cos^{-1}(input_i) $$

    Examples

if (torch_is_installed()) {
  a = torch_randn(c(4))
  a
  torch_acos(a)
}

#> torch_tensor
#>  2.1714
#>  1.2374
#>  2.7904
#>  0.9654
#> [ CPUFloatType{4} ]
diff --git a/reference/torch_adaptive_avg_pool1d.html b/reference/torch_adaptive_avg_pool1d.html

Adaptive_avg_pool1d

torch_adaptive_avg_pool1d(self, output_size)

    Arguments

diff --git a/reference/torch_add.html b/reference/torch_add.html

Add

torch_add(self, other, alpha = 1L)

    Arguments

If other is of type FloatTensor or DoubleTensor, alpha must be a real number, otherwise it should be an integer.

    Examples

if (torch_is_installed()) {
  a = torch_randn(c(4))
  a
  torch_add(a, 20)

  a = torch_randn(c(4))
  a
  b = torch_randn(c(4, 1))
  b
  torch_add(a, b)
}

#> torch_tensor
#> -0.8442 -0.7448 -0.7203  0.1948
#>  0.0081  0.1076  0.1320  1.0471
#> -1.6073 -1.5079 -1.4834 -0.5683
#> -0.9071 -0.8077 -0.7832  0.1319
#> [ CPUFloatType{4,4} ]
diff --git a/reference/torch_addbmm.html b/reference/torch_addbmm.html

Addbmm

torch_addbmm(self, batch1, batch2, beta = 1L, alpha = 1L)

    Arguments

For inputs of type FloatTensor or DoubleTensor, arguments beta and alpha must be real numbers, otherwise they should be integers.

    Examples

if (torch_is_installed()) {
  M = torch_randn(c(3, 5))
  batch1 = torch_randn(c(10, 3, 4))
  batch2 = torch_randn(c(10, 4, 5))
  torch_addbmm(M, batch1, batch2)
}

#> torch_tensor
#>  11.6949  -6.9679   2.1072   1.6915   4.1252
#>  -1.6824   1.3871  -4.7343  -2.1224   2.7545
#>  -0.4472  -6.0422  -4.1033   5.6913  -3.8870
#> [ CPUFloatType{3,5} ]
diff --git a/reference/torch_addcdiv.html b/reference/torch_addcdiv.html

Addcdiv

torch_addcdiv(self, tensor1, tensor2, value = 1L)

    Arguments

For inputs of type FloatTensor or DoubleTensor, value must be a real number, otherwise an integer.

    Examples

if (torch_is_installed()) {
  t = torch_randn(c(1, 3))
  t1 = torch_randn(c(3, 1))
  t2 = torch_randn(c(1, 3))
  torch_addcdiv(t, t1, t2, 0.1)
}

#> torch_tensor
#> -0.7597  1.3254 -1.3132
#> -1.8727 -0.6315  1.4315
#> -1.4516  0.1089  0.3930
#> [ CPUFloatType{3,3} ]
diff --git a/reference/torch_addcmul.html b/reference/torch_addcmul.html

Addcmul

torch_addcmul(self, tensor1, tensor2, value = 1L)

    Arguments

For inputs of type FloatTensor or DoubleTensor, value must be a real number, otherwise an integer.

    Examples

if (torch_is_installed()) {
  t = torch_randn(c(1, 3))
  t1 = torch_randn(c(3, 1))
  t2 = torch_randn(c(1, 3))
  torch_addcmul(t, t1, t2, 0.1)
}

#> torch_tensor
#>  0.0819  0.3796  1.1782
#>  0.1175  0.2891  1.2214
#>  0.0635  0.4265  1.1558
#> [ CPUFloatType{3,3} ]
diff --git a/reference/torch_addmm.html b/reference/torch_addmm.html

Addmm

torch_addmm(self, mat1, mat2, beta = 1L, alpha = 1L)

    Arguments

For inputs of type FloatTensor or DoubleTensor, arguments beta and alpha must be real numbers, otherwise they should be integers.

    Examples

if (torch_is_installed()) {
  M = torch_randn(c(2, 3))
  mat1 = torch_randn(c(2, 3))
  mat2 = torch_randn(c(3, 3))
  torch_addmm(M, mat1, mat2)
}

#> torch_tensor
#> -0.8562 -4.9523  4.3217
#>  0.2143 -0.0474 -1.3801
#> [ CPUFloatType{2,3} ]
diff --git a/reference/torch_addmv.html b/reference/torch_addmv.html

Addmv

torch_addmv(self, mat, vec, beta = 1L, alpha = 1L)

    Arguments

For inputs of type FloatTensor or DoubleTensor, arguments beta and alpha must be real numbers, otherwise they should be integers.

    Examples

if (torch_is_installed()) {
  M = torch_randn(c(2))
  mat = torch_randn(c(2, 3))
  vec = torch_randn(c(3))
  torch_addmv(M, mat, vec)
}

#> torch_tensor
#>  0.6005
#>  2.0211
#> [ CPUFloatType{2} ]
diff --git a/reference/torch_addr.html b/reference/torch_addr.html

Addr

torch_addr(self, vec1, vec2, beta = 1L, alpha = 1L)

    Arguments

For inputs of type FloatTensor or DoubleTensor, arguments beta and alpha must be real numbers, otherwise they should be integers.

    Examples

if (torch_is_installed()) {
  vec1 = torch_arange(1., 4.)
  vec2 = torch_arange(1., 3.)
  M = torch_zeros(c(3, 2))
  torch_addr(M, vec1, vec2)
}

#> torch_tensor
#>  1  2
#>  2  4
#>  3  6
#> [ CPUFloatType{3,2} ]

diff --git a/reference/torch_allclose.html b/reference/torch_allclose.html
Allclose

torch_allclose(self, other, rtol = 1e-05, atol = 1e-08, equal_nan = FALSE)

    Arguments

This function checks if input and other satisfy the condition \(|input - other| \leq atol + rtol \times |other|\) elementwise, for all elements of input and other. The behaviour of this function is analogous to numpy.allclose <https://docs.scipy.org/doc/numpy/reference/generated/numpy.allclose.html>.

    Examples

if (torch_is_installed()) {
  torch_allclose(torch_tensor(c(10000., 1e-07)), torch_tensor(c(10000.1, 1e-08)))
  torch_allclose(torch_tensor(c(10000., 1e-08)), torch_tensor(c(10000.1, 1e-09)))
  torch_allclose(torch_tensor(c(1.0, NaN)), torch_tensor(c(1.0, NaN)))
  torch_allclose(torch_tensor(c(1.0, NaN)), torch_tensor(c(1.0, NaN)), equal_nan=TRUE)
}
    #> [1] TRUE
diff --git a/reference/torch_angle.html b/reference/torch_angle.html

Angle

torch_angle(self)

    Arguments

$$ out_i = angle(input_i) $$

    Examples

if (torch_is_installed()) {
  if (FALSE) {
    torch_angle(torch_tensor(c(-1 + 1i, -2 + 2i, 3 - 3i))) * 180 / 3.14159
  }
}
diff --git a/reference/torch_arange.html b/reference/torch_arange.html

Arange

torch_arange(
  start,
  end,
  step = 1,
  dtype = NULL,
  layout = torch_strided(),
  device = NULL,
  requires_grad = FALSE
)

    Arguments

$$ out_{i+1} = out_i + step $$

    Examples

if (torch_is_installed()) {
  torch_arange(start = 0, end = 5)
  torch_arange(1, 4)
  torch_arange(1, 2.5, 0.5)
}
#> torch_tensor
#>  1.0000
#>  1.5000
#>  2.0000
#> [ CPUFloatType{3} ]

diff --git a/reference/torch_argmax.html b/reference/torch_argmax.html
Argmax

torch_argmax(self, dim = NULL, keepdim = FALSE)

    Arguments

See the torch_max() documentation for the exact semantics of this method.

    Examples

if (torch_is_installed()) {
  if (FALSE) {
    a = torch_randn(c(4, 4))
    a
    torch_argmax(a)
  }

  a = torch_randn(c(4, 4))
  a
  torch_argmax(a, dim=1)
}

#> torch_tensor
#>  3
#>  3
#>  1
#>  2
#> [ CPULongType{4} ]
diff --git a/reference/torch_argmin.html b/reference/torch_argmin.html

Argmin

torch_argmin(self, dim = NULL, keepdim = FALSE)

    Arguments

See the torch_min() documentation for the exact semantics of this method.

    Examples

if (torch_is_installed()) {
  a = torch_randn(c(4, 4))
  a
  torch_argmin(a)

  a = torch_randn(c(4, 4))
  a
  torch_argmin(a, dim=1)
}

#> torch_tensor
#>  0
#>  3
#>  1
#>  3
#> [ CPULongType{4} ]
diff --git a/reference/torch_argsort.html b/reference/torch_argsort.html

Argsort

torch_argsort(self, dim = -1L, descending = FALSE)

    Arguments

Returns the indices that sort a tensor along a given dimension in ascending order by value. This is the second value returned by torch_sort(); see its documentation for the exact semantics of this method.

    Examples

if (torch_is_installed()) {
  a = torch_randn(c(4, 4))
  a
  torch_argsort(a, dim=1)
}

#> torch_tensor
#>  0  2  3  2
#>  3  3  0  1
#>  2  1  1  3
#>  1  0  2  0
#> [ CPULongType{4,4} ]
diff --git a/reference/torch_as_strided.html b/reference/torch_as_strided.html

As_strided

torch_as_strided(self, size, stride, storage_offset = NULL)

    Arguments


    Examples

if (torch_is_installed()) {
  x = torch_randn(c(3, 3))
  x
  t = torch_as_strided(x, list(2, 2), list(1, 2))
  t
  t = torch_as_strided(x, list(2, 2), list(1, 2), 1)
  t
}

#> torch_tensor
#>  2.2925  1.0260
#>  0.6235 -0.5973
#> [ CPUFloatType{2,2} ]
diff --git a/reference/torch_asin.html b/reference/torch_asin.html

Asin

torch_asin(self)

    Arguments

$$ out_i = sin^{-1}(input_i) $$

    Examples

if (torch_is_installed()) {
  a = torch_randn(c(4))
  a
  torch_asin(a)
}

#> torch_tensor
#>  0.1146
#> -1.2095
#> -0.7692
#>  1.0149
#> [ CPUFloatType{4} ]
diff --git a/reference/torch_atan.html b/reference/torch_atan.html

Atan

torch_atan(self)

    Arguments

$$ out_i = tan^{-1}(input_i) $$

    Examples

if (torch_is_installed()) {
  a = torch_randn(c(4))
  a
  torch_atan(a)
}

#> torch_tensor
#>  0.6850
#> -0.4100
#>  0.6624
#>  0.6265
#> [ CPUFloatType{4} ]
diff --git a/reference/torch_atan2.html b/reference/torch_atan2.html

Atan2

torch_atan2(self, other)

    Arguments

Element-wise arctangent of input/other with consideration of the quadrant. (Note that other, the second parameter, is the x-coordinate, while input, the first parameter, is the y-coordinate.) The shapes of input and other must be broadcastable.

    Examples

if (torch_is_installed()) {
  a = torch_randn(c(4))
  a
  torch_atan2(a, torch_randn(c(4)))
}

#> torch_tensor
#>  3.0034
#>  1.5146
#> -2.2932
#> -1.9916
#> [ CPUFloatType{4} ]
diff --git a/reference/torch_avg_pool1d.html b/reference/torch_avg_pool1d.html

Avg_pool1d

torch_avg_pool1d(
  self,
  kernel_size,
  stride = list(),
  padding = 0L,
  ceil_mode = FALSE,
  count_include_pad = TRUE
)

    Arguments

diff --git a/reference/torch_baddbmm.html b/reference/torch_baddbmm.html

Baddbmm

torch_baddbmm(self, batch1, batch2, beta = 1L, alpha = 1L)

    Arguments

For inputs of type FloatTensor or DoubleTensor, arguments beta and alpha must be real numbers, otherwise they should be integers.

    Examples

if (torch_is_installed()) {
  M = torch_randn(c(10, 3, 5))
  batch1 = torch_randn(c(10, 3, 4))
  batch2 = torch_randn(c(10, 4, 5))
  torch_baddbmm(M, batch1, batch2)
}

#> torch_tensor
#> (1,.,.) =
#>   4.7281 -6.0498  3.1056  1.1735 -2.4135
#>  -2.9917 -1.2809 -0.0952  3.6850  4.5780
#>  -1.7250  3.5495 -0.0858  0.4585  2.9365
#>
#> (2,.,.) =
#>  -1.5660  0.8016  0.9269 -3.8592  0.1989
#>  -1.4357  1.1692  0.7212 -0.3221  1.1815
#>  -2.9738  3.6028  1.8993 -5.8404 -0.1716
#>
#> (3,.,.) =
#>  -1.3068  2.0233  0.8700  0.6349 -1.3450
#>  -0.4576  0.9226 -1.4827 -5.6012  3.4073
#>  -2.3818  0.8728  1.1533  1.1672 -1.0976
#>
#> (4,.,.) =
#>   3.7072 -2.2163  1.1927 -3.6210 -0.2271
#>   2.7538 -2.0449 -0.8840 -3.4298  2.4292
#>   0.6560 -0.3831 -1.1665 -4.4474 -1.8460
#>
#> (5,.,.) =
#>   2.8195 -0.8667 -0.3816 -2.9713 -2.0342
#>  -3.1794 -1.4194 -4.0081  1.4367  2.8798
#>   4.6328 -0.2675 -1.9527 -2.8000 -0.4744
#>
#> (6,.,.) =
#>  -1.6616  1.4558  0.8950  3.3749  0.1133
#>   1.7758  2.6756 -0.7521 -1.7215 -5.8593
#>   1.1681  1.2075  2.0621 -0.5453 -0.2713
#>
#> (7,.,.) =
#>   2.1281 -5.5957  2.0165 -1.0600 -2.4151
#>  -3.0381  3.5730 -0.1263  2.0001  2.1600
#>   1.3341 -0.7419  0.7636  0.5322 -0.4942
#>
#> (8,.,.) =
#>   2.1611  1.5023 -2.6154  1.4701 -1.2220
#>   1.3418 -0.2604  0.0016  0.5194 -0.5101
#>  -0.0553 -1.3268  2.8341 -2.0275 -3.4602
#>
#> (9,.,.) =
#>   1.1191  2.4478  0.1713 -2.6500  2.6524
#>   1.5080 -6.5683 -1.4471  0.0159 -1.6559
#>  -1.3632  0.4350 -0.2920  1.7903  0.0450
#>
#> (10,.,.) =
#>   5.0413  2.4106 -1.1846  5.9640  1.0693
#>  -1.1074 -1.6447  1.9714  2.8367  1.9957
#>  -2.5291 -1.3038 -0.0376  0.3672 -1.1772
#> [ CPUFloatType{10,3,5} ]
diff --git a/reference/torch_bartlett_window.html b/reference/torch_bartlett_window.html

Bartlett_window

torch_bartlett_window(
  window_length,
  periodic = TRUE,
  dtype = NULL,
  layout = torch_strided(),
  device = NULL,
  requires_grad = FALSE
)

    Arguments

diff --git a/reference/torch_bernoulli.html b/reference/torch_bernoulli.html

Bernoulli

torch_bernoulli(self, p, generator = NULL)

    Arguments


    Examples

if (torch_is_installed()) {
  a = torch_empty(c(3, 3))$uniform_(0, 1)  # generate a uniform random matrix with range c(0, 1)
  a
  torch_bernoulli(a)
  a = torch_ones(c(3, 3))   # probability of drawing "1" is 1
  torch_bernoulli(a)
  a = torch_zeros(c(3, 3))  # probability of drawing "1" is 0
  torch_bernoulli(a)
}
#> torch_tensor
#>  0  0  0
#>  0  0  0
#>  0  0  0
#> [ CPUFloatType{3,3} ]

diff --git a/reference/torch_bincount.html b/reference/torch_bincount.html
Bincount

torch_bincount(self, weights = list(), minlength = 0L)

    Arguments

If input is empty, the result is a tensor of size 0. If minlength is specified, the number of bins is at least minlength.

    Examples

    -
    if (torch_is_installed()) { - -input = torch_randint(0, 8, list(5), dtype=torch_int64()) -weights = torch_linspace(0, 1, steps=5) -input -weights -torch_bincount(input, weights) -input$bincount(weights) -} +
    if (torch_is_installed()) { + +input = torch_randint(0, 8, list(5), dtype=torch_int64()) +weights = torch_linspace(0, 1, steps=5) +input +weights +torch_bincount(input, weights) +input$bincount(weights) +}
#> torch_tensor
-#> 0.0000
-#> 0.0000
-#> 0.7500
#> 1.0000
+#> 0.7500
#> 0.2500
#> 0.0000
-#> 0.0000
#> 0.5000
-#> [ CPUFloatType{8} ]
+#> [ CPUFloatType{5} ]
diff --git a/reference/torch_bitwise_and.html b/reference/torch_bitwise_and.html

    Bitwise_and

torch_bitwise_and(self, other)

    Arguments

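Not part of this diff: a minimal usage sketch, assuming torch is installed (bitwise operations require integral or boolean tensors):

if (torch_is_installed()) {
a = torch_tensor(c(5L, 3L), dtype = torch_int32())
b = torch_tensor(c(3L, 1L), dtype = torch_int32())
torch_bitwise_and(a, b) # elementwise AND: 1, 1
}
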
diff --git a/reference/torch_bitwise_not.html b/reference/torch_bitwise_not.html

    Bitwise_not

torch_bitwise_not(self)

    Arguments

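Not part of this diff: a minimal usage sketch, assuming torch is installed:

if (torch_is_installed()) {
x = torch_tensor(c(-1L, 0L, 1L), dtype = torch_int32())
torch_bitwise_not(x) # two's-complement NOT: 0, -1, -2
}
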
diff --git a/reference/torch_bitwise_or.html b/reference/torch_bitwise_or.html

    Bitwise_or

torch_bitwise_or(self, other)

    Arguments

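Not part of this diff: a minimal usage sketch, assuming torch is installed:

if (torch_is_installed()) {
a = torch_tensor(c(5L, 3L), dtype = torch_int32())
b = torch_tensor(c(3L, 1L), dtype = torch_int32())
torch_bitwise_or(a, b) # elementwise OR: 7, 3
}
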
diff --git a/reference/torch_bitwise_xor.html b/reference/torch_bitwise_xor.html

    Bitwise_xor

torch_bitwise_xor(self, other)

    Arguments

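Not part of this diff: a minimal usage sketch, assuming torch is installed:

if (torch_is_installed()) {
a = torch_tensor(c(5L, 3L), dtype = torch_int32())
b = torch_tensor(c(3L, 1L), dtype = torch_int32())
torch_bitwise_xor(a, b) # elementwise XOR: 6, 2
}
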
diff --git a/reference/torch_blackman_window.html b/reference/torch_blackman_window.html

    Blackman_window

torch_blackman_window(
  window_length,
  periodic = TRUE,
  dtype = NULL,
  layout = torch_strided(),
  device = NULL,
  requires_grad = FALSE
)

    Arguments

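Not part of this diff: a minimal usage sketch, assuming torch is installed:

if (torch_is_installed()) {
# 8-point periodic Blackman window; returns a 1-D float tensor
torch_blackman_window(8)
}
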
diff --git a/reference/torch_bmm.html b/reference/torch_bmm.html

    Bmm

torch_bmm(self, mat2)

    Arguments

    @@ -231,63 +263,63 @@ the same number of matrices.

$$ \text{out}_i = \text{input}_i \mathbin{@} \text{mat2}_i $$

    Examples

if (torch_is_installed()) {

input = torch_randn(c(10, 3, 4))
mat2 = torch_randn(c(10, 4, 5))
res = torch_bmm(input, mat2)
res
}
#> torch_tensor
#> (1,.,.) = … (10,.,.) =  [regenerated random example output elided]
#> [ CPUFloatType{10,3,5} ]
diff --git a/reference/torch_broadcast_tensors.html b/reference/torch_broadcast_tensors.html

    Broadcast_tensors

torch_broadcast_tensors(tensors)

    Arguments

    @@ -214,13 +246,13 @@

    Broadcasts the given tensors according to broadcasting-semantics.

    Examples

if (torch_is_installed()) {

x = torch_arange(0, 3)$view(c(1, 3))
y = torch_arange(0, 2)$view(c(2, 1))
out = torch_broadcast_tensors(list(x, y))
out[[1]]
}
#> torch_tensor
#> 0 1 2
#> 0 1 2

diff --git a/reference/torch_can_cast.html b/reference/torch_can_cast.html

    Can_cast

torch_can_cast(from, to)

    Arguments

@@ -219,11 +251,11 @@ described in the type promotion documentation.

    Examples

if (torch_is_installed()) {

torch_can_cast(torch_double(), torch_float())
torch_can_cast(torch_float(), torch_int())
}
    #> [1] FALSE
diff --git a/reference/torch_cartesian_prod.html b/reference/torch_cartesian_prod.html

Computes the Cartesian product of the given sequence of tensors.

torch_cartesian_prod(tensors)

    Arguments

    @@ -208,14 +240,14 @@

    Examples

if (torch_is_installed()) {

a = c(1, 2, 3)
b = c(4, 5)
tensor_a = torch_tensor(a)
tensor_b = torch_tensor(b)
torch_cartesian_prod(list(tensor_a, tensor_b))
}
#> torch_tensor
#> 1 4
#> 1 5

diff --git a/reference/torch_cat.html b/reference/torch_cat.html

    Cat

torch_cat(tensors, dim = 1L)

    Arguments

    @@ -223,16 +255,16 @@ and torch_chunk.

torch_cat is best understood via examples.

    Examples

if (torch_is_installed()) {

x = torch_randn(c(2, 3))
x
torch_cat(list(x, x, x), 1)
torch_cat(list(x, x, x), 2)
}
#> torch_tensor
-#> 0.4974 -0.0008 -0.8116 0.4974 -0.0008 -0.8116 0.4974 -0.0008 -0.8116
-#> 0.0269 -0.0270 0.1641 0.0269 -0.0270 0.1641 0.0269 -0.0270 0.1641
+#> -0.1257 -0.2544 0.6346 -0.1257 -0.2544 0.6346 -0.1257 -0.2544 0.6346
+#> -0.1385 0.6449 0.9977 -0.1385 0.6449 0.9977 -0.1385 0.6449 0.9977
#> [ CPUFloatType{2,9} ]
diff --git a/reference/torch_cdist.html b/reference/torch_cdist.html

    Cdist

torch_cdist(x1, x2, p = 2L, compute_mode = NULL)

    Arguments

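Not part of this diff: a minimal usage sketch, assuming torch is installed (the distances in the comment follow from the Euclidean formula):

if (torch_is_installed()) {
x1 = torch_tensor(matrix(c(0, 0,
                           1, 0), ncol = 2, byrow = TRUE))
x2 = torch_tensor(matrix(c(0, 1), ncol = 2))
torch_cdist(x1, x2) # p = 2 (Euclidean): distances 1 and sqrt(2)
}
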
diff --git a/reference/torch_ceil.html b/reference/torch_ceil.html

    Ceil

torch_ceil(self)

    Arguments

    @@ -218,17 +250,17 @@ the smallest integer greater than or equal to each element.

$$ \text{out}_i = \lceil \text{input}_i \rceil $$

    Examples

if (torch_is_installed()) {

a = torch_randn(c(4))
a
torch_ceil(a)
}
#> torch_tensor
#> 1
+#> 2
+#> 2
#> -1
-#> -0
-#> 1
#> [ CPUFloatType{4} ]
diff --git a/reference/torch_celu.html b/reference/torch_celu.html

    Celu

torch_celu(self, alpha = 1)

    Arguments

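Not part of this diff: a minimal usage sketch, assuming torch is installed (the expected values follow from the CELU formula in the comment):

if (torch_is_installed()) {
x = torch_tensor(c(-1, 0, 1))
# celu(x) = max(0, x) + min(0, alpha * (exp(x / alpha) - 1))
torch_celu(x) # approx -0.6321, 0, 1
}
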
diff --git a/reference/torch_celu_.html b/reference/torch_celu_.html

    Celu_

torch_celu_(self, alpha = 1)

    Arguments

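Not part of this diff: a minimal usage sketch, assuming torch is installed:

if (torch_is_installed()) {
x = torch_tensor(c(-1, 0, 1))
torch_celu_(x) # in-place variant of torch_celu(): x itself is modified
x
}
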
diff --git a/reference/torch_chain_matmul.html b/reference/torch_chain_matmul.html

    Chain_matmul

torch_chain_matmul(matrices)

    Arguments

@@ -218,18 +250,18 @@ needs to be greater than or equal to 2; if equal to 2 then a trivial matrix-matrix product is returned. If \(N\) is 1, then this is a no-op: the original matrix is returned as is.

    Examples

if (torch_is_installed()) {

a = torch_randn(c(3, 4))
b = torch_randn(c(4, 5))
c = torch_randn(c(5, 6))
d = torch_randn(c(6, 7))
torch_chain_matmul(list(a, b, c, d))
}
#> torch_tensor
-#> -2.9763 5.1541 0.4802 4.2000 -0.3407 -3.9994 -9.5460
-#> 9.4136 -12.4751 -16.3055 -13.4903 0.4411 12.5776 21.3514
-#> 3.9498 -8.2069 -5.7414 1.2695 6.5496 18.3628 12.8464
+#> -5.0034 13.1294 -7.9439 12.2444 -1.5253 9.4320 -8.0039
+#> 1.2683 -13.7433 12.2470 -18.7686 3.0491 -19.3487 8.1020
+#> -0.0394 9.6720 -1.0461 0.6241 -1.5862 1.2076 -5.0604
#> [ CPUFloatType{3,7} ]
diff --git a/reference/torch_cholesky.html b/reference/torch_cholesky.html

    Cholesky

torch_cholesky(self, upper = FALSE)

    Arguments

    @@ -236,22 +268,22 @@ tensor will be composed of lower-triangular Cholesky factors of each of the indi matrices.

    Examples

if (torch_is_installed()) {

a = torch_randn(c(3, 3))
a = torch_mm(a, a$t()) # make symmetric positive-definite
l = torch_cholesky(a)
a
l
torch_mm(l, l$t())
a = torch_randn(c(3, 2, 2))
if (FALSE) {
a = torch_matmul(a, a$transpose(-1, -2)) + 1e-03 # make symmetric positive-definite
l = torch_cholesky(a)
z = torch_matmul(l, l$transpose(-1, -2))
torch_max(torch_abs(z - a)) # Max non-zero
}
}
diff --git a/reference/torch_cholesky_inverse.html b/reference/torch_cholesky_inverse.html

    Cholesky_inverse

torch_cholesky_inverse(self, upper = FALSE)

    Arguments

    @@ -230,17 +262,17 @@ triangular such that the returned tensor is

$$ \text{inv} = (u u^\mathsf{T})^{-1} $$

    Examples

if (torch_is_installed()) {

if (FALSE) {
a = torch_randn(c(3, 3))
a = torch_mm(a, a$t()) + 1e-05 * torch_eye(3) # make symmetric positive definite
u = torch_cholesky(a)
a
torch_cholesky_inverse(u)
a$inverse()
}
}
diff --git a/reference/torch_cholesky_solve.html b/reference/torch_cholesky_solve.html

    Cholesky_solve

torch_cholesky_solve(self, input2, upper = FALSE)

    Arguments

    @@ -236,21 +268,21 @@ batches of 2D matrices. If the inputs are batches, then returns batched outputs c

    Examples

if (torch_is_installed()) {

a = torch_randn(c(3, 3))
a = torch_mm(a, a$t()) # make symmetric positive definite
u = torch_cholesky(a)
a
b = torch_randn(c(3, 2))
b
torch_cholesky_solve(b, u)
torch_mm(a$inverse(), b)
}
#> torch_tensor
-#> -4.4143 1.6973
-#> 3.7547 -2.0436
-#> 1.5229 -2.0137
+#> 0.8244 -0.2831
+#> -1.1787 -0.6527
+#> 1.0540 -0.4594
#> [ CPUFloatType{3,2} ]
diff --git a/reference/torch_chunk.html b/reference/torch_chunk.html

    Chunk

torch_chunk(self, chunks, dim = 1L)

    Arguments

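Not part of this diff: a minimal usage sketch, assuming torch is installed:

if (torch_is_installed()) {
x = torch_tensor(1:6)
torch_chunk(x, chunks = 3) # a list of 3 tensors: (1, 2), (3, 4), (5, 6)
}
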
diff --git a/reference/torch_clamp.html b/reference/torch_clamp.html

    Clamp

torch_clamp(self, min = NULL, max = NULL)

    Arguments

    @@ -249,27 +281,27 @@ should be a real number, otherwise it should be an integer.

    should be a real number, otherwise it should be an integer.

    Examples

if (torch_is_installed()) {

a = torch_randn(c(4))
a
torch_clamp(a, min=-0.5, max=0.5)

a = torch_randn(c(4))
a
torch_clamp(a, min=0.5)

a = torch_randn(c(4))
a
torch_clamp(a, max=0.5)
}
#> torch_tensor
-#> -0.1812
-#> -0.9782
-#> 0.5000
-#> 0.2475
+#> -0.9450
+#> 0.4486
+#> -0.3574
+#> 0.1104
#> [ CPUFloatType{4} ]
diff --git a/reference/torch_combinations.html b/reference/torch_combinations.html

    Combinations

torch_combinations(self, r = 2L, with_replacement = FALSE)

    Arguments

    @@ -224,14 +256,14 @@ python's itertools.combinations when with_replacementitertools.combinations_with_replacement when with_replacement is set to TRUE.

    Examples

if (torch_is_installed()) {

a = c(1, 2, 3)
tensor_a = torch_tensor(a)
torch_combinations(tensor_a)
torch_combinations(tensor_a, r=3)
torch_combinations(tensor_a, with_replacement=TRUE)
}
#> torch_tensor
#> 1 1
#> 1 2

diff --git a/reference/torch_conj.html b/reference/torch_conj.html

    Conj

torch_conj(self)

    Arguments

@@ -217,11 +249,11 @@
$$ \text{out}_i = \overline{\text{input}_i} $$

    Examples

if (torch_is_installed()) {
if (FALSE) {
torch_conj(torch_tensor(c(-1 + 1i, -2 + 2i, 3 - 3i)))
}
}
diff --git a/reference/torch_conv1d.html b/reference/torch_conv1d.html

    Conv1d

torch_conv1d(
  input,
  weight,
  bias = list(),
  stride = 1L,
  padding = 0L,
  dilation = 1L,
  groups = 1L
)

    Arguments

    @@ -248,4384 +280,4248 @@ planes.

    See nn_conv1d() for details and output shape.

    Examples

if (torch_is_installed()) {

filters = torch_randn(c(33, 16, 3))
inputs = torch_randn(c(20, 16, 50))
nnf_conv1d(inputs, filters)
}
#> torch_tensor
#> (1,.,.) = … (2,.,.) = …  [regenerated random example output elided]
7.2414 -3.9239 -7.0053 +#> 0.9349 3.2037 -1.9182 -6.0207 -8.9720 6.7303 5.1336 -2.8499 +#> 2.3689 -0.0180 7.6802 -1.8705 0.8882 -1.7085 -3.7346 -2.9886 +#> 6.5192 -1.6577 -4.5703 9.4065 4.1678 5.3161 2.4356 0.8502 +#> 2.8720 -20.2435 10.6533 -16.5463 -7.4772 0.8646 -5.3559 -5.3760 +#> -2.7366 -1.3390 6.9657 9.5990 8.3194 1.8907 -3.1865 -6.8953 +#> 6.7979 11.0678 -3.5683 -6.1263 12.2529 -4.1589 -4.0510 7.0515 +#> 8.0900 -3.8846 -13.5482 -5.3754 -1.5216 -6.7216 -4.6792 2.7533 +#> -3.7959 13.9529 26.9659 -4.8575 -3.3514 5.3643 3.7068 1.0795 +#> -1.0770 -1.7608 0.4851 -2.5428 -1.6886 -0.4926 -1.3364 7.0277 +#> 5.4794 1.1259 2.7299 9.7866 0.9675 0.4050 6.4089 -3.7298 +#> -0.2842 -4.5766 -9.8293 -3.1239 7.8309 9.0241 1.3918 0.9099 +#> -1.0776 5.3834 -10.7990 9.2697 0.4384 -0.7660 5.0994 8.6788 +#> 18.2321 1.7815 -3.8933 2.1150 4.1487 12.4740 14.9617 4.2673 +#> 6.2371 4.7664 -10.7564 0.5711 -1.8074 -5.2167 3.1385 6.4971 +#> -12.7658 -0.4062 -5.3433 13.0974 -0.3448 0.3984 -8.1654 -2.8115 +#> -4.4679 9.9758 7.7362 3.7483 5.8953 -0.0196 9.5938 3.1453 +#> 8.0907 -2.6273 2.6493 4.7164 10.5790 -8.8270 -4.6447 11.1990 +#> 3.5821 2.2337 2.4201 8.1678 -5.2501 2.7953 -1.8781 -9.0544 +#> -10.0726 8.7515 15.8992 -4.6881 -5.3566 2.4174 0.2238 0.0809 +#> 2.8823 -6.3140 -3.2212 1.9314 -11.0092 1.8779 5.6983 2.9439 +#> -7.4484 -5.4048 0.6352 10.2212 -0.6127 -3.6856 -7.4190 -1.3941 +#> -2.8589 16.1370 -10.7102 -5.3420 14.1526 7.3273 -2.1112 -1.1856 +#> 12.5522 -4.7888 14.8623 -4.4579 2.7298 -0.1005 -9.9647 -2.8257 +#> 3.6617 11.1767 -4.5620 6.5324 0.3794 11.6022 3.4834 -7.5646 +#> -10.4129 -13.0055 -3.3670 -8.0003 -2.6251 5.5768 -6.8455 -5.1965 +#> -7.4526 -4.3158 5.7424 2.2327 -4.8341 -0.9311 -4.9753 -1.5844 +#> -2.1690 -6.3374 5.8079 5.4768 -4.3901 -2.3366 -5.9291 -5.6081 +#> -6.0447 3.9404 3.8917 -4.6146 -6.5145 3.2475 1.0771 1.7657 +#> -4.6528 4.2647 6.3065 4.6842 12.4760 1.2198 0.9179 0.9575 +#> -11.2894 9.0967 -6.6894 -0.5237 -10.1371 0.8038 -1.9630 2.8939 +#> -1.1554 -8.5433 1.7687 -0.2722 -0.5076 -0.0616 4.7847 4.8778 +#> -3.7165 4.0234 -0.4934 5.0402 -13.0122 -8.4023 1.5245 6.0530 #> -#> Columns 31 to 36 -1.5825e+01 -6.0525e+00 -3.6059e-01 -3.5320e+00 -1.0963e+01 9.0893e+00 -#> -7.6471e+00 3.8245e+00 7.4362e+00 9.0340e+00 -3.2226e+00 8.3234e+00 -#> 1.0664e+01 3.1036e+00 7.8870e+00 5.2952e+00 -5.6097e+00 1.1988e+01 -#> 7.5233e+00 -3.8525e-01 -1.0644e+00 2.8296e+00 -8.0137e+00 -1.5562e+01 -#> -3.9163e+00 2.3552e-01 2.7444e+00 1.1502e+01 2.2503e+00 -1.2105e+00 -#> -7.3549e+00 -1.0166e+01 1.9247e+00 -1.8355e-01 -8.8643e+00 -2.6318e+00 -#> -4.0218e+00 -3.7994e+00 -3.6637e+00 -9.0932e+00 8.2074e+00 -1.3125e+00 -#> -8.5332e+00 -1.0384e+01 -2.4280e+00 4.7237e+00 -2.0530e+00 6.6218e+00 -#> 1.9824e+01 -3.3879e+00 -4.7558e+00 6.3653e+00 -7.8356e+00 2.3336e+00 -#> -2.3862e+00 4.6184e+00 -5.4476e+00 -1.4052e+00 -3.0102e+00 3.4202e-01 -#> -4.5635e+00 -9.0183e+00 -9.8438e-01 7.6168e+00 -5.9883e+00 9.1950e+00 -#> -1.1475e+01 7.0631e+00 -6.4333e+00 -1.1149e-01 6.0743e+00 -2.4352e+00 -#> -3.1736e+00 2.1220e+00 -6.6557e+00 -3.4379e+00 -2.7073e+00 1.0520e+00 -#> 1.3549e+00 -7.3758e+00 7.0423e+00 -5.1716e+00 -6.6967e+00 1.9914e+00 -#> -5.8276e-01 1.3280e+00 -4.0232e+00 6.2002e+00 -1.1908e+01 1.5772e+01 -#> -6.6587e+00 -5.5256e+00 3.1339e+00 -5.5551e-01 -5.4036e+00 -9.0692e+00 -#> -7.6355e+00 -1.5741e+01 3.9675e-01 3.0156e+00 -2.0665e+00 3.9257e+00 -#> 2.0522e+00 -2.8533e+00 8.6485e+00 -7.8702e+00 4.1215e+00 -3.2170e-01 -#> -8.2318e-01 3.2963e+00 -1.1312e+01 -8.2619e-01 -4.2630e+00 6.1051e-01 -#> -1.3481e+01 -1.0202e+01 
3.7420e+00 -9.4366e+00 -8.2541e+00 4.4941e+00 -#> 4.6272e+00 1.1581e+01 -4.7529e+00 -4.3113e+00 8.0156e+00 3.4779e+00 -#> 1.7016e+00 -1.1712e+01 7.2312e-01 -7.1206e+00 -9.3145e+00 9.2114e+00 -#> 5.9762e+00 -5.2956e+00 -3.9463e+00 -1.1333e+01 -1.3987e+00 -4.9694e+00 -#> 7.4142e+00 -8.1344e+00 -1.3305e+01 -1.7310e+00 -7.5089e+00 -4.8229e+00 -#> 7.3922e+00 8.3139e+00 -5.2840e+00 -1.4562e+00 -3.4649e+00 1.2898e+01 -#> -9.4360e+00 -1.3312e+00 1.1261e+01 -5.3233e+00 -3.2232e+00 4.1879e+00 -#> 5.3539e+00 3.0263e+00 -3.2155e+00 3.4591e+00 -3.5687e+00 -6.5521e+00 -#> -3.5393e+00 -1.3799e+01 -3.3037e+00 1.9697e+00 2.8058e-01 -4.6900e+00 -#> 4.9514e+00 4.1531e-01 -8.0968e+00 -3.9034e+00 -4.8970e+00 1.0481e+01 -#> -6.1879e+00 -9.2681e+00 -6.2762e+00 -2.2763e+00 4.9857e-02 2.0032e+00 -#> -3.8395e+00 -1.0061e+01 -7.0415e+00 -1.5563e+01 7.8947e+00 -1.0889e+01 -#> 4.6596e+00 -3.8205e+00 -1.4254e+00 9.8757e+00 -8.8425e+00 1.1036e+00 -#> -5.7265e+00 -1.0946e+01 -3.0999e+00 5.1787e-02 -4.1085e+00 1.6805e+00 +#> Columns 25 to 32 -1.1617 -4.5270 8.9619 8.7575 -12.2945 -9.4242 -2.1149 -3.8564 +#> 0.3031 -1.5149 1.4088 6.1547 14.7426 1.0636 0.8483 3.1450 +#> -4.2053 -4.8640 -9.3965 8.9148 -0.8327 0.6713 -3.2564 -3.8877 +#> -5.4048 0.6582 18.2467 5.1528 14.6201 11.7561 0.9013 0.9964 +#> 5.4964 -7.9256 -1.9682 2.5590 1.1515 7.8826 -4.0752 -2.1809 +#> 2.0503 16.1044 -13.6467 -4.1519 -3.6256 -2.2399 -4.3554 -2.5096 +#> -3.4623 7.6316 -0.6717 2.3535 8.9883 -3.6381 6.8627 6.3989 +#> 0.7791 6.0648 8.4828 2.4190 8.2937 15.2618 6.2531 3.5603 +#> 1.4592 -0.8026 5.2497 -1.7417 10.2363 -2.7339 10.1366 8.7154 +#> -1.0870 -3.8757 -17.0384 -13.5462 -9.9782 3.4288 -0.9570 1.8320 +#> 0.6053 6.3738 18.1769 9.5985 -6.5580 3.3676 -7.6121 -9.0703 +#> 4.3631 1.2025 6.0577 14.9365 -2.9591 -4.5177 6.4436 4.9191 +#> 7.9022 -4.1985 -10.5060 3.6277 -4.0699 1.3661 5.0642 4.9252 +#> 4.9476 5.5417 -9.6269 -5.8170 -5.0094 2.0519 -1.0189 5.3101 +#> 2.3852 -1.0250 -0.2472 -3.3311 -9.3618 -11.7413 4.3836 -5.2594 +#> 3.6495 1.8904 -4.2375 -1.8959 -7.7809 1.5159 13.1940 5.2597 +#> -4.8175 -5.2363 3.8440 -0.1087 0.2622 -13.3820 -3.2750 -0.3589 +#> 12.6262 10.2053 -4.2351 -15.5579 -1.6116 2.0824 -9.5421 6.1311 +#> 4.7387 5.8616 0.1864 0.4351 -11.7449 7.0052 -5.8976 -2.5118 +#> -8.3569 3.6318 -11.1709 -9.2369 1.4109 -3.0802 1.4126 6.0006 +#> -4.3494 -5.6542 -1.2183 4.7747 5.7316 -2.6122 2.2393 2.3839 +#> 5.6065 -5.1379 -3.2652 8.5963 -3.1247 5.6452 10.2332 -13.9666 +#> -8.0125 11.7461 6.3400 11.8589 13.8579 -7.4751 1.5486 9.6556 +#> -6.1476 5.4979 5.8627 -4.3102 -4.0461 -14.5948 -9.0272 8.3518 +#> -12.1698 4.8075 1.1471 7.7939 11.0065 -0.4064 2.3033 7.8946 +#> -9.7181 3.1563 -7.1208 -3.9173 0.8515 -3.4416 5.0353 11.1102 +#> -7.6501 11.1190 -9.5224 -4.1385 3.2457 -3.4650 -3.8158 2.1741 +#> 0.6098 1.0856 1.1420 6.7482 -7.6313 12.1166 7.6930 -2.4811 +#> -1.7017 -0.5707 -3.3727 -6.4599 2.0904 -8.8192 -4.3939 3.6186 +#> 7.8555 17.4277 1.0984 -10.5406 -12.2719 -9.1735 -0.3267 2.4835 +#> -0.5058 -12.7266 -7.5007 -0.5470 2.8935 -0.0474 -5.6342 1.8716 +#> 2.2625 -1.3686 -4.2506 -2.9258 -4.4997 12.8964 25.0262 -3.0842 +#> -9.9654 0.9141 16.3044 0.2827 11.2994 10.2884 -4.2485 -5.8201 #> -#> Columns 37 to 42 -1.1352e+01 1.1675e+00 4.1326e+00 2.0810e+01 -3.1170e+00 -5.3886e+00 -#> -6.3405e+00 9.9968e+00 4.5593e+00 4.3253e+00 4.3078e+00 -5.0758e+00 -#> -1.8509e+00 -2.5069e-01 -1.1716e+01 -6.5717e+00 3.8582e+00 4.9188e+00 -#> 5.6229e+00 -2.9105e+00 -2.3605e+01 8.7952e+00 -3.9078e+00 4.0413e+00 -#> 6.1460e+00 7.2539e+00 -4.2070e+00 -3.4539e+00 -3.6857e-01 
-4.5547e+00 -#> -6.1082e+00 3.7526e+00 8.7095e+00 1.6753e+01 -5.0704e+00 -3.2953e+00 -#> -5.7529e+00 -3.3428e+00 1.3160e+00 1.1378e+01 -8.3256e+00 9.5483e-02 -#> -1.5073e+00 6.0489e+00 2.0360e+00 1.3820e+00 -2.4588e+00 -1.2680e+00 -#> -4.2967e+00 -9.2732e-01 -3.1906e+00 -7.1769e+00 4.9066e+00 -2.2283e+00 -#> -1.2227e+01 8.5371e+00 -2.6248e+00 2.4014e+00 -5.6499e+00 1.0123e+01 -#> -8.5863e+00 9.5664e+00 -5.9350e+00 -1.6468e+01 1.0391e+01 -1.0179e+01 -#> 3.1844e+00 1.2364e-01 1.3085e+01 -5.8785e-01 2.3136e+00 -9.7390e+00 -#> 4.6407e+00 4.9859e+00 -1.2739e+00 2.1925e-01 2.5094e-01 3.3886e-01 -#> 1.1058e+01 -1.0747e+01 8.3456e+00 -2.2283e+00 4.7523e+00 -7.7536e+00 -#> -1.3527e+01 1.4885e+01 -5.2081e+00 9.1698e+00 -7.9931e+00 -8.4733e-01 -#> -1.9517e+00 -3.4732e+00 5.4558e+00 -2.4234e+00 3.1985e+00 -6.9968e-01 -#> 8.7000e+00 -3.4409e+00 -1.1334e+00 1.4169e+01 -1.9912e+00 -2.1136e+00 -#> -7.1622e+00 1.2549e+00 4.2199e-01 3.1096e+00 2.5984e+00 4.3691e+00 -#> 3.0234e+00 -2.0207e+00 1.4672e+01 -1.1174e+01 5.1304e+00 7.1506e+00 -#> -1.1437e+01 3.4149e+00 7.5767e+00 5.9337e+00 -1.2918e+01 3.3527e+00 -#> 1.2484e+00 4.7930e-01 -7.1958e+00 2.7164e+00 2.0207e+00 7.7346e+00 -#> 1.3582e+00 2.8437e+00 -7.3608e-01 2.2923e+00 -7.7159e+00 -3.2784e+00 -#> -8.8959e+00 -3.6378e+00 -2.6879e+00 5.3815e-01 8.6462e+00 1.2901e+00 -#> -1.0886e+01 -9.1296e-01 -1.3696e+00 4.0285e+00 1.1338e+01 -3.0686e+00 -#> -7.4435e+00 -1.6110e+00 -6.7849e+00 -3.5437e+00 6.4861e+00 3.2415e+00 -#> 2.0525e+00 -1.4578e+00 -2.7559e+00 5.7021e+00 -9.3438e+00 9.2374e+00 -#> -2.7167e+00 1.1972e+01 1.5159e+00 8.5275e+00 5.1403e+00 -4.5702e+00 -#> 8.4622e+00 -1.0705e+00 -2.7975e+00 1.3748e-01 6.0727e+00 9.2312e+00 -#> -1.2966e+00 1.4843e+00 1.3079e+01 -4.8325e+00 -2.5283e+00 -1.0658e+01 -#> 9.5890e-01 8.5580e+00 8.0375e-01 6.0536e+00 -2.3652e+00 1.9715e+00 -#> -8.4933e+00 -6.1661e+00 -2.8968e+00 8.8597e+00 -7.1364e+00 2.2233e+00 -#> 6.3513e+00 -1.3840e+01 9.8739e+00 -1.0290e+01 1.4279e+01 -6.2812e+00 -#> -1.7568e+01 5.2109e+00 -1.6603e+00 5.6085e+00 -3.3452e+00 -2.7954e+00 +#> Columns 33 to 40 -7.1104 6.2031 -5.8101 -1.1152 -4.6315 -9.4966 6.7389 -6.6577 +#> 1.0362 -5.0733 3.8936 11.3512 0.7019 0.8807 -3.1198 -0.0273 +#> 1.5857 -3.3188 -0.1739 3.7079 -7.3753 8.4177 -1.2289 -5.4003 +#> -3.2097 0.4749 3.1526 5.3095 10.5141 -2.9392 7.5982 6.2324 +#> 12.0165 5.7773 -2.0571 -2.8175 -5.7637 -3.8129 -2.5565 4.2143 +#> 0.6730 -1.1841 5.6221 -7.0045 -7.0576 -5.1420 0.3235 11.0718 +#> -11.5064 -10.8466 13.9312 -0.1613 -0.7936 9.2455 2.2255 -8.9074 +#> -3.2557 2.0597 -8.9985 4.7375 11.7785 6.5120 -4.9715 -0.6753 +#> 6.4127 2.6258 5.9541 4.9620 2.7551 13.0173 -0.4617 -2.6421 +#> -2.2778 0.0185 -2.4465 -2.5310 0.7069 -8.0452 -8.8328 5.8252 +#> -5.3960 -8.0286 0.5873 -8.4356 -6.4207 -10.3039 -12.7500 12.8055 +#> 1.4164 0.9875 8.6126 0.4653 4.5835 5.6521 5.7095 7.5403 +#> -8.6152 1.4163 12.5893 -2.4590 5.7324 9.5341 -15.0938 9.9731 +#> 3.9700 -4.9081 -0.4361 4.0454 10.0282 -0.3105 -12.6996 0.4573 +#> 2.0415 17.8267 -7.0237 4.9820 13.3990 2.6581 -0.9705 -9.2812 +#> -3.2320 7.6963 -6.5363 -5.2591 2.9328 5.6946 -4.4450 2.0361 +#> -1.6764 0.3225 -10.5399 -0.3576 -5.9841 -14.3651 9.2570 -1.4252 +#> -4.3304 10.4637 8.6849 -4.2338 4.4932 5.6891 2.5000 -10.2220 +#> -2.7366 -6.6435 -1.8592 -1.8754 -3.4554 -2.3047 -5.9007 5.6179 +#> 3.4998 -7.6149 -3.8754 3.5602 -8.3153 -6.6275 -10.4509 -4.7872 +#> -4.7287 -5.7657 -6.2886 -8.5831 4.0117 -5.2058 -8.9663 -10.0599 +#> 1.3292 5.6377 5.2710 6.5351 1.7869 -3.9364 3.3611 0.7721 +#> -4.5771 -3.5253 3.4772 4.9476 -0.1588 7.3496 
16.3549 4.5678 +#> -2.7939 -0.2169 7.6107 -12.8410 -2.0678 8.9172 -2.6089 -2.1153 +#> -1.9854 -8.3015 -7.7262 -0.7392 3.3416 -5.0926 4.2479 5.3253 +#> -1.2966 -7.5358 -1.9126 4.9271 -3.7803 -5.2679 0.3205 -2.1165 +#> 4.9917 1.4806 -5.7357 4.6555 -3.5482 -3.9291 0.7551 -2.4975 +#> 3.5481 -1.1284 -4.1767 -2.2818 -10.6350 -10.4669 -0.1940 5.9054 +#> 1.0773 -1.2175 -3.2846 3.6837 -0.5295 -4.2009 2.7542 -3.7241 +#> -3.7732 2.6368 6.0732 -5.1761 7.4727 9.7903 -6.9487 0.6081 +#> 8.6603 6.3393 -11.3216 2.9335 -4.2205 -1.1316 -1.9060 -3.1916 +#> 6.4110 9.1433 -10.6737 3.7257 3.7674 0.7303 -0.6654 4.3061 +#> 9.8283 3.7125 -0.2323 3.7521 3.2582 4.1825 11.0883 -4.7521 #> -#> Columns 43 to 48 -1.4316e+01 -1.7188e+00 3.2444e-01 8.9797e+00 -3.2084e+00 7.8822e+00 -#> 3.1460e+00 -6.4460e+00 5.2454e+00 -1.9715e+00 1.6432e+00 1.1744e+01 -#> 8.6926e+00 -2.2753e+00 -4.4450e-01 -3.1107e+00 2.9401e+00 3.6147e+00 -#> 6.4619e+00 3.0811e+00 -9.3294e+00 3.1618e-02 -5.4628e-01 -1.5545e+01 -#> 3.2211e+00 2.7079e+00 -1.7153e+00 6.0905e-01 9.2878e+00 7.0190e+00 -#> -1.1072e+01 -1.9385e+00 1.8828e+00 1.5127e+00 2.6904e+00 -1.7966e+00 -#> -9.0222e+00 9.6423e-01 5.8570e+00 2.7499e+00 -3.3381e+00 -9.7036e+00 -#> -1.7245e+00 -4.4181e-01 -3.4086e-01 6.2400e+00 1.5419e-01 1.2574e+01 -#> 9.0976e+00 -3.2707e+00 -1.0903e+00 -3.3040e+00 9.1338e+00 -1.2215e+01 -#> -3.1234e+00 -2.8812e-01 -2.1735e+00 -1.0646e+00 -1.2786e+01 -1.5235e+01 -#> 6.6138e+00 -4.3299e+00 8.6403e+00 9.3438e-01 -1.1749e+01 1.8446e+00 -#> 6.2344e-01 9.8338e-01 8.2563e+00 -8.2448e+00 -4.5447e+00 6.5609e+00 -#> -1.4375e+00 5.1289e+00 5.4863e+00 4.6983e+00 -5.3126e+00 6.0145e+00 -#> 5.5840e+00 -1.5232e-01 1.0648e+01 -8.1426e+00 9.6469e+00 7.1755e+00 -#> -3.7829e+00 3.3464e+00 1.6952e+01 4.4535e+00 -2.9354e+00 1.5281e+01 -#> -1.0402e+01 -3.4845e+00 1.1573e+01 2.1074e+00 -8.8245e+00 1.1603e+01 -#> -1.1021e-01 -6.7476e+00 5.0775e+00 1.6184e+01 -3.9077e+00 8.0282e+00 -#> -6.1079e+00 -4.1911e+00 1.3648e+01 6.0187e+00 -7.1858e+00 9.6539e+00 -#> 1.7198e+00 -1.1251e+01 3.8847e+00 1.0030e+01 -7.0925e+00 -4.5950e+00 -#> -8.4614e+00 -9.8298e-01 4.4841e+00 2.8005e+00 -7.3674e+00 1.0545e+01 -#> 5.7187e+00 2.9945e+00 -5.2711e+00 -3.7479e+00 1.5054e+01 -8.7987e+00 -#> -5.5156e+00 1.2471e+00 -4.1211e+00 3.1218e+00 -6.8213e+00 3.6497e+00 -#> -6.2814e+00 6.7489e-01 4.7816e-01 5.8086e+00 -7.7224e+00 -1.1090e+01 -#> -2.7705e+00 -5.7763e+00 4.7694e+00 -1.5708e+00 -9.1193e+00 -1.5565e+01 -#> 9.1999e+00 -3.0123e+00 -2.4817e-01 6.1060e+00 4.1478e+00 1.1212e+00 -#> -2.9603e+00 -2.1796e+00 -2.0766e-01 7.8280e+00 3.1220e+00 1.4492e+01 -#> 3.6359e+00 -2.0703e+00 4.0872e+00 4.5950e+00 4.9332e+00 -5.6241e+00 -#> 5.1562e+00 -8.1316e+00 4.7583e+00 1.3220e+01 -5.4718e+00 -3.3530e+00 -#> 4.5500e-01 2.9386e-01 7.0324e+00 1.1036e+01 7.7246e+00 -4.6796e+00 -#> -8.1972e+00 4.4841e+00 4.2891e+00 5.8185e+00 -2.5228e+00 5.5887e-01 -#> -2.3574e+00 1.5458e+00 3.8627e+00 1.2930e+00 -2.1494e+00 -1.3772e+01 -#> 3.3417e+00 -4.2553e+00 -1.0949e+00 5.9260e+00 -1.1277e+01 4.3272e+00 -#> -3.4267e+00 9.3572e-01 -1.9961e+00 -4.4621e+00 -5.6221e+00 -1.7291e+01 +#> Columns 41 to 48 13.3268 -1.0056 -10.2394 -14.0572 7.2373 -11.7891 -5.4356 11.6350 +#> 1.5034 -0.2444 2.7159 -2.6949 -4.0703 -9.9168 -0.1028 -0.5349 +#> -0.7993 -1.5907 -2.9266 1.9504 6.6516 4.0723 7.5052 2.6683 +#> 0.0950 -7.9544 -8.2260 -17.0586 -8.8210 2.6073 -6.1276 13.7919 +#> 3.4560 -4.0487 -5.4636 -1.1480 -11.5815 -8.1386 10.8291 -1.2520 +#> -12.3671 -0.4899 -4.8369 6.4535 3.7236 7.9121 11.4997 -0.7510 +#> 4.2753 2.2398 16.9714 -3.7689 7.5609 3.0315 
-0.2744 -2.8969 +#> -5.8823 1.0620 6.1123 -1.4432 -4.3620 -3.0380 -12.4353 10.7294 +#> -6.1138 -8.9188 -12.5581 -13.1857 -6.2093 -18.3727 -3.3118 -10.2877 +#> 1.5605 2.2150 5.4671 9.2354 -2.3335 -2.7411 -2.6724 -2.9369 +#> 0.1683 -5.7333 -6.5652 -13.6436 -9.7716 4.7530 15.9291 12.4899 +#> -9.5900 4.0555 -1.4710 -12.2339 -11.1078 4.2110 -5.0619 4.1043 +#> 1.9375 3.3089 -5.0634 -6.1416 -7.4462 0.7009 -11.1027 -11.7545 +#> -1.9137 4.6833 -9.6380 -0.2342 -10.5227 -13.0958 -22.3145 -7.6393 +#> 13.7592 -9.2456 -12.5695 -11.3587 1.2518 -5.5265 -6.4806 -8.3060 +#> -9.0359 12.3764 -5.5282 18.9738 6.3271 11.4664 1.1077 5.8071 +#> 17.9974 -9.6266 -3.1348 -4.2414 1.6477 12.7555 1.1829 8.5258 +#> -8.4362 -2.7764 2.5933 -7.1246 7.5622 -2.8431 8.3987 -5.7898 +#> -3.5049 6.4722 3.0645 3.9859 2.8276 8.8630 3.9953 -2.4452 +#> 15.4691 -8.0395 -1.5533 6.2753 1.8991 -12.0606 -5.0461 1.4275 +#> -2.2957 17.5421 -1.3339 -7.7750 -2.2268 -3.0716 -15.3968 -0.7552 +#> 9.6343 4.5467 -3.1903 4.5959 -4.0611 1.0943 -0.1111 6.9238 +#> -5.9578 0.3011 1.7163 -8.8703 1.2913 13.4391 13.6061 4.1858 +#> -11.4033 3.1806 1.2891 -16.1160 18.8338 -1.5648 -3.3201 -2.6366 +#> -2.5553 14.1245 -2.2778 14.8919 6.8235 22.9245 5.5260 6.5431 +#> 5.9213 1.0869 6.3956 5.4595 7.6265 -8.0746 3.0931 6.7874 +#> 1.5071 -1.3201 -6.7472 8.5980 0.1973 -3.3340 -6.2912 2.9766 +#> 5.3211 -1.2440 -5.6185 5.0279 3.2060 5.5104 16.0627 4.4307 +#> 0.0718 -2.5977 7.5797 -3.5160 -4.5526 -8.2922 -13.4784 -6.1788 +#> -4.4021 -2.1830 -12.1799 -8.7476 0.3839 -7.2126 6.8421 3.3336 +#> -5.7626 -6.6735 1.4709 0.3446 -3.9542 6.2844 -11.6062 -6.3796 +#> 1.0421 -3.8594 -7.2021 -1.7108 -1.6160 1.6815 5.4795 -7.5773 +#> -6.4670 -1.7252 -0.6297 -11.1553 3.0178 -0.0184 -4.9736 4.4968 #> #> (3,.,.) = -#> Columns 1 to 8 4.9562 -3.7582 5.3943 -3.9742 -5.9963 -12.7356 -12.3488 -0.0281 -#> 11.2481 1.8273 4.5276 3.2045 -7.3007 4.8452 -2.3510 2.6769 -#> -4.4985 -1.0768 8.5073 -11.6589 4.7996 16.7321 -3.0965 -0.3856 -#> 0.6945 -3.6676 6.7533 -4.5750 4.0977 13.4447 -10.3970 12.6614 -#> -5.4403 -5.4518 1.6155 -8.4963 -5.9472 14.6116 2.3946 -1.3451 -#> -5.3157 -2.3617 2.7643 -2.6659 2.6909 -6.9647 -15.1995 5.5772 -#> 3.6503 5.7380 -0.8403 0.6548 -1.7302 -10.1615 5.2492 8.6817 -#> 7.2741 2.6035 -3.6202 -9.2834 -5.8462 8.7866 -2.8505 -1.4817 -#> -19.9435 -4.4367 -1.1491 11.0581 -7.7709 -0.3405 0.1401 9.1089 -#> 3.6538 -6.3743 -1.6704 -4.4341 11.5087 7.0363 1.0560 -3.5037 -#> 15.8845 -1.4452 2.1056 -6.1563 -17.7140 8.9579 -12.1511 0.6144 -#> 2.2474 -4.6580 0.3898 -1.0848 -1.7472 -14.7547 13.3952 -8.3100 -#> 11.9129 0.8427 9.8857 5.7461 3.1181 -4.4163 -0.3108 -5.2917 -#> -6.2343 1.1551 4.5635 11.1697 2.7615 2.6141 -7.0863 4.4991 -#> 15.9765 1.4206 5.0062 -2.9111 1.0533 6.9639 -11.3675 11.6931 -#> 1.9163 4.4746 -2.2195 -6.6301 4.6677 12.5423 4.5925 -1.8943 -#> 6.9265 8.7154 4.7434 -9.8159 -4.0058 1.6135 3.6586 -20.6469 -#> 8.3534 3.6571 -8.5923 -1.9266 18.2730 3.6534 -1.9278 3.3406 -#> -0.0040 10.9221 4.4209 -6.5203 6.8133 10.0404 0.2141 -2.6464 -#> 7.6266 -0.4131 -13.4900 -2.6130 5.9332 -1.1798 -11.2315 5.4657 -#> -0.3001 3.4722 -2.9582 -10.0573 4.1344 -1.6925 0.8711 14.5282 -#> 1.7112 4.8549 14.1942 2.7680 0.3115 -1.2966 5.9259 0.5383 -#> 0.7748 2.5614 -1.0904 -2.4082 9.1032 -11.0332 -3.9074 1.4275 -#> 2.6380 -1.4095 8.9158 10.1162 18.3666 3.2091 -6.9841 -6.9002 -#> 12.0612 3.2341 -4.0510 -10.9244 -1.5035 4.1921 -1.7979 1.8640 -#> -3.7546 6.1095 -7.6240 -7.1370 1.2163 -7.0363 -6.3017 0.9980 -#> -1.8646 7.0013 12.8986 3.9962 4.5674 16.9755 -0.0370 -3.8269 -#> 2.7722 -7.3149 5.8819 4.5695 
14.4691 5.7079 -11.5897 -10.8310 -#> 8.9845 13.7087 -0.9190 2.1477 2.1493 9.3396 7.7154 12.4460 -#> 4.9266 -4.6779 -1.5574 12.9128 -12.8062 -12.4111 -8.2159 -14.7549 -#> -4.2986 -0.1995 -10.0720 -9.3908 4.2700 -8.7040 6.4533 -11.1542 -#> -0.6362 -0.8948 -10.9689 8.4188 0.0568 -4.8182 3.3533 -5.8279 -#> 2.6255 -7.6898 -7.6644 5.9319 -8.0142 -11.2382 -11.3271 -0.3779 +#> Columns 1 to 8 -5.1278 -4.5006 9.5095 9.0330 2.4264 18.7260 7.8777 7.2444 +#> 4.7839 -8.5126 -6.6213 -12.6814 5.2088 -2.2826 -11.0311 -6.3638 +#> 4.1031 -5.7867 -5.0951 1.3480 2.5836 16.7772 -1.3587 0.8495 +#> -10.2675 5.1549 7.8001 -1.1595 -0.7911 0.5795 4.7843 0.8563 +#> 0.4001 -2.3683 6.3588 5.2743 23.0337 19.6014 2.8997 6.6821 +#> 0.6768 -2.5441 -9.4527 1.7197 -3.5171 -2.7897 8.4855 10.2762 +#> 15.8093 -8.9259 -9.4807 0.3857 -9.0302 -1.5626 -17.3421 -0.3243 +#> 1.0756 6.5978 12.1907 -1.7837 -2.6270 -6.9303 5.1668 -7.1205 +#> -0.4221 5.9274 -4.8581 -4.2838 4.0266 -9.8524 -0.7421 5.3448 +#> -0.9774 3.8799 -7.5644 -8.0557 -13.6937 -11.5507 1.4400 7.6644 +#> -7.1578 13.6348 9.7534 0.4326 -9.2767 1.7136 22.0305 -0.0253 +#> 1.2827 0.5390 -3.6264 14.3977 3.9754 -10.2044 1.4494 -0.8788 +#> -8.7990 -8.7189 -4.1777 -18.2835 1.2326 -17.9329 2.7786 -3.0052 +#> 0.8874 -4.3759 -3.4577 -19.0189 -6.9200 -0.5800 -2.5248 2.2195 +#> -1.2808 -11.8056 0.3925 -0.3070 4.4410 4.1242 -2.2455 3.7403 +#> 0.8672 1.3068 9.4222 2.0591 1.1883 -8.2026 -1.0674 -2.2916 +#> -3.4802 -0.7958 2.2186 16.8960 6.2402 11.7586 5.3872 2.5588 +#> 3.0149 4.2899 7.5872 -13.4274 3.8662 4.8530 -4.2466 -9.9949 +#> -0.0567 -8.8183 -3.9920 -9.8587 -10.0767 -15.9883 3.4482 -1.4267 +#> 2.1360 -11.6248 -13.9227 -6.6362 -8.9768 -3.1309 -6.4980 25.6076 +#> 13.2209 -0.9525 -7.2168 -1.3732 -17.8097 -9.9051 -9.3736 -8.0395 +#> -11.5475 -8.8849 -11.0298 5.0507 -4.9686 -2.1899 -1.1698 0.7857 +#> 9.0478 -4.0544 -0.4491 7.9602 1.5543 9.2264 -2.7842 0.1068 +#> 4.4409 -3.3490 0.2244 9.1930 12.8143 7.1343 -3.8777 9.9727 +#> -2.8383 -11.2511 -8.8919 -4.4868 -4.4493 8.4346 -15.7925 -1.7161 +#> 2.5040 -0.7997 -5.9196 -3.4500 -7.8017 -3.0584 -2.4306 7.8968 +#> 2.3506 -1.7359 -3.1030 -1.1406 2.4846 5.0937 -2.7929 6.2975 +#> -7.5156 6.1988 -0.8792 -5.0672 3.5677 -10.4047 -2.2888 8.2631 +#> 0.9288 0.3885 -5.8831 -1.4473 -2.8584 4.3175 5.0746 7.5993 +#> 2.1791 3.4867 1.8659 -7.9709 -3.9872 -3.8741 12.2298 7.8602 +#> 0.4885 1.0647 1.4114 -2.3891 15.1261 4.7602 14.7145 -7.2278 +#> 2.1687 9.4927 -1.3812 5.8388 -7.3725 -14.1129 -0.1521 6.6170 +#> -1.6358 9.5396 1.8514 3.5563 -2.7231 5.5364 5.0568 -14.5332 #> -#> Columns 9 to 16 -9.7360 -6.4222 -5.7229 7.4474 -6.9485 -18.3427 -2.7103 3.7782 -#> 1.2750 10.4092 0.1234 9.9826 -7.2135 -5.7552 -1.9998 -1.9475 -#> 1.5161 -2.5462 -1.0372 2.6637 5.9223 6.5293 -17.3755 10.4248 -#> -11.9943 1.9396 -10.8378 -27.0799 24.8994 3.4319 0.5319 -13.5652 -#> -2.1803 -3.0364 -3.0484 -11.1355 1.9524 -6.2876 -0.3480 6.6803 -#> -12.3258 -14.2578 -11.3267 -2.5336 -18.2894 -4.2594 -1.9660 12.5184 -#> -6.8200 -8.1209 -9.5000 1.3338 1.5568 -5.8006 14.6046 -7.5704 -#> 4.2130 3.7024 -1.5168 2.1240 -12.3614 -7.4998 -5.9788 -5.3655 -#> -1.4886 -14.2426 8.8877 3.6402 -13.4771 7.7371 -8.9763 -0.4561 -#> -13.1842 -2.8242 -4.3459 -3.8975 -7.4657 8.8410 4.0576 3.8440 -#> 7.6390 3.3561 0.9564 10.3876 -6.1527 -1.8119 1.3460 5.9928 -#> 8.8839 -0.3631 -3.6710 0.2649 4.8643 -5.2106 14.0710 -2.3089 -#> -4.5459 2.2723 -1.4164 -5.1818 11.6005 6.0302 -0.6840 -1.0466 -#> -2.4372 -3.9876 -7.3022 16.0805 28.7031 -6.6200 1.3126 15.1169 -#> -8.7492 -17.0470 -3.5868 9.9832 -13.2698 14.2807 7.7293 
-0.8965 -#> -8.0431 6.6999 -8.5087 -17.4936 15.0480 -1.8982 -5.8062 0.4042 -#> -1.0442 1.5605 -3.1818 -10.4155 1.5044 4.2108 -10.3067 2.6241 -#> -14.2217 8.4671 -2.2625 -13.4366 4.9955 11.3371 3.7654 -5.4279 -#> 5.1664 0.4539 11.9847 -5.2858 7.8836 -3.6860 -7.9037 4.0934 -#> 4.9691 2.4076 9.0446 0.2401 -17.3001 13.3294 -13.2066 4.6766 -#> 10.6484 -4.1710 -4.8643 8.5792 0.4483 -4.1667 2.6186 2.1822 -#> -4.8166 4.1797 -2.3908 14.4425 -8.0795 2.8944 2.9143 0.6577 -#> -5.5388 -2.0295 -0.4797 5.0858 -11.7329 4.8734 8.9627 2.9415 -#> -9.7544 -5.7999 -0.3570 -1.3464 4.3133 23.6557 -4.5242 7.1379 -#> 3.3175 -1.3137 -7.1024 9.0726 -9.1605 4.0476 -3.9804 4.8644 -#> 1.4142 -4.5650 -6.4153 3.6048 -13.3332 -6.1364 -11.7781 1.3500 -#> -1.1517 -4.6129 -19.7663 -6.7512 17.9404 -8.0486 4.8501 1.3591 -#> -5.8542 0.8734 3.2384 -1.5739 21.9474 -3.7593 -0.9735 12.2523 -#> -2.3665 -7.0931 0.6589 16.3679 -4.9314 10.7504 9.8569 0.4346 -#> -5.0901 -1.9335 -2.1915 7.1066 -6.8045 -7.4043 3.2150 4.0988 -#> -8.1054 -8.8176 -11.3086 -7.5997 1.9523 2.5381 7.7193 -4.2398 -#> 3.0093 5.9653 4.1393 0.3365 -1.2716 11.9673 -15.8381 -0.9102 -#> -1.5938 -3.4779 10.4909 12.1245 -15.0920 4.7935 3.1971 2.9640 +#> Columns 9 to 16 -10.1209 5.0800 -14.4894 -10.0485 -0.3948 -7.9548 3.6714 -0.9975 +#> 8.0411 -1.4947 -6.4890 3.2111 -3.5606 -4.2544 -0.7074 -7.2217 +#> -5.1016 6.4078 9.7789 6.6518 0.4305 -4.3517 -7.7457 4.7810 +#> 7.2694 7.8292 -1.1551 -7.4541 3.0155 6.6142 14.5037 -6.5980 +#> -6.6166 1.8296 -7.9309 -1.7234 -1.9408 -4.7944 2.2040 5.3681 +#> -1.8593 -14.4510 -5.8747 2.3867 6.4348 3.1423 -1.6486 -13.8432 +#> -5.4119 6.8913 11.1302 5.2678 9.0553 1.0027 -4.7439 3.1055 +#> -3.3472 -7.6898 11.3553 7.8453 -4.2988 0.4420 1.1463 -1.2659 +#> 9.9301 -10.9049 -12.6421 -5.0100 -5.2547 9.7823 -9.9843 0.8904 +#> -9.9175 -1.8258 6.7117 7.1737 -6.8342 -7.9616 -1.0889 -2.9683 +#> 0.1876 1.0047 -6.2214 -13.5494 -4.1035 -11.3754 4.1208 -2.1293 +#> -5.8780 -9.9904 10.1004 5.3111 -6.4698 -6.1977 -2.8318 9.5273 +#> 10.2986 -5.0245 5.8463 0.4889 -3.6013 0.0976 -11.6090 -4.5015 +#> 14.3856 -12.5066 3.9378 -11.2010 5.3067 -3.8067 3.9946 1.4835 +#> -5.4574 -18.1965 -2.7907 3.7717 0.1156 -7.8323 -2.2334 3.3117 +#> 9.4175 -2.7659 10.4894 4.2989 2.9681 9.3755 5.4636 11.8332 +#> -1.5391 2.4291 -7.9587 -10.5855 1.2087 -4.1796 7.4838 4.4501 +#> -11.9078 -1.4563 3.1055 2.1957 2.7735 -8.9930 -0.9006 -5.1821 +#> 4.1253 -4.4861 1.8479 4.7597 -3.6926 -9.7058 -2.4701 -6.3677 +#> 12.0557 -7.0326 -15.5973 1.6378 10.3505 -0.3404 -7.6398 -8.2503 +#> 2.6178 -0.4480 5.4727 -8.6452 0.2683 -4.2479 -9.1431 -3.1705 +#> -10.6440 15.4314 17.6512 2.7787 -1.0676 2.3829 1.4587 5.3460 +#> -0.5966 -8.2422 -1.0872 5.9343 0.2573 0.0817 -3.3590 6.2829 +#> 5.4267 -8.0981 -8.1271 0.8138 2.6016 0.0605 -7.6136 -4.5924 +#> 9.6271 6.6256 10.1982 1.8380 20.7458 -2.5219 13.0436 3.5439 +#> -1.2009 -0.9587 2.5393 3.2507 5.9707 6.7003 3.7681 4.5587 +#> -2.9848 -11.9670 -0.4371 -8.6889 8.6647 -1.2020 -9.0784 -5.1270 +#> -0.4679 13.3824 2.3437 5.5385 1.7472 -2.0704 10.1693 -2.0073 +#> 1.0152 -5.2033 -4.1154 -3.2404 7.0626 -2.0554 -2.6823 -0.5493 +#> -4.2003 -19.8706 -11.9461 -3.9365 2.2946 -5.4263 -16.3099 -1.0841 +#> 18.8134 -11.2480 0.7597 0.3864 -2.9618 11.1220 -6.8384 0.9543 +#> 4.0662 7.1095 6.6421 11.1504 3.0409 0.1461 13.8598 2.8963 +#> -9.8873 10.0620 10.9858 1.3170 -6.6131 7.4196 -1.5304 2.4688 #> -#> Columns 17 to 24 -7.8446 10.8300 3.1495 -6.4143 -4.9418 1.5859 5.7119 4.1122 -#> -8.2818 -2.5108 0.3972 5.5391 2.6315 5.6176 4.3199 -1.2731 -#> -3.7133 -7.0713 -9.7593 3.5059 4.4236 
4.6572 -5.9136 6.1746 -#> -11.5718 3.8795 -2.3085 -3.9470 1.1945 -1.6105 -1.4233 4.4411 -#> 0.0810 6.6555 3.7000 14.6576 11.7133 10.7315 2.1382 -2.0174 -#> 11.3570 3.8331 2.8234 12.0460 5.1824 -8.6710 -6.2794 0.8210 -#> 5.8555 3.8700 1.9546 -6.2020 -2.1532 -0.5159 3.9648 7.4020 -#> 5.3760 -0.2685 7.1646 -0.2744 3.5396 5.4370 -1.4105 -15.6596 -#> 11.7633 -10.5656 6.0806 0.3545 2.7078 0.2011 2.5850 -2.2982 -#> -1.9521 11.4095 9.7509 -2.7458 -1.9989 1.4426 3.1751 7.4327 -#> -0.7683 -11.2969 0.6807 1.4418 1.1541 1.8743 -6.6355 -2.5485 -#> 2.3949 11.4733 -4.1730 1.7488 -2.7655 1.3965 2.2042 -7.7285 -#> 2.0386 -3.8169 -5.1341 2.4189 -0.9858 -1.9761 -2.5394 -2.1106 -#> 2.8564 13.8817 3.5312 7.7483 2.4548 3.4022 -6.1050 14.8377 -#> 5.4632 -4.0690 8.5569 -0.5126 13.3378 2.3015 1.6362 4.0142 -#> -8.9410 1.1707 1.4379 -3.3880 1.1356 -3.8432 -0.2028 14.7388 -#> 9.5698 0.8845 3.6077 1.9991 5.9551 4.0725 -12.3633 -3.3834 -#> 5.4765 -10.8793 -1.3330 -0.4239 -0.2444 -4.3707 4.3048 -2.0963 -#> 7.7052 0.6048 -3.1704 1.7206 -6.3772 -2.8244 -1.9311 7.0662 -#> 13.3648 -1.6975 12.1434 5.2608 3.4101 -5.8776 -9.4342 -11.3617 -#> 4.4433 5.9981 -0.4610 3.9429 2.5325 -6.9121 -0.0131 -6.9599 -#> 4.6063 -0.7668 2.4319 -10.1652 -6.0697 -2.6817 -2.0906 -2.6583 -#> 2.4389 2.1851 -2.5737 4.2586 -0.7670 -10.1724 6.2627 7.3546 -#> 9.2102 12.2677 0.6505 0.8508 8.8404 -2.4235 -0.3184 13.9138 -#> 0.4299 -7.7316 -4.0262 -1.0616 -1.8941 -4.5296 -2.2042 3.9778 -#> -2.5706 -1.2521 5.6562 1.2311 0.4175 -11.0818 -4.9537 -2.0501 -#> 10.8075 0.0243 -1.6829 -1.8148 3.6318 -7.6898 3.4834 13.7268 -#> -1.2697 5.0217 -2.6425 11.8827 8.9970 5.9287 -12.2312 7.6060 -#> 11.1753 0.4443 -2.5237 -0.2271 -1.2767 1.0772 11.0985 8.0461 -#> -5.9804 7.4466 12.3405 -5.5906 -5.0035 11.6740 -8.4227 -11.0415 -#> 8.5981 13.2756 0.7086 -7.9883 -5.1583 0.4455 6.2542 -0.3895 -#> -10.7068 -0.0316 -7.3415 13.2332 -1.9071 6.7170 -0.1882 -2.2628 -#> -7.5274 -4.2322 4.3580 -0.3955 -1.2535 -0.3756 4.1293 -3.7303 +#> Columns 17 to 24 7.5862 -5.1936 7.6388 5.8353 11.7061 0.5586 2.5924 1.9125 +#> -3.9885 -3.5434 1.9906 -6.1350 -1.1435 0.0343 -7.1282 9.2190 +#> -2.6253 2.8304 0.3522 -0.2029 -0.4975 -10.6151 3.0706 -8.6368 +#> -6.4932 -2.8950 10.1423 -3.4150 -9.9279 0.8822 -3.1574 -0.2743 +#> 4.4764 4.6848 -2.4851 -1.5540 11.4432 -1.4824 10.7468 1.9491 +#> 3.1593 7.4647 7.0182 5.1533 -2.2870 -4.7152 1.3693 0.7984 +#> -13.2692 5.1763 -13.7849 2.4519 -3.0231 5.8470 -4.8133 12.4372 +#> -1.0103 -2.9673 3.7567 -7.1660 -7.1880 -3.3324 14.3220 -6.6137 +#> -14.4350 -0.1077 -0.8339 -2.5992 -4.7385 -7.6101 -10.0769 -6.2583 +#> 2.9348 -3.5775 1.3642 -4.0891 -1.4119 3.3728 -7.7035 5.2910 +#> -12.4565 -2.8200 5.8915 9.6994 10.6242 3.0506 2.9140 0.1081 +#> -3.2337 -4.1424 -10.2011 8.0144 1.3814 -7.3781 1.0551 -6.8396 +#> -4.5594 3.5548 1.2273 -5.1877 -1.3088 -6.1823 9.0818 4.9969 +#> -0.1863 -5.1163 2.3162 -3.4166 3.4583 4.8357 5.0724 8.2961 +#> -7.6159 -1.4426 8.4888 8.2496 -9.5194 1.6780 -9.8950 -0.6717 +#> 9.3602 -4.5074 -1.1037 1.4025 1.1228 -13.7274 4.4925 -19.3559 +#> -5.4191 -1.3136 4.8185 13.3719 6.3094 5.8786 -12.9161 5.3763 +#> -8.1083 0.3903 8.0846 -3.9731 -9.1568 6.4466 -9.1130 2.1779 +#> 2.8756 -12.0547 4.6775 3.6406 3.5585 -4.0128 5.1379 6.3532 +#> -3.0196 2.7591 5.1131 13.2092 0.9012 -7.0422 -2.8851 0.7359 +#> -3.5815 -6.7956 -9.8269 -3.7708 11.1237 2.8918 -7.3674 5.2442 +#> 2.7471 -8.5420 -8.6237 1.7127 1.3216 -2.3913 -0.5696 0.1381 +#> -7.1711 -1.1567 -2.9633 10.2240 -3.7295 2.3287 8.9156 -2.2737 +#> -3.1349 10.9526 -0.4448 3.9403 7.9590 -2.2257 0.9995 -6.4912 +#> 
5.1741 -4.0589 2.3938 -3.3143 -2.1102 1.9795 5.4914 -4.7404 +#> 4.5089 4.2776 3.0584 -5.2834 -0.0054 -0.3052 -10.0477 2.4862 +#> 7.8940 10.7260 9.4198 -10.9637 3.1990 -0.3634 -2.4300 4.1442 +#> -1.4876 -6.3949 1.8075 2.9876 9.0423 2.6924 4.7369 0.6878 +#> 3.1631 -2.4461 4.5630 0.9205 5.2506 2.7286 -2.6849 -0.1385 +#> -7.0602 5.6859 -0.9651 8.3329 1.0673 -1.5509 1.5679 9.1858 +#> 6.1669 6.9448 5.8911 -6.9180 -0.8339 1.1331 12.7957 -11.7288 +#> 1.5151 -1.5748 -1.9829 -1.2508 -0.8887 -7.4564 6.3130 -6.5703 +#> -0.0470 -3.5726 0.1762 -9.5157 -2.8015 6.6279 -6.3339 -6.5166 #> -#> Columns 25 to 32 -11.3413 -0.6342 1.2782 -1.0040 0.3941 -1.2505 -7.8863 2.5857 -#> -2.3209 5.3974 -0.6238 -1.9349 -5.1256 -0.9068 -1.6616 -2.2188 -#> -5.5235 -1.3256 7.7680 0.0419 4.6588 8.5270 -14.0638 -5.7350 -#> 23.6620 5.5299 3.8752 -8.5570 1.7381 12.4320 1.1287 -2.2732 -#> 6.0077 11.7650 5.9815 -1.2362 -2.6430 -1.9138 -9.4792 -9.2415 -#> 13.9983 9.2234 2.0461 -5.2399 6.2495 -1.0772 -8.9246 2.9772 -#> 5.9896 4.1438 1.1027 -1.9300 7.9763 -11.2774 -5.4039 -8.4377 -#> -3.3392 4.2929 -1.5951 -0.7069 -5.5264 -0.2146 2.6092 -3.2463 -#> -2.7116 2.4720 11.3886 5.4341 2.0100 13.9830 1.6636 1.8237 -#> 8.7153 7.0944 4.8265 2.3740 -2.2886 -3.3963 -8.7984 -2.1433 -#> -11.5131 12.9237 -5.3272 5.0744 -6.4152 13.7479 -0.6910 2.8149 -#> 3.7939 -4.5852 -9.2190 -6.7536 7.5973 -8.4979 5.2731 3.2761 -#> -0.0529 -8.7274 -6.3776 -6.3154 10.8041 -0.3989 -2.7530 4.2337 -#> 6.1232 -10.6163 -8.4237 14.1920 1.7618 3.2803 6.9025 -3.0799 -#> 17.9500 -4.1805 -11.3844 -3.2068 5.9128 -1.0761 -21.9036 3.2809 -#> -3.7840 -8.8256 1.5954 7.9607 -4.1433 -1.0089 -1.6928 9.5089 -#> 3.4930 10.6973 -6.1665 -6.6312 -0.4147 6.9757 -9.2026 -10.0955 -#> 6.7164 -0.0767 -2.7187 -11.2172 2.1860 -1.5859 12.1095 3.5310 -#> -3.8810 -12.2251 14.5198 6.4680 -4.0062 2.9629 5.1757 14.1900 -#> 4.1051 -0.6954 -8.3595 2.2953 6.4439 15.2789 1.7176 3.2482 -#> -1.8213 -0.6504 6.6178 -10.4879 11.9210 7.6479 -2.6390 4.8771 -#> -8.8219 -0.2972 -9.5819 9.2868 6.3488 2.5390 11.7205 2.8740 -#> -0.9322 -2.2826 -1.1460 -6.5040 -0.7949 -4.9513 -2.1422 -0.7358 -#> 13.7824 4.6837 9.9722 -6.0029 4.5716 12.4649 -9.9900 6.4003 -#> -0.3243 8.9251 -2.4305 -4.2840 -4.8234 5.5312 -10.5267 6.2358 -#> 0.0412 -7.5629 -0.1690 0.6404 -5.2020 -3.4128 -8.0235 -0.3811 -#> 9.9647 9.9041 0.7346 -8.5584 -0.1955 5.9069 -8.0969 -5.4198 -#> 5.9976 -8.5542 5.4198 -3.4565 -11.3585 6.2732 -7.2283 -6.8408 -#> -6.9500 4.6178 -6.5540 5.1711 6.6195 2.7765 4.6212 7.9218 -#> -11.7170 -2.9426 -8.0669 6.0971 -8.5434 3.0232 -5.7801 -4.7574 -#> 8.4081 0.8452 8.0060 -4.1084 -0.0990 -9.2372 9.4923 -5.6565 -#> -12.8302 -4.7184 -8.1430 -1.4103 -4.2225 4.9293 6.2137 5.6808 -#> 0.6241 2.8501 -1.8625 2.9744 -4.9602 2.5389 8.5130 4.8167 +#> Columns 25 to 32 -12.4241 -2.3832 13.8483 -3.8934 -7.0924 -3.4670 0.6426 -1.6474 +#> -5.4108 -3.1627 -0.3126 4.8923 2.2859 -2.5586 0.7236 14.1091 +#> -5.8512 -6.9455 3.5901 -7.0167 -5.4983 3.3523 -0.5075 6.9942 +#> 1.1806 7.7490 0.3640 -1.2004 0.8674 -8.4129 12.1921 -0.9055 +#> -2.8286 -9.3626 0.0124 -0.1729 -9.7298 4.3014 -10.2263 -2.0678 +#> -3.5253 -13.2619 7.5419 14.6206 -1.8729 5.4491 -5.4452 8.1817 +#> 8.5916 -12.3966 5.0856 -5.0745 1.0893 5.3363 -14.7136 -2.4518 +#> 16.3930 -1.4149 -10.8442 -10.6631 14.5356 -1.5279 -1.1152 -10.4117 +#> 6.7540 13.9913 -2.8122 8.0562 -5.3550 -7.6456 17.5554 8.8891 +#> 2.6385 -0.3676 -1.2063 5.2374 -6.3293 6.0667 8.8077 -3.8650 +#> 1.5460 1.6829 -3.3137 3.8334 8.9971 -5.6662 3.6407 -11.1187 +#> 9.1573 -5.5213 -2.0661 -1.0791 -1.8727 -10.4344 4.4783 
-2.4332 +#> -7.0515 5.5582 8.4178 -4.2324 7.0754 -4.3007 -1.1885 9.6686 +#> 16.0408 5.7062 -2.9906 -3.6330 8.3559 5.6473 -6.7970 -0.3358 +#> 11.3944 3.5495 2.2231 -3.1987 8.4890 5.8760 -5.5237 -2.5542 +#> -7.8125 1.8106 -9.5020 -1.9635 -0.1818 -7.1811 -1.3380 -1.1443 +#> -18.4412 0.3937 15.5625 -0.0757 -2.7383 -9.6661 -3.6543 -6.4844 +#> 0.7384 -5.1209 -4.5795 6.8482 6.8603 9.3952 -7.4578 -1.7903 +#> 4.4217 -9.0189 8.1757 15.6252 2.5907 8.5404 1.5811 -0.3775 +#> 1.3801 1.1166 7.4831 12.0481 -2.9186 4.9001 3.8718 7.7390 +#> 1.3553 -6.4193 2.3037 1.1987 -1.3614 0.6529 -3.2446 11.3841 +#> -7.0650 -17.0139 9.4169 4.1874 -3.1104 -9.0348 9.0261 5.0205 +#> 10.5669 6.3751 -14.4034 -4.4137 3.4566 2.5257 -11.6567 -11.9979 +#> 10.8478 -2.4995 14.0791 3.3079 2.0940 4.3690 0.4446 4.1133 +#> 9.5151 2.8601 -15.0337 12.3219 -2.9705 0.8964 -11.6670 2.3322 +#> 1.6548 -5.2045 -4.8811 6.7065 -9.4835 0.5309 1.7449 6.3624 +#> 7.8491 -4.2668 4.8472 10.2011 -8.4658 13.6301 -1.1898 9.9359 +#> 3.3706 -4.5419 -5.4958 10.8610 -10.5074 -3.3321 6.8788 -10.3222 +#> 13.0443 5.1878 7.4450 6.2690 6.3905 -2.3382 2.0605 -4.1870 +#> 4.8844 3.1407 5.7249 -1.3035 10.8346 1.9627 -1.1333 5.6709 +#> 0.1207 7.3492 -3.1298 -10.1734 3.7775 -9.6167 -4.4163 0.1131 +#> 13.8647 4.3644 -7.7183 3.1355 -11.2283 -9.5480 1.8866 -6.4450 +#> 18.4618 3.9663 -10.1564 0.2834 -5.4781 4.2410 11.2886 -1.0867 #> -#> Columns 33 to 40 -3.6500 -2.8977 -9.5778 2.7114 -1.0903 -1.8082 -1.5907 -2.7820 -#> -2.2093 6.8268 -4.4927 -3.6955 -5.9770 -0.7346 4.8322 7.7719 -#> 6.6788 10.7728 3.5679 4.8409 -6.9984 -7.0787 9.2602 -4.7396 -#> -2.6668 6.8936 3.9008 0.6742 9.4432 10.4602 -8.3675 -7.7000 -#> -12.6144 -4.0425 4.8042 -1.1339 -1.6229 -3.6535 5.7386 -2.7556 -#> -6.4437 -3.8952 2.4651 -1.3158 5.6072 -1.4178 4.2024 -5.7168 -#> -1.0680 -1.5215 -5.3585 4.8564 -1.5816 6.3936 -4.4383 -9.2617 -#> -16.5886 -10.7780 -3.7502 -4.6051 3.0135 2.6677 6.4653 0.4712 -#> -2.2415 7.8049 -2.4149 11.1596 -0.9475 -4.2648 7.2630 -3.0681 -#> -12.9061 3.0613 6.8384 0.5073 5.1894 0.5770 0.2758 4.9556 -#> 7.6316 3.3277 0.5884 6.6436 -7.7094 -0.2284 5.5000 -12.2994 -#> -7.0150 -4.5404 3.0001 1.8709 -6.1011 3.0254 -6.1995 6.7020 -#> 6.1677 -0.5276 -6.2246 -6.4439 -6.6510 -1.6874 1.4696 -12.6055 -#> -1.5107 4.3904 -8.7457 4.4525 5.1152 -10.7965 -15.5326 -0.2713 -#> -4.2965 11.2479 -1.4450 1.0726 -11.2088 2.0681 0.3535 -0.9314 -#> 7.5914 0.7966 1.4415 -1.1603 2.0057 3.8741 -6.7373 3.2602 -#> 0.3362 -3.4889 -8.5000 -1.5220 -2.7686 -1.4101 0.6254 -5.9871 -#> 2.6305 5.9587 -6.0280 4.6396 -11.8046 2.4536 -3.5823 -10.0967 -#> -0.1964 -8.6662 2.6549 2.3293 0.8565 0.5143 -4.3031 -3.5940 -#> -7.2017 -10.6218 -12.5678 0.4210 0.4910 -3.6761 8.4244 1.6943 -#> -8.5529 -2.2835 -4.7047 -2.7886 3.0916 -6.8411 -0.9513 7.0117 -#> 9.7563 2.3959 8.8850 4.0535 -2.0100 4.7188 -4.1533 -8.6275 -#> 4.3480 5.3681 -3.4141 3.3045 -3.1704 -3.2342 5.7581 1.7282 -#> -6.3818 -10.8806 -5.7432 -4.5982 5.5797 -3.4485 -1.1335 -4.5567 -#> 3.6543 0.1287 -2.1096 7.8689 -2.1157 -5.4079 1.6017 11.3682 -#> -2.5885 3.2719 2.9070 -5.1318 12.5928 1.2289 -5.1828 6.7898 -#> -15.2362 -3.3284 11.5042 -0.6523 -1.5897 -0.7097 -11.7615 1.6562 -#> -1.8654 1.9799 -0.0391 -4.9505 -1.6498 -8.5602 1.9597 -3.4851 -#> 2.5662 -1.3787 -2.7014 7.4054 -2.2759 -8.8455 -8.5638 -6.0573 -#> 7.1246 4.2721 -7.5306 -14.3989 -3.6718 1.1478 12.7831 0.1247 -#> -7.4369 -7.6452 1.9472 3.2242 3.6734 11.6259 -12.0304 2.0364 -#> 1.5119 3.8946 -12.3758 -7.1929 8.3145 -15.0007 7.2717 2.4664 -#> -4.5497 2.5463 -2.5948 0.9746 -1.7025 -2.7597 1.9230 1.6655 +#> Columns 
33 to 40 4.6275 -6.2525 0.2273 0.5162 -5.7469 5.0596 -0.8002 3.8485 +#> 4.3975 2.4000 0.4999 1.5539 -1.0836 0.2710 4.5753 3.6675 +#> 5.1464 -1.8309 6.7341 6.0218 2.5222 6.3212 12.2786 3.4571 +#> 8.4806 6.5521 -0.7626 -3.0051 -4.7200 5.4536 -3.7861 6.3004 +#> -0.4939 -7.1265 -1.8313 5.6391 10.8495 -12.8465 3.4814 -0.0409 +#> 5.6473 -4.2682 -16.3963 -2.9322 8.5197 -6.8839 -4.5054 -0.7375 +#> 0.7164 -3.4073 10.4581 -11.7912 -15.0000 3.6911 -12.0530 6.9812 +#> 12.7442 2.3489 12.6276 1.2311 -6.8680 -1.0760 1.8756 6.6510 +#> 8.1730 7.1948 -0.7325 13.2429 11.1476 5.3274 6.7781 -4.0018 +#> -1.3343 -3.5368 -6.0568 4.6049 1.5299 0.7766 6.5568 -9.3556 +#> -5.0528 11.1602 4.3642 -8.3959 1.8170 -0.9587 -10.7672 -5.3321 +#> 0.2872 7.0115 -7.5109 6.7913 -1.2421 10.2332 -1.5095 3.5513 +#> 5.9695 6.1847 -2.4982 -2.6016 3.6351 12.2742 4.7726 0.1290 +#> -4.2477 13.5339 5.6733 3.9681 -5.2078 3.8811 -6.9092 -10.2976 +#> 7.7438 8.1314 -0.2777 -5.5457 3.9099 14.1153 -3.9083 12.4361 +#> 7.8012 1.3680 4.9069 -0.4541 11.2196 -6.2943 8.5874 -2.2029 +#> 1.5521 0.6741 -1.4565 4.6728 -9.4164 -1.0169 -9.4322 1.3428 +#> 6.7654 -0.5750 7.6354 -2.9348 -0.3384 8.9554 5.6896 3.0233 +#> 5.4142 -9.2444 -7.5921 1.5481 9.0746 -10.9617 1.6996 -1.6802 +#> 7.0393 -0.2783 -8.7650 6.1778 -3.5712 4.5086 2.2729 -4.6979 +#> -5.0930 10.1904 0.5958 -1.3349 -5.1781 0.9546 5.2487 3.5548 +#> 3.4535 -2.3363 -12.0578 -3.0157 2.8398 9.7810 -1.7136 4.3189 +#> -1.6257 6.5888 8.7434 -3.3883 -12.9276 -1.2550 -5.0422 -0.2425 +#> -4.3318 -3.5018 4.9429 1.3254 -3.2689 4.1785 -0.6368 12.4192 +#> -2.5082 4.0454 -4.7524 -7.9615 -8.5946 -10.8607 -3.8227 -4.9559 +#> 2.6004 1.3901 -5.8091 -1.1440 -3.1478 -2.5544 8.6592 -4.3201 +#> 5.8534 -11.1834 -7.6218 1.8461 1.0347 -2.5180 4.4297 -12.1894 +#> 1.7355 -7.1326 -9.4678 -9.4416 10.9211 -13.5729 -1.8866 -3.7971 +#> -0.9690 -3.4447 4.9763 0.1120 0.2413 -2.9995 -1.4582 3.3018 +#> -0.7116 5.0436 3.3002 3.2822 2.6303 4.0156 3.9552 -0.3802 +#> 6.4840 -6.5995 7.7690 6.0942 5.5695 -0.5471 6.1880 -4.2917 +#> 0.3795 4.7843 -4.9220 6.1237 17.6069 -11.2040 2.7889 2.3809 +#> -9.3396 -2.1523 1.6649 3.2519 -11.7225 6.6564 2.7327 3.9382 #> -#> Columns 41 to 48 6.1813 6.2912 7.3241 0.7908 2.1313 11.1295 3.3205 10.2126 -#> -3.8660 6.1652 5.3523 -1.6550 -7.9229 5.9736 -10.1264 -0.1634 -#> 0.8225 -3.2113 -3.1650 -2.8780 -17.5243 -1.3155 0.5709 -0.4941 -#> 5.9318 -7.8656 -10.8085 13.1046 -5.3957 -13.6984 2.6905 3.0666 -#> 0.4034 -4.6984 -8.2092 -1.9694 -13.3521 -7.1029 -5.8867 -3.3301 -#> -3.1147 4.8214 2.9073 -2.1222 7.6735 4.6024 9.5707 -5.2442 -#> 15.9312 -0.7414 0.4791 -0.9335 6.7204 2.6602 -2.1277 -4.0374 -#> 2.9042 -2.8153 14.4209 1.4202 -12.9603 13.3820 3.0815 1.4037 -#> -14.4248 9.5510 -4.2185 -2.1044 -5.7456 -10.7641 8.7872 0.1149 -#> 1.9290 -4.3031 -5.0268 2.0541 9.3620 -6.5746 -6.5838 6.1757 -#> 1.8804 5.5209 -1.6502 -5.0906 -13.2797 19.7620 -3.4012 -4.2540 -#> 1.3196 8.8311 -5.2938 0.7205 11.5397 4.6186 0.1667 4.4330 -#> 8.2320 3.5439 -0.0781 13.5372 -1.3183 -2.8995 7.4505 -0.4583 -#> -12.9408 -11.3869 1.4935 -12.7002 -5.3621 -8.7744 0.8293 -0.3609 -#> 2.6342 -0.4168 -4.7770 1.8892 -3.1467 -6.2088 -7.4270 0.3072 -#> -2.0058 -10.8628 3.7055 -5.4891 -0.8779 -0.8320 0.4250 1.8867 -#> 8.6515 -4.1764 12.1561 3.2225 -3.6669 0.8715 1.6936 -4.9513 -#> -0.8066 -6.8987 2.7274 -2.5657 -2.2977 -2.5447 -5.2651 -1.3667 -#> 0.0641 -1.1050 9.1838 -0.7779 -6.5939 10.3293 -0.6831 -3.4317 -#> -0.9526 -3.3516 14.0134 -1.8953 10.8101 5.9014 -1.4601 11.3590 -#> -8.8058 9.7172 -1.8348 2.3232 7.8532 2.6836 -6.7784 -4.1494 -#> -5.1763 
-11.3810 4.2428 2.8889 -9.5280 10.5218 10.3084 2.3278 -#> 3.5902 8.9550 3.7468 0.4195 11.1313 14.4605 7.6010 6.7326 -#> -1.8484 4.9172 -1.4582 6.6719 2.1260 -9.1362 0.8069 4.6898 -#> -3.9948 8.9041 -2.0977 0.1985 8.5756 -4.3336 -2.5226 -6.8194 -#> -6.7353 -5.9661 9.1023 -8.8217 9.8306 6.4111 -5.0250 -0.2583 -#> -0.5733 2.9590 1.0549 13.1248 -2.2757 -5.6781 4.8615 -10.4787 -#> -2.8304 -6.6133 1.1348 1.9259 -7.5608 -0.0975 -1.9719 -6.2450 -#> -14.2544 -2.1351 -3.8724 -1.3873 -5.2126 -4.2660 -13.4798 -2.1145 -#> -2.4491 11.6818 7.1726 8.7630 6.2928 4.9393 6.2456 3.5497 -#> 4.1296 7.1968 4.9017 0.2784 1.5593 13.0424 1.4521 11.3329 -#> -2.2085 8.1701 -2.5337 -3.7367 9.3092 -9.0775 8.4497 7.4483 -#> -2.8945 17.4843 -11.3043 -2.8576 11.5501 8.4810 2.3641 5.4486 +#> Columns 41 to 48 -5.9479 5.7317 -1.3896 0.4347 1.9078 7.1824 6.1996 -1.7376 +#> -6.0027 4.1752 2.8431 1.2113 -16.6840 1.9912 -0.8933 0.8581 +#> -2.8098 -3.0469 0.4930 -6.6300 -1.5747 1.3298 1.7320 -1.8426 +#> -6.1226 3.4930 -1.5902 2.0032 -11.9943 -6.5760 -0.2418 -0.0180 +#> 8.1420 14.2809 -1.5614 7.6316 5.5185 9.7685 -0.0391 -11.4884 +#> 4.5187 -4.5102 -7.0836 -15.6763 -7.2520 4.9916 -11.4424 11.0989 +#> 3.2945 -5.1948 7.6977 3.6042 -13.4441 13.6879 -8.7203 8.3680 +#> -6.1572 2.6177 -9.6676 0.6766 -3.0961 -8.5274 0.3879 -2.0255 +#> 2.1640 11.4470 6.3551 10.5476 -11.7410 -0.3454 10.9232 -11.7186 +#> 2.0374 -6.7611 -0.4781 -2.3366 -0.2988 5.7967 -11.7675 2.1235 +#> -0.4893 4.5329 -7.4395 5.4373 11.5770 3.8953 1.2265 -0.1558 +#> 6.5420 3.0139 -2.8958 0.8739 1.7625 0.5393 -3.9136 1.8008 +#> -4.5155 -2.4164 -0.7993 -17.7445 4.5658 -7.2534 -6.3549 15.2915 +#> -1.7141 5.7668 3.8222 -0.4085 -6.1782 -3.6481 -3.2473 2.1868 +#> -1.8065 6.8444 -0.0644 -13.5806 6.3467 -1.1588 -2.0386 -4.1606 +#> -2.7892 -6.8375 0.4811 6.8965 8.1999 -7.6161 6.4258 -4.8142 +#> 4.9122 -7.0249 10.3665 4.4574 17.5032 -6.8933 5.4158 0.6083 +#> 2.3044 -10.6202 4.9231 -3.4690 -7.5559 11.8718 -19.5934 6.9781 +#> -8.8568 -9.7555 -9.3617 -0.9874 -0.4350 -1.2576 -5.6463 5.0209 +#> 2.6090 -2.1196 4.1956 -6.4248 -19.2466 1.1152 -1.0907 -5.0427 +#> -6.5680 0.7271 -1.4618 5.2254 -4.1702 -1.0971 6.3611 3.9536 +#> 0.4930 5.8133 -6.1054 -5.7126 4.7208 -4.1726 0.1972 2.2802 +#> 14.6402 -3.5125 9.4125 -0.9477 -3.0400 4.0266 -4.2958 -1.4770 +#> -10.2021 8.6072 -13.6291 6.0448 -19.6731 16.7642 -7.7336 1.4413 +#> 0.8893 -3.5400 2.5201 4.0507 -7.3271 -0.9926 -2.7731 -4.9037 +#> 4.8718 -5.2791 -3.5868 1.9349 -12.1135 4.5263 -2.7548 -5.2707 +#> -2.0506 -2.6427 -4.2782 -13.0922 -5.9973 7.0806 -5.7817 0.7641 +#> 4.9468 -1.0644 -0.7425 2.7752 10.9227 7.2345 -7.5723 -4.3290 +#> 0.9754 3.8847 -8.3308 9.4071 -11.7713 -5.7789 11.2804 -10.8883 +#> -4.1672 -3.3255 0.0253 -7.2869 2.3881 9.2545 -8.6679 3.7378 +#> 7.3384 4.0322 3.0322 -3.4796 10.0678 -8.2047 14.3237 -3.1908 +#> -0.3416 6.2229 -4.0288 14.5847 11.5170 -4.0508 7.2624 -13.4191 +#> -3.2283 12.1488 -2.5061 8.8063 2.0179 -2.3577 -4.7272 0.1128 #> #> (4,.,.) 
= -#> Columns 1 to 8 -8.9373 1.6399 -9.3011 1.4044 3.6362 -3.6884 4.1129 -0.9177 -#> -1.4104 -3.4010 11.6886 -2.0425 5.0381 10.3211 2.3125 3.7280 -#> -0.9357 -2.5987 -0.8097 -7.3440 -2.9790 0.2844 1.1797 -2.0219 -#> 0.7954 26.6022 -0.1761 -10.4019 -3.9853 -4.7054 -2.7099 -11.5866 -#> -4.8281 3.9997 7.6639 2.1329 1.7832 -3.4229 -4.4235 -8.0600 -#> -4.2906 0.4296 14.0283 11.0699 -2.6126 0.9977 -10.1659 -20.9791 -#> -0.2850 -4.6091 3.8426 -3.2056 1.4130 0.4385 -2.6187 -4.8739 -#> -5.6108 1.3174 7.9251 11.1445 2.7707 -7.1793 -7.8399 -3.0120 -#> 5.1576 -13.3771 0.9829 -5.2116 -4.3821 8.2142 -3.4522 -15.3296 -#> -4.0186 12.0413 -2.8010 5.3009 -1.6269 -7.3934 0.1458 3.5200 -#> 0.1809 -1.2963 14.7856 4.9618 -1.2995 1.4195 -6.5237 -2.4909 -#> 16.6593 -5.1476 4.8693 7.8189 -4.1561 -3.6614 1.0279 9.9730 -#> -1.9016 5.1086 4.6128 -1.8447 0.4608 10.1100 -4.6261 4.2095 -#> 4.9385 -8.9146 -0.5647 -3.1531 6.9549 -17.0622 -2.5873 -1.0333 -#> -5.8255 4.3723 14.0465 9.6723 -9.6586 6.4133 -1.3056 -12.5534 -#> -5.5134 2.5385 3.7693 -1.7086 -4.3682 -3.3993 -7.5111 9.0573 -#> -4.5008 -2.2101 3.2887 9.2156 8.1258 14.3153 -6.4282 -0.2755 -#> 10.0607 -4.0007 4.2165 8.6627 -5.6962 6.5382 -0.7017 -3.3520 -#> 10.9792 -12.7363 -11.1796 6.1025 8.0711 2.8154 -0.6251 8.5030 -#> -15.3810 -0.3471 6.0013 8.3160 -2.4042 3.0223 -16.8555 -6.2519 -#> 12.9764 -4.6472 1.4249 8.6086 3.9251 5.5342 1.4575 -9.6606 -#> -7.7859 -0.0378 -8.7754 0.2117 -11.5043 -5.5715 3.8264 0.7867 -#> -5.8436 5.1906 -2.9890 12.7835 -2.3970 3.9694 -2.1704 -7.6511 -#> -11.8085 -0.1376 -7.4560 4.4743 3.5176 -7.5724 -16.7047 -10.5319 -#> 15.3922 -4.8711 -13.4026 11.7450 2.0306 9.9591 15.9956 -2.2847 -#> -1.6474 2.2722 0.9578 -0.9675 -0.1793 8.5340 6.2574 1.9746 -#> -0.1732 0.8481 5.0246 10.0355 2.5423 4.7380 -4.1744 -5.7309 -#> 5.0698 -3.8620 -1.4083 10.5425 10.8409 3.9629 7.7026 12.7015 -#> 2.9825 -5.9775 -5.2800 7.1343 4.5012 4.4923 2.3546 -3.2652 -#> -4.3361 -7.6220 10.1317 2.4227 3.2601 6.8644 4.4243 5.2520 -#> 17.7917 -0.3951 -4.4227 -0.2346 -1.7326 -9.4963 -5.2619 -0.7878 -#> 5.3519 0.6308 -1.0294 -0.9731 6.0362 0.5765 -8.1187 -3.5761 -#> 0.3248 -3.5005 3.3166 1.9265 1.7660 -1.5276 6.8794 -1.3940 +#> Columns 1 to 8 -4.9538 17.0664 0.1018 0.8898 0.1483 4.9361 -17.0535 0.7465 +#> -9.7028 0.3020 7.2783 -8.4018 -7.3146 13.2967 6.0084 2.0292 +#> -3.1330 3.2949 -2.3359 -11.3128 -4.5336 -8.9633 8.6274 2.9212 +#> -4.3949 -1.9623 3.8915 8.5586 -0.3818 11.3559 -0.0292 15.9034 +#> -1.2517 4.6891 -6.5938 4.2879 5.1477 3.8499 -13.1017 -5.5372 +#> 10.8488 -4.1179 13.7386 18.8632 -6.0685 -11.5582 -0.9416 4.5622 +#> 10.9829 -4.9632 -8.2331 -8.5790 -11.2950 -5.5146 13.6615 -17.2773 +#> 2.2473 -10.0643 -17.8956 1.4249 -12.5284 -5.8794 -0.0446 -3.2953 +#> -10.8570 6.2512 22.7700 -1.5846 11.3006 9.3694 1.1273 15.1241 +#> -8.0787 1.4591 -0.7498 1.4827 11.0985 -3.2901 0.8047 -0.2792 +#> -0.1312 12.3284 -10.1663 -3.1726 7.2779 -0.9327 -4.0697 -18.1894 +#> -3.4733 0.3368 -4.1347 -10.1425 6.5577 1.3228 -3.3832 3.3631 +#> -8.1466 -4.9133 4.4610 1.3269 3.4562 10.2674 10.4606 9.2153 +#> 4.7160 -0.3078 -6.4491 4.5003 4.2012 19.7595 4.4831 -9.6957 +#> 0.5640 7.5643 -3.3368 -3.7371 1.6558 2.1598 -3.9336 0.7328 +#> -1.8770 -12.2776 -5.6247 -2.0085 -0.2937 1.3901 5.8792 -0.3626 +#> 2.8689 6.4288 9.9220 -6.9003 11.5421 -5.9807 -3.5995 9.9424 +#> 16.9640 -5.0639 -9.7111 13.6573 -20.1911 -3.9469 0.8993 -5.9625 +#> 5.5202 6.4470 -0.0795 8.9561 -8.2949 -12.2004 1.2568 -6.1407 +#> 5.0151 -10.5003 8.3629 10.6703 4.6395 3.9177 -4.2513 6.3293 +#> -6.0909 -3.8411 1.6986 -7.4332 -5.3671 9.3974 
-#> [ … omitted: previous build's printed tensor values (slices (5,.,.) and (6,.,.), Columns 1 to 48) … ]
+#> [ … omitted: regenerated printed tensor values for the rebuilt article (same slices and column layout; only the numbers differ) … ]
+#> 3.3173 12.8945 1.9848 5.9713 -8.0577 14.2965 3.3777 -2.9870 +#> -1.3728 -11.5388 -0.5859 -5.7856 6.0864 1.7767 -6.1268 11.7812 +#> 8.0477 3.2329 -3.9264 3.0232 1.9102 -11.9499 4.4760 -6.6128 +#> -1.8114 7.0128 -1.0925 -2.4541 1.5321 -4.8009 -8.8199 -3.8976 +#> 12.8671 -10.3419 0.9053 -1.1208 3.1236 -2.3496 6.1610 -10.7058 +#> 3.6526 5.5662 6.3228 5.2560 -3.3483 -2.6746 5.1082 4.0885 +#> -16.7589 -4.8014 5.0005 -11.4625 -12.0761 11.7755 -7.3408 3.9676 +#> -2.7970 -3.5067 -9.6463 2.9909 3.1489 -9.3044 4.1732 -4.4003 +#> -3.6463 -2.6350 -0.8322 15.4660 -7.5054 -4.4430 -8.8983 -7.1481 +#> 6.9156 -5.6635 -18.2293 -2.0972 8.0925 3.2215 -0.3246 -8.5379 +#> 6.6599 -0.1653 7.0766 -6.8543 7.7798 -6.0151 12.1312 -11.5232 +#> 4.2149 -1.2537 8.6473 -6.1466 2.5240 -3.1094 -2.6999 -1.8230 +#> -6.6593 1.3472 2.6683 -6.7798 -3.1435 9.7068 1.5972 -1.4003 +#> -2.5564 -0.9439 4.1342 -0.1172 5.3395 -4.2704 -2.6884 -5.3049 +#> -1.3948 -2.7909 -0.5867 6.2574 -11.6177 -7.3170 -2.7714 -0.0627 +#> 0.8082 5.2609 -3.2268 9.1061 -1.3636 -1.6273 4.1140 1.9453 +#> -0.5227 2.1425 13.7563 4.0745 3.7256 15.3566 8.3517 4.5250 +#> -1.5137 -0.4835 -7.8734 -9.9395 3.9390 5.2584 -9.2616 -4.4245 #> #> (7,.,.) = -#> Columns 1 to 8 -12.5637 -2.5921 7.1782 7.8186 12.1642 -5.9426 -14.8531 1.3177 -#> 2.1248 -5.9844 -4.4564 -4.5092 6.7187 4.8098 -7.4867 -1.2874 -#> 1.5000 -12.1292 8.3217 -5.3953 -9.2065 -5.9762 -2.2805 7.4073 -#> 0.2294 -16.4882 3.5766 2.2640 -5.6411 11.0628 -1.8238 1.7027 -#> 7.3974 -2.1059 0.7667 -1.1699 -13.5659 0.0552 -7.4127 -2.4198 -#> -5.3633 -3.2434 9.4996 0.8949 -7.2516 3.3304 -2.1083 6.2119 -#> -6.0371 0.9534 5.7248 0.9747 3.8753 -9.6830 1.2595 4.4478 -#> 4.3532 4.1490 1.4650 5.5049 -6.8201 0.3387 1.8417 -2.3344 -#> 5.4369 -3.8337 -12.7340 3.3148 -13.8978 -11.2822 -6.7416 -6.7818 -#> 6.7436 -0.0121 3.2164 -8.8355 2.4412 8.5234 0.2344 -11.6883 -#> 4.8147 -10.3121 -5.8911 7.6287 -3.2987 -9.7721 2.5174 7.5296 -#> 2.0386 4.1301 -1.2932 6.6238 0.4111 -3.3190 8.2287 -0.0189 -#> 0.6954 1.3338 5.9806 4.5208 14.4822 -2.7630 -2.8924 10.0675 -#> -8.4399 -1.8747 4.0748 -3.2390 -3.1167 11.0646 0.7673 2.3934 -#> 6.4320 -4.3108 4.2115 5.2095 -3.3331 -4.7085 0.3097 10.0811 -#> 1.0512 -2.9661 2.0181 5.1407 3.9240 1.8381 -7.2879 3.1933 -#> 6.5491 -4.1914 -1.2071 4.5877 7.1293 -2.2568 -3.5221 0.0526 -#> 2.4886 2.6609 0.6132 -5.0607 8.6768 0.5913 3.3273 4.5410 -#> -4.4869 4.9565 -0.4279 3.9000 -4.1421 -4.8384 -11.0614 -1.9895 -#> 4.3772 -0.2396 3.2004 3.2212 3.7800 5.5176 -7.6825 -2.1287 -#> -9.9844 3.5585 3.2863 -5.5902 -4.6006 3.6855 8.6799 1.5601 -#> 1.8843 0.4628 1.8192 0.8578 2.7260 -5.9450 3.4451 -7.6032 -#> 6.0221 -3.7656 -3.4224 -1.3820 9.8885 2.1165 5.3021 7.6218 -#> 7.4587 -8.7724 4.6601 3.4141 12.8465 3.5597 5.6033 5.5195 -#> -7.8602 4.5715 0.3401 -4.5892 -0.1992 -5.6559 6.1851 7.1525 -#> -2.7145 0.3003 8.5194 -4.0813 -7.0842 -0.4283 -10.4691 -2.9705 -#> 1.7503 -1.6545 7.4607 6.6784 -1.7863 -1.5975 7.0052 3.1186 -#> 7.2097 4.8913 1.6908 -8.0299 7.3147 13.8606 -6.3935 -0.7843 -#> -6.8344 8.7484 3.1693 3.3031 3.8666 -3.8317 4.2813 1.1445 -#> 5.0369 3.8190 -10.9977 3.7152 14.4262 3.8348 -17.4962 -0.2185 -#> -2.1229 -1.1563 3.0345 1.8111 -3.2802 -11.0495 3.9942 0.6429 -#> 7.0119 3.9208 -0.6389 -1.0716 5.4505 14.2304 -1.4294 9.7966 -#> -3.1686 -0.6396 -6.7988 0.9020 4.8284 8.9844 6.6045 -1.4290 +#> Columns 1 to 6 -8.3951e+00 -2.8141e+00 -1.9402e+00 4.1384e+00 1.0136e+01 2.4281e+00 +#> 6.8307e+00 5.9611e+00 -2.6984e+00 1.1535e+00 6.4386e-01 -7.2545e+00 +#> -3.2027e+00 3.9534e-01 5.7615e+00 1.7997e+00 9.1843e+00 
7.1464e-01 +#> 6.6893e-02 6.7879e+00 3.1322e-01 5.5399e+00 -5.6604e+00 2.4444e-01 +#> 1.1373e+00 -5.3004e+00 -2.5642e+00 9.0171e+00 4.3727e+00 3.4009e+00 +#> -5.0005e+00 5.9274e+00 6.4470e+00 -5.4805e+00 -5.0443e+00 -2.2729e+00 +#> -7.1226e+00 -6.0422e+00 7.0493e+00 -7.1967e+00 7.0638e+00 1.3255e+01 +#> 2.2497e+00 -7.3121e-01 2.7929e+00 9.6507e+00 1.9365e+00 5.8614e+00 +#> 8.5673e+00 7.4503e+00 -3.3146e+00 9.1761e-01 -1.7096e+01 -8.8513e+00 +#> -2.9284e-01 -4.4896e+00 -2.9050e+00 -8.0677e+00 -7.6166e+00 -5.5766e+00 +#> -1.0739e+01 1.3708e+01 -1.3877e+01 3.0163e+00 -3.8852e+00 -4.8990e+00 +#> -6.1831e+00 -3.2277e+00 1.4503e+00 1.3461e+01 1.0566e+00 4.1018e+00 +#> -1.4416e+00 -6.2414e+00 1.0434e+00 -1.1244e+01 -1.7559e+00 -1.1035e+01 +#> 3.9959e+00 -1.0495e+01 -1.5808e+01 -1.0375e+01 -1.0629e+01 2.0521e+00 +#> -3.5685e+00 -5.4997e+00 1.0150e+01 -8.0230e-01 6.7681e+00 5.7076e+00 +#> 1.3212e+00 -2.3734e+00 1.8750e+00 1.2222e+01 9.9166e-01 -7.7316e+00 +#> -1.3964e+01 2.8276e+00 2.8477e+00 -5.8115e+00 -2.0284e-01 -3.3701e-01 +#> -2.2911e+00 -8.8825e+00 1.6204e+01 3.5824e+00 -3.4929e+00 -2.7017e+00 +#> 2.4032e+00 3.4200e-01 3.1894e+00 -1.3613e+00 1.2333e+00 -1.0706e+01 +#> 7.3336e-01 -1.9942e+00 4.9378e+00 -1.3575e+01 -4.5079e+00 -9.7759e-01 +#> 4.8102e+00 -2.5514e+00 -3.8803e+00 -5.5959e+00 -1.7002e-01 -3.0349e+00 +#> -1.5298e+01 9.8673e+00 1.3495e+01 8.0240e+00 6.9426e+00 4.0904e+00 +#> -3.2232e+00 -1.3324e+01 3.6674e+00 3.2886e+00 1.9104e+00 5.8670e+00 +#> 8.5357e+00 -3.3991e+00 2.0941e+00 7.5265e+00 -8.3860e-01 5.0003e+00 +#> -1.7020e+00 -6.0616e+00 -1.8175e+00 8.2393e+00 -3.9429e+00 -2.9742e+00 +#> 9.7033e+00 -1.0581e+01 3.0951e+00 -6.3763e+00 2.1652e+00 -4.9442e+00 +#> 3.4387e+00 -8.0203e+00 1.1168e+01 -9.4117e+00 -7.6029e-01 5.4250e+00 +#> -3.9966e+00 3.7018e+00 -8.7184e-01 -2.3836e+00 -6.4874e+00 2.2617e+00 +#> 9.0030e+00 -6.0216e+00 -4.0307e+00 -8.2122e+00 4.1802e+00 -4.5851e-01 +#> -5.5599e+00 -8.9511e+00 -1.0650e+00 -3.7465e+00 -4.4242e+00 -4.9428e+00 +#> 8.7741e+00 -3.1887e+00 -5.6206e+00 -7.5571e+00 -3.7323e+00 3.9213e+00 +#> -3.6121e+00 -2.5497e+00 -7.9240e+00 -1.0469e+01 -1.3243e+01 -1.4155e+01 +#> 5.2936e-01 5.5162e+00 3.9372e+00 5.6735e+00 8.3649e-01 8.5373e+00 #> -#> Columns 9 to 16 1.3615 6.7430 -9.0831 2.4756 3.7019 4.5959 -9.2389 2.4258 -#> -6.5283 4.2182 -14.7588 6.0469 -13.5433 0.6464 -14.0126 -0.1585 -#> -0.4165 -2.1957 9.4785 16.3252 -11.8770 -4.1678 -7.0924 3.0644 -#> 5.4771 11.0229 -6.5639 -17.6075 -7.7505 -4.6961 8.8753 6.9847 -#> -4.9094 3.5675 -1.0285 6.8008 -0.9435 -2.4753 -5.8107 -11.1148 -#> -1.3169 3.8941 6.4620 0.5569 -11.8652 1.5728 -10.2236 -9.3686 -#> 3.9423 -0.3688 5.2345 -9.8630 -3.5721 10.8983 -11.1943 -4.6832 -#> 0.7959 -1.5588 -8.7820 3.3694 -5.9139 6.2685 1.9696 -11.8555 -#> -0.0112 -8.3372 13.6339 3.5996 -6.6341 -3.1870 -2.4487 -17.8097 -#> -3.1110 -0.1339 -5.4582 3.0029 5.9676 1.6402 4.2996 -1.7442 -#> 3.0221 -7.6143 2.1927 12.5149 -17.3180 -4.4666 -6.2207 -5.8665 -#> -1.7066 -5.6932 -2.6972 -9.1662 10.9101 0.1779 2.6959 -7.8873 -#> -5.7650 2.6454 2.5285 -7.5337 1.6641 5.1793 -4.0750 9.8508 -#> 4.6218 -4.3656 -16.7795 -0.9792 -1.7471 -0.4700 0.4574 4.0469 -#> -0.6444 -3.1186 1.3270 -7.1979 -10.8263 2.7043 -9.5467 2.8626 -#> 0.1377 -1.6119 -4.6476 -1.0249 4.4360 -10.2520 -0.8962 7.3968 -#> 5.6244 -4.7285 1.2343 -6.5620 0.1256 9.5666 1.6124 -5.8117 -#> 9.5864 -12.8068 6.7174 -0.9492 -11.1470 2.9382 2.7348 7.2478 -#> -1.3516 -9.1671 -9.7949 4.4277 4.5211 -10.2511 -1.0271 -3.8912 -#> -3.4992 -9.1626 2.4570 -2.6582 -6.1909 16.2682 -2.3738 2.5297 -#> 
-4.0920 -8.7836 -0.3173 1.5961 -15.0735 2.7017 6.4111 -4.5299 -#> 0.8594 -4.6585 1.7853 3.0959 4.5689 1.6610 6.1223 1.7822 -#> 11.7365 -2.0738 6.6759 -0.0544 -4.5379 -2.9902 6.5058 -0.1362 -#> 1.2749 7.0817 3.4731 -4.2940 -1.4348 -4.4507 6.7489 -5.4719 -#> 4.1983 -13.3311 -3.0335 4.3130 -6.7434 -3.9381 7.5854 2.0121 -#> -1.6503 -2.0314 -6.4046 6.1112 3.9059 6.7474 3.3489 7.1325 -#> 5.0405 4.1070 -10.8778 -5.6209 -0.6011 -14.6451 -2.2682 -9.9665 -#> -0.4697 -4.2318 -14.3726 11.9567 7.8060 4.3527 10.3033 2.6359 -#> 4.1752 -10.0265 -8.4880 -1.4998 -6.4839 3.2508 4.9882 0.2567 -#> -7.8847 0.6969 0.6589 9.8041 0.4428 12.1619 -8.5297 -7.3596 -#> 13.3403 1.4642 -7.9892 -7.5376 4.4967 7.2595 1.9894 -8.7621 -#> -3.1823 8.1989 -1.4497 0.5435 -1.0498 1.2208 0.1718 4.7092 -#> -3.5594 -1.3115 -3.3914 2.5498 -1.9613 1.6140 -5.5963 -1.0378 +#> Columns 7 to 12 -6.8527e-01 -6.6750e+00 -2.5422e+00 2.6952e+00 -1.0972e+01 2.0498e+00 +#> -1.5139e+00 1.7229e+00 9.3764e-01 -6.3206e+00 6.7317e+00 -3.6483e-01 +#> -2.4645e+00 -3.0016e+00 -2.7610e+00 1.4589e+00 -5.5854e-01 5.8668e+00 +#> -1.9516e+00 9.1403e-01 2.0154e+00 -1.5233e+01 -6.8524e+00 -1.2154e+00 +#> 2.8007e+00 -9.3648e+00 -5.1805e+00 -3.4628e+00 -1.4907e+01 7.1098e+00 +#> -2.7267e-01 6.1041e+00 -1.0484e+01 -3.1599e+00 7.6270e+00 7.5906e+00 +#> -5.0598e+00 4.8635e+00 -1.6033e+00 9.4448e-01 8.7785e+00 -2.4621e+00 +#> 2.0608e+00 -1.4733e+01 5.3305e-02 -9.5760e+00 9.6377e-01 7.2839e+00 +#> -3.5491e-01 2.4551e+01 -4.5958e+00 -1.3547e+00 6.3573e+00 -1.7185e+01 +#> 8.4866e+00 5.0311e+00 5.1737e+00 1.0702e+01 9.6019e+00 -3.6383e+00 +#> 3.4809e+00 4.5234e+00 -1.1526e+01 -8.0553e-01 -1.7719e+01 -1.2959e+01 +#> -6.5285e+00 -8.9087e+00 -3.7754e+00 -1.3278e+00 4.3877e+00 -3.1284e+00 +#> 1.6558e-01 -8.8985e+00 2.9997e+00 5.4439e+00 1.1366e+01 6.5668e+00 +#> 7.6865e+00 -1.3078e+01 6.7320e+00 -4.4706e+00 -2.6305e+00 9.3138e+00 +#> 5.8655e+00 -6.2844e+00 -5.2472e+00 4.3737e+00 -4.8817e+00 -1.6727e+00 +#> -9.8008e+00 -5.6359e+00 -6.3795e-01 5.9198e+00 8.7384e-01 -2.0822e+00 +#> -4.0524e+00 -1.4944e+00 1.5081e-01 -4.3437e+00 -6.3435e+00 -7.3562e+00 +#> -3.2516e-01 -2.4081e-01 -1.8803e+00 -3.3390e+00 8.2726e+00 4.6187e+00 +#> 9.4028e+00 2.6572e+00 1.1060e+01 -1.1542e+00 -6.4159e-01 -2.1363e-01 +#> -7.8265e+00 1.1543e+01 -8.8785e-01 -1.0960e+01 -1.6193e+00 6.3225e+00 +#> 1.1244e+00 -4.2009e+00 9.6153e+00 6.5037e+00 8.6408e+00 -1.0582e+01 +#> -7.4054e+00 -3.4970e-01 6.4795e+00 8.2708e+00 -3.5835e+00 -4.6855e+00 +#> -6.9420e+00 3.8119e-01 -1.6974e+01 -7.0275e+00 -1.0874e-01 1.5721e+00 +#> 5.3262e+00 7.1328e-01 -3.1265e-02 -1.0126e+01 9.5178e-01 3.5983e+00 +#> -5.6046e+00 -3.9303e+00 -5.9183e+00 7.1817e-01 -1.2804e+01 5.4781e+00 +#> -9.3229e+00 1.5218e+00 -2.3454e+00 -2.5600e-01 9.8657e+00 -5.3319e-01 +#> 3.1509e+00 -3.6389e+00 -1.7747e+00 -9.9224e-04 1.2906e+00 6.6964e+00 +#> -1.7609e+00 1.1773e+01 -3.5776e+00 3.8019e+00 -1.1530e+01 2.8797e+00 +#> 4.1346e+00 5.8012e-01 6.1191e+00 -8.9286e+00 1.2805e+00 -4.7190e+00 +#> -1.0720e+00 8.4785e+00 -7.7949e+00 -1.6484e+00 1.0007e+01 -7.1135e-01 +#> 9.3990e+00 -7.7036e+00 -3.3507e+00 -2.6082e+00 2.5511e-01 6.0275e-01 +#> -6.6124e+00 5.0587e+00 -1.8642e+00 1.0930e+01 -3.0234e+00 -3.4596e+00 +#> 6.0355e+00 1.4031e+00 5.5284e+00 -5.4199e-01 -8.0791e+00 -8.9905e+00 #> -#> Columns 17 to 24 -6.1352 0.7855 5.2496 -10.7202 1.1006 6.4437 3.0041 2.3786 -#> -11.1167 10.4239 0.2350 8.9435 -3.6779 4.8471 0.7140 3.8333 -#> 9.4847 0.8165 -3.8158 6.3221 -10.7197 0.8852 -5.6699 9.1540 -#> 4.0945 -3.8234 -0.7001 -1.1315 0.4381 -3.8137 4.0863 6.7160 -#> 
1.1146 -8.9878 2.3985 -0.9147 -0.8046 2.3567 -10.0703 -2.1854 -#> 1.8469 -2.6686 -8.5182 -5.4251 6.5746 -7.8745 3.0007 -3.6743 -#> -4.1822 -9.5143 -4.3121 7.2566 2.6041 0.5658 -2.4030 0.4245 -#> -11.7281 -0.9243 5.2365 -2.4933 0.3209 -1.5823 -7.3054 0.5439 -#> 1.9351 5.7115 -14.8850 5.1515 -9.2994 0.5744 -10.7098 2.5380 -#> 3.1209 -1.7344 6.0623 -0.9722 1.9104 -4.2705 -4.6320 2.4032 -#> 12.0585 4.4151 4.1575 -0.8302 -9.3978 3.5050 -6.6041 20.1393 -#> 5.0416 -3.7321 -0.9123 1.5950 8.2849 1.2795 3.4207 -7.8130 -#> 1.9756 -6.4791 7.8072 2.6787 4.0306 -9.3902 5.1214 1.6635 -#> -4.7427 9.7381 -14.8424 12.1369 -2.9461 -2.3381 -12.2591 -4.3829 -#> -6.2556 1.9847 -1.4314 6.6051 2.5084 -5.3805 -4.3812 0.1995 -#> -0.7258 -1.5201 -5.3685 5.4863 1.4409 -1.3454 -0.1501 -5.4024 -#> -4.7067 -4.7743 2.3876 2.4734 2.3301 -0.7725 -2.1543 -1.3893 -#> 1.5617 4.9953 -10.5440 10.1063 7.0310 -3.7266 -2.5219 -3.8014 -#> -3.4102 11.2320 -3.4485 3.0704 -6.8103 5.8674 -6.1492 3.8883 -#> -8.3267 1.2774 -1.3571 1.9461 1.9410 -9.3266 -4.4550 -0.7253 -#> 8.4757 -2.4299 -3.2148 4.9527 4.3251 7.9487 -3.1187 0.4195 -#> -1.0336 -5.3404 6.4247 -7.5989 1.9064 -6.2831 1.7634 -6.0175 -#> 10.4051 -10.9130 6.5344 -3.2940 1.8782 -6.1374 9.9976 -2.0857 -#> 1.6580 5.6035 1.0687 8.1476 -8.0305 -10.1498 -7.4877 0.9982 -#> 1.5491 5.0754 0.2085 -4.2722 6.8387 10.7773 5.8851 -0.3984 -#> -4.5910 4.0407 -3.8389 -8.2975 5.9267 1.9849 8.1880 -9.6735 -#> 4.7822 -11.5824 0.7062 5.2624 8.5684 -6.3034 -4.5508 0.3868 -#> -2.6230 9.7855 1.0649 7.0056 -4.0832 -7.7116 -5.7604 2.8494 -#> -5.7510 -1.5148 1.2035 2.5692 2.1899 10.1006 -14.8982 -4.4679 -#> -9.8924 1.5419 14.2471 -0.8770 -0.2646 -5.9197 -6.6279 6.6351 -#> -6.1204 -2.9526 -3.7854 -0.1987 0.0084 -0.0103 -6.4451 -8.6840 -#> -4.4767 -1.5587 -0.5513 -7.6515 9.3017 -6.6286 6.1258 -8.2799 -#> 2.5453 10.2133 1.3841 -2.8850 2.5809 1.0825 1.8629 9.2428 +#> Columns 13 to 18 5.9678e+00 -9.7474e+00 7.0909e+00 -1.1148e+01 -6.0623e-01 1.9961e+00 +#> 5.0613e+00 -1.6512e+00 1.4175e+00 -6.6100e+00 -6.8726e+00 -1.2160e+01 +#> -3.7654e+00 -1.9567e+00 2.1503e+00 1.7443e+00 -2.3711e+00 3.1486e+00 +#> 2.0366e+00 -8.2696e+00 -1.3348e+00 -1.1307e+00 3.6387e+00 2.6933e+00 +#> -2.4525e+00 7.7111e+00 6.7042e+00 -8.0989e+00 5.3495e-01 -3.4528e+00 +#> -6.7053e+00 -6.0537e+00 -4.5964e+00 4.8832e+00 -4.2174e+00 -6.8970e+00 +#> 7.5400e+00 -1.0487e+01 -1.0757e+01 7.1635e+00 -2.6506e+00 -9.8790e+00 +#> -1.2661e+01 -9.7000e-01 5.1582e+00 -4.5297e+00 4.7732e-01 7.3898e+00 +#> -7.0311e-01 2.1044e+01 -2.3547e+00 7.8111e+00 -1.7919e+00 -3.6426e+00 +#> 3.4680e+00 4.1260e+00 2.2663e+00 7.7358e+00 -1.6395e+00 6.7834e-01 +#> -2.2727e+00 -6.5998e-02 7.1943e-01 1.0914e-01 -2.6693e+00 1.7026e+01 +#> -5.2759e+00 7.5612e+00 -1.1254e+01 -7.7761e+00 -1.5269e+01 6.3323e+00 +#> -4.1168e+00 -5.3404e+00 1.0368e+01 4.1417e+00 -6.7626e+00 2.8983e+00 +#> 6.3700e+00 5.6963e+00 1.0627e+01 -3.0159e+00 -8.9321e+00 -9.8520e+00 +#> 1.0976e+01 -1.9774e+00 4.4388e+00 -6.9221e+00 -8.7519e+00 2.5778e+00 +#> -1.3879e+01 7.2988e+00 6.7988e-01 -1.7957e+00 1.4793e+01 2.3950e+00 +#> 1.0804e+01 -1.8943e+00 -6.7267e+00 4.4527e+00 -6.4357e+00 1.0441e+01 +#> -7.0792e-01 -7.7788e+00 1.0123e+01 -4.6441e+00 3.1377e+00 1.5371e+00 +#> -7.0618e+00 -9.1846e+00 8.2692e-01 -4.3087e+00 1.3263e+01 3.5648e+00 +#> 1.4579e+00 3.5876e+00 -6.5496e+00 8.9657e+00 8.7569e+00 -1.5873e+01 +#> 3.7011e+00 1.2739e+00 3.5723e+00 -8.5728e+00 -4.6815e+00 4.1472e+00 +#> -2.5111e+00 -3.9447e+00 -1.0414e+01 2.5352e+00 5.2065e+00 6.6207e+00 +#> 3.9797e+00 -3.9310e-01 -1.1669e+01 -2.9539e+00 
-9.0991e+00 -4.8434e+00 +#> -2.0420e+00 -8.9539e+00 4.0166e+00 -4.5039e+00 6.7315e+00 4.5821e+00 +#> 7.4207e+00 -5.7509e+00 -1.1452e+01 -1.2195e+01 1.0693e+01 -8.0132e+00 +#> 3.5018e+00 1.6697e+00 -3.0522e+00 -7.9866e+00 -6.6618e-01 -7.1744e+00 +#> -5.5304e+00 -9.5552e+00 3.5623e+00 -2.9378e+00 4.5727e+00 -5.8968e+00 +#> -8.9109e+00 -1.9113e+00 -3.4822e+00 1.2512e+00 1.4414e+01 8.4879e+00 +#> 4.1685e+00 3.5275e+00 2.0563e+00 -5.5765e+00 -3.3186e-03 -6.8468e+00 +#> -6.9165e+00 2.2722e+00 5.2716e+00 -3.7791e+00 -8.1951e+00 3.2392e+00 +#> -9.2950e-01 1.1593e+01 5.8807e+00 5.0382e+00 -5.1739e+00 -6.4292e+00 +#> -9.6982e+00 1.0057e+01 3.2115e-01 -6.2153e+00 1.0875e+01 1.0603e+01 +#> -7.7052e-01 4.3582e+00 -6.7134e-01 -9.0197e+00 7.6164e+00 7.8076e+00 #> -#> Columns 25 to 32 2.0304 6.7152 -4.7547 12.5145 -4.5016 -4.0179 -1.2330 9.1559 -#> 14.2909 1.4769 0.6636 8.6500 -2.1221 1.7183 -5.6604 9.8726 -#> -2.7097 4.5264 4.3187 -7.1046 -0.9723 5.9788 -5.5353 -1.4749 -#> -8.6412 7.7463 -5.2986 16.9666 14.3188 21.4422 3.3956 1.1343 -#> -7.0317 6.6929 -6.0369 0.6207 -3.9455 1.8005 -0.5599 1.8508 -#> 2.0361 2.7921 -4.3200 -2.3661 2.9078 1.1786 5.4417 -8.1194 -#> -7.8485 -8.0762 3.2615 -5.6470 0.3354 5.4427 -2.8058 -4.3386 -#> 3.7953 3.5336 -4.9533 -5.2637 -10.4107 -12.4107 2.0646 9.5287 -#> -2.7825 -4.6040 9.5309 -2.1843 5.5711 2.9852 -1.9312 -6.5354 -#> -3.6551 -9.9503 -5.3903 3.4260 1.4111 -0.6740 2.9563 -2.8675 -#> -1.4813 3.6377 2.2588 2.0900 5.0809 5.0369 5.1234 9.0396 -#> 5.2403 -6.4151 6.7793 -3.0877 -12.6484 -1.7808 1.7761 -0.5235 -#> 1.3175 5.3689 -0.2347 -7.3404 -6.2206 -0.3964 -2.9847 -12.7738 -#> 0.5948 -2.7842 -9.6191 8.1294 0.9181 9.2779 -5.9096 8.0563 -#> -0.3378 2.3956 4.6235 -0.7978 -5.5830 -0.4862 -0.2478 -1.7941 -#> 0.0686 10.2917 -0.9817 5.5907 3.5988 -1.3564 2.4055 -0.9436 -#> 2.0567 6.2534 -2.2078 -6.1647 0.5606 -2.7191 3.2266 -11.3656 -#> 4.2932 -0.4873 8.3800 -2.3044 1.8511 8.4027 6.6752 -16.2451 -#> -2.3357 -2.6602 7.1631 0.3165 3.9131 -7.8905 -3.5363 -4.4079 -#> 5.4912 -7.6143 -2.0331 -2.5894 3.9076 -18.9411 3.6695 -13.5628 -#> 1.0256 0.8843 -1.2594 -5.4993 1.2796 5.1276 -2.8727 1.2982 -#> 0.8742 6.3731 5.7563 0.4166 -2.4657 -6.1710 5.1675 7.3948 -#> 1.8751 -3.1597 6.6459 5.1926 1.8573 -12.9828 -10.6121 -5.7677 -#> -3.8644 -11.0444 -4.1877 -8.5450 1.3069 -0.6498 -4.9872 -11.6447 -#> -1.1720 6.4177 5.2251 4.5926 2.8544 5.3088 4.0496 0.8798 -#> 8.5778 2.0765 -3.0492 2.6605 5.9763 -1.0507 12.9023 2.8845 -#> -2.5537 6.7471 -2.3166 0.1698 7.3152 4.9079 -5.6434 -18.8992 -#> -2.4438 -2.8801 -10.5118 4.0220 -6.5804 0.2695 -3.0997 -9.9757 -#> -1.8960 0.7844 5.0251 5.0129 3.5039 -1.9246 -2.0302 11.9824 -#> 3.2717 6.6071 -7.0335 -4.5295 -23.8735 -9.4977 0.9193 2.7063 -#> -2.8423 -12.5636 8.4244 -2.9791 -3.4124 3.0636 1.3172 -1.1011 -#> 9.9325 0.5829 -6.6425 -2.1746 -5.1121 -9.9372 -3.6588 -1.7340 -#> 8.5402 -6.0357 -0.9178 7.4435 2.9151 8.6923 3.0402 3.1449 +#> Columns 19 to 24 1.1778e+01 2.0303e+00 -1.7353e+01 1.0082e+00 4.7677e+00 4.2631e+00 +#> -1.0307e+00 1.6196e+01 -2.0418e+00 -1.0223e+01 1.6997e+00 7.1068e+00 +#> 7.2591e+00 3.5400e+00 -9.1838e-01 2.0646e+00 4.0366e+00 -6.4190e-01 +#> -6.5149e+00 -8.8414e-01 -4.7157e+00 3.4367e-01 2.8840e+00 1.1695e+01 +#> 9.9104e+00 -2.4610e+00 -2.5285e-01 2.5838e+00 -2.4302e+00 -1.3316e+01 +#> 5.6723e+00 4.1739e+00 1.2386e+01 9.6780e+00 9.1150e+00 1.1069e+01 +#> 7.3253e+00 5.6702e+00 6.9528e+00 -9.8993e+00 -6.4625e+00 -3.9740e+00 +#> -1.2725e+00 7.3350e+00 7.4261e+00 3.6785e+00 -5.1651e+00 4.0601e+00 +#> -1.1368e+01 4.8053e+00 -9.5350e+00 -2.2408e+01 
-6.8687e+00 6.5463e+00 +#> -2.6451e+00 -7.2305e+00 1.5720e+00 6.9827e+00 1.7672e+00 -2.5382e-01 +#> 1.2897e+01 4.4085e+00 -1.2364e+01 3.8108e+00 1.1252e+01 9.4853e-01 +#> 7.0377e+00 5.0779e+00 -3.5618e+00 -2.5290e+00 -5.6112e+00 3.3708e+00 +#> -1.3505e+00 -2.9384e+00 -1.5031e+01 2.5628e+00 -2.9015e+00 1.3181e+01 +#> -1.6910e+01 -1.5070e+01 -1.4999e+01 5.2842e+00 -3.4203e+00 7.3843e+00 +#> 3.2007e+00 -1.3544e+01 -7.9596e+00 1.5601e+00 -8.5167e-01 7.3819e+00 +#> -5.0569e+00 -1.6224e+00 2.3452e+00 3.4707e+00 1.5098e+00 -5.0575e+00 +#> 7.4184e-01 -3.1959e+00 1.3687e+00 -6.6558e+00 -2.4546e+00 7.8908e-01 +#> 1.0274e+00 -2.0857e+00 -1.5564e+00 1.5786e+01 -4.8150e+00 -3.8841e+00 +#> 6.8518e+00 8.3873e+00 -1.3750e+00 1.6364e+00 7.8108e+00 4.5983e+00 +#> -1.8995e+00 3.0577e+00 5.1708e+00 7.0246e+00 6.0169e+00 7.6654e+00 +#> 9.4888e+00 8.9839e+00 -1.8928e+00 -2.1697e+01 2.0800e+00 4.9818e+00 +#> 1.0982e+01 1.7532e+00 -1.2756e+00 -4.3803e+00 -1.3254e-01 -2.1600e+00 +#> 1.3998e+00 7.1849e+00 5.7207e+00 8.3973e-01 -8.0522e+00 -7.9664e+00 +#> 1.1255e+01 3.7207e+00 -6.4622e-01 3.3669e+00 -4.1269e+00 2.9135e+00 +#> -6.4567e-01 6.5039e+00 9.7643e+00 8.0798e+00 -5.7328e+00 -3.1476e+00 +#> -3.4270e+00 3.8212e+00 4.5256e+00 -6.6063e-01 8.2981e+00 2.4167e+00 +#> 5.6869e+00 1.5453e+00 3.2006e+00 1.0211e+01 1.0770e+01 6.4138e+00 +#> 1.0142e+01 -5.2491e+00 -6.3918e+00 2.9710e+00 1.9272e+00 -6.7866e-01 +#> -4.5972e+00 5.7021e+00 7.1677e+00 -4.3380e+00 -3.1425e+00 -3.9138e+00 +#> -4.2965e+00 -6.1071e+00 -1.3520e+01 -4.8378e-01 9.3025e+00 9.0143e+00 +#> -8.2621e+00 1.7109e+00 -2.1493e+00 -1.5014e+00 -2.9105e+00 1.1343e+00 +#> -8.7325e+00 -1.2430e+01 -1.0334e+01 -1.4823e+01 4.4890e+00 -3.1203e+00 +#> 8.8041e+00 -3.0359e+00 -4.3546e+00 1.2777e+00 -1.8532e+00 -8.5947e+00 #> -#> Columns 33 to 40 -17.0318 -20.0478 -11.5813 -0.8116 0.6021 -9.7061 -5.2541 -7.5142 -#> -10.3421 -3.6284 -8.4016 -2.5618 5.3978 -9.7860 10.5355 -5.2481 -#> -4.6916 1.1132 5.8259 4.2710 -0.0016 8.1836 5.5236 -10.1070 -#> 7.3221 6.5633 -3.7985 3.9131 10.9585 -17.2966 -5.5431 11.2191 -#> -11.6022 -1.6023 -4.5938 -1.9300 -0.4376 -7.2064 -7.8986 -0.3702 -#> -14.3030 2.5388 1.2509 -2.7564 -10.5961 -3.5264 -12.7500 -22.1331 -#> 12.6453 7.9274 -8.0327 -1.6566 5.7491 -5.4175 -3.5723 -1.2638 -#> -8.9598 -3.8361 5.2146 -4.4693 -8.4153 -8.1980 4.2895 -4.7867 -#> -3.0841 1.6341 -0.0632 4.4309 3.3442 1.7426 11.5374 -17.0239 -#> 3.0663 11.0731 -2.8260 -5.2820 5.2010 4.3655 0.5538 1.9090 -#> -3.2084 5.2607 -0.3004 3.4364 2.2088 1.5744 6.6938 -7.3395 -#> -6.4610 5.9028 -1.2363 -2.9217 0.4373 1.8325 -3.0021 3.0616 -#> 2.2177 -4.2643 7.7422 -2.1623 -5.7365 8.1062 -10.1076 5.0764 -#> 0.9173 8.6032 1.4039 4.1027 3.0920 -22.4057 7.6579 4.9080 -#> 0.8167 -10.3278 1.2432 -2.0476 5.4842 -10.7095 2.1405 -3.7050 -#> 1.7803 -22.8740 -1.0902 2.1489 9.6227 -4.6850 -5.7256 11.1705 -#> 14.4816 -9.7424 3.4600 3.2909 -16.3311 -5.0685 -5.5932 9.4876 -#> -5.1677 2.5401 5.2312 -6.5760 3.9057 -9.6523 2.2021 3.4456 -#> -6.3658 -2.7545 17.0195 -9.7690 1.8630 9.1328 3.1055 3.9391 -#> 3.1821 -9.0758 10.9002 -3.9246 -12.8605 1.5466 11.6374 0.1441 -#> -3.7770 18.8824 4.3626 -2.8148 -4.7249 4.2569 8.2437 -7.0431 -#> 3.5508 -6.7707 0.8483 13.8608 -2.7573 -3.5297 1.8513 2.4026 -#> 7.9127 9.2728 -9.9353 3.2328 3.6214 7.5038 0.4667 9.2028 -#> 8.3492 6.9488 7.7495 -9.1828 -5.8187 11.7171 -7.3384 -3.0282 -#> -5.8212 9.4377 3.9020 0.9103 -9.7031 4.4426 17.3763 0.3510 -#> -3.7819 -11.6569 7.6439 2.5470 -7.9286 -3.5027 0.1709 7.4708 -#> 4.4982 15.0765 -2.6611 -7.6918 -1.7578 -0.8912 -7.4240 2.7192 
-#> 5.5335 7.5726 8.4664 -5.9477 -3.5248 11.7820 0.5398 13.5853 -#> -5.4325 -2.7990 3.2807 8.9660 -0.7183 -13.0419 13.5544 2.4505 -#> 0.3033 -9.9993 -11.8875 8.8428 -9.5778 5.9503 3.4623 -12.5212 -#> -3.6057 8.8081 -1.8937 2.5456 -5.3832 -5.6471 1.6094 4.5927 -#> -10.4760 0.6477 3.8404 2.4465 -5.8038 7.6152 -6.2678 -5.6440 -#> -9.8051 10.2489 -6.0445 -3.1732 1.9759 6.9409 6.3362 -10.0141 +#> Columns 25 to 30 4.4787e+00 -9.3065e-01 -1.7664e+01 -2.0360e+00 -5.1864e+00 6.4398e+00 +#> -4.8408e+00 6.0288e-01 1.0376e+01 -8.4517e-01 -1.2970e+00 1.5763e+00 +#> -3.2578e+00 -3.0883e-01 -6.7075e+00 4.2618e+00 9.8819e+00 3.7765e+00 +#> 6.2440e+00 3.6400e+00 1.8097e+00 1.8065e+00 2.3145e+00 3.4393e+00 +#> 3.8853e+00 -2.0669e+00 -8.9789e-01 2.2191e+00 -1.1850e+01 2.7285e+00 +#> -2.5323e+00 -4.2356e+00 -3.8099e+00 -4.0471e+00 8.5190e+00 1.1156e+01 +#> -7.6452e-01 5.5126e+00 -6.9312e-01 -1.9476e+00 6.5130e+00 1.4541e+00 +#> 1.2041e+00 -4.7862e+00 -8.5009e+00 1.4784e+01 -2.8520e+00 2.3623e+00 +#> 7.9495e+00 -9.9608e+00 8.4147e+00 1.2598e+01 3.6789e+00 -4.3962e+00 +#> -4.4529e+00 -3.3200e-01 5.6498e+00 -8.8855e+00 -5.5853e-01 -2.5021e-01 +#> -1.0037e+01 -4.7682e+00 -8.7609e+00 -6.5646e+00 -1.2336e+01 -7.2553e+00 +#> 3.2602e+00 8.5436e+00 -2.7624e+00 -1.2816e+01 -4.3327e+00 -1.2763e+00 +#> 1.1232e+00 -1.1936e+00 -1.1497e+01 5.6471e+00 1.0081e+01 -7.3018e+00 +#> 7.0586e-01 1.8574e+00 -8.6821e+00 -1.9661e-01 -7.3537e+00 -4.4018e+00 +#> 1.0473e+01 5.9140e+00 -6.1157e+00 1.4409e+01 -5.4832e+00 2.4922e+00 +#> 5.1351e+00 -3.7543e+00 -3.7731e+00 1.1105e+01 -1.1417e+00 -3.9379e-01 +#> 4.8027e+00 -1.0496e+01 1.6196e-01 -1.3399e+01 -1.2744e+00 4.3051e+00 +#> 1.8298e+00 3.9455e+00 -4.9313e+00 9.3004e+00 1.2386e+00 8.9960e+00 +#> -4.7810e+00 3.1256e+00 -1.9344e+00 -3.4038e+00 -6.6274e+00 1.4678e+01 +#> 1.1509e+01 -4.3661e+00 1.2026e+00 -3.1667e+00 4.0481e+00 7.6234e+00 +#> -6.3613e-01 7.2668e+00 -4.9612e+00 -5.3380e+00 -5.9953e+00 -5.6242e+00 +#> 3.7431e+00 5.4591e+00 6.8118e+00 -1.9543e+00 5.9023e+00 6.3021e+00 +#> -6.0931e-02 3.4162e+00 -8.6933e+00 -9.5165e+00 3.5879e+00 1.5674e+00 +#> 1.5797e+00 1.3125e+01 -1.2312e+01 7.6845e+00 -9.6787e-01 1.5916e+01 +#> 4.9078e+00 4.9698e+00 6.7853e+00 2.3861e+00 -1.7449e+01 8.7595e+00 +#> 1.7868e+00 4.2192e+00 3.7081e+00 1.8986e-01 -4.2665e+00 1.5277e+00 +#> 5.9242e+00 -8.3228e+00 -5.6850e+00 1.0597e+01 -2.9350e+00 1.1962e+01 +#> 1.3618e+00 -1.8300e+00 -1.6569e+00 -7.2417e+00 -3.9782e+00 9.8272e+00 +#> 5.0099e+00 5.2188e+00 7.7963e+00 3.9949e+00 -8.8119e+00 -1.3700e+00 +#> -8.3802e-01 -6.9503e+00 -1.6966e+01 3.3190e+00 2.3536e-01 4.5285e+00 +#> 1.6660e+00 -4.0373e+00 -5.4824e-01 8.1201e+00 9.1323e-01 -6.5349e+00 +#> 6.4955e-01 -3.9583e+00 3.6544e+00 4.9247e+00 -1.5390e+01 3.1637e+00 +#> -2.0783e+00 1.0263e+01 8.6899e+00 7.7819e+00 -4.2334e+00 4.9994e+00 #> -#> Columns 41 to 48 -0.5757 -1.3542 7.1357 -2.2870 -2.8012 3.0657 5.0447 -15.8321 -#> 0.1535 0.3076 17.1299 8.4095 -2.6762 5.0797 1.7882 -4.3992 -#> 0.4592 -3.5133 -1.8708 0.8405 1.9575 -8.9528 7.9915 -4.7535 -#> -3.2901 -2.6491 -4.4144 9.6573 -10.8269 10.2468 -8.4412 -1.5340 -#> 0.9673 10.0159 4.7422 9.9742 -0.5255 -8.9552 2.3655 -1.5989 -#> 1.6231 -6.4483 10.5318 -4.9875 1.7577 9.2817 -2.5071 -4.0078 -#> 1.4573 -3.2758 -1.6678 5.4910 12.7946 5.9670 -6.0930 -4.4879 -#> -5.1614 -4.0308 0.7992 -6.0837 -0.3915 -9.4512 -1.2862 -0.5319 -#> 0.4310 -0.9557 -6.4016 2.8146 0.5966 -2.8999 1.7562 5.5143 -#> -5.1265 4.3655 4.9410 14.8872 -2.8013 -9.8312 7.9825 -4.1148 -#> 1.8815 5.4926 -7.3356 2.1816 3.0059 3.2300 6.5676 -2.0128 -#> 
1.1780 8.0908 -12.3752 1.3859 6.4601 -2.0588 -7.2472 -5.7050 -#> 5.9115 -0.6530 0.9260 -8.3563 -0.4082 0.7834 -5.9661 -0.5926 -#> 11.0495 11.7859 3.0577 -4.4598 -4.8840 22.0590 12.1955 4.7068 -#> 20.0985 -4.0410 4.3649 10.0583 6.1918 -1.7880 -1.9070 -6.3624 -#> -5.8225 0.6477 0.3729 -0.1409 1.5204 7.7822 0.8015 -3.2746 -#> -2.1589 0.5456 11.5298 11.6881 -2.7155 -5.6491 0.4327 7.7103 -#> 1.0508 -0.0593 8.5939 -11.2689 -8.9750 2.3534 -7.5014 0.4162 -#> -2.2612 3.0069 6.5653 -7.6077 13.5167 -8.2681 7.5790 3.1596 -#> 5.2002 -9.1811 12.2834 -23.1594 -3.5273 -1.1709 12.7870 4.4330 -#> -0.0276 3.1669 -2.0981 -4.0901 4.8303 -8.5770 1.6487 8.6810 -#> 7.7900 -6.3951 -12.2464 2.8105 11.5417 14.2515 -2.9096 2.5551 -#> 1.0313 -10.4730 -10.2800 -12.7194 5.4654 -2.9958 4.4049 -11.4623 -#> -4.4022 3.6837 2.7533 -4.6619 -4.6065 -3.2298 21.5926 3.3128 -#> 5.3238 3.2650 1.3261 13.1070 -3.3967 -4.7740 -5.1533 3.8692 -#> 1.0264 -4.9872 7.8718 -5.9630 3.3917 -7.7953 -2.6791 2.2528 -#> -7.6961 2.1876 3.5483 16.0663 -10.7377 17.4609 -0.0595 0.9353 -#> -3.0517 3.1029 19.1180 -5.9878 -10.0726 0.6224 3.0131 4.7465 -#> 9.6907 7.8143 1.0314 12.2337 17.0781 7.3258 8.1910 2.9945 -#> -3.3594 -1.7302 7.7383 5.9785 -6.6533 -7.8406 -8.4122 -0.4406 -#> -2.0650 -9.0354 -5.8994 0.9212 3.4177 -13.0390 -11.4290 -11.9110 -#> -2.7836 -11.7873 -3.5815 -5.7332 -11.7829 -13.6638 8.5906 -4.0360 -#> 5.6294 -4.0216 2.0273 4.5414 -5.5946 3.0184 3.4674 -2.7942 +#> Columns 31 to 36 -6.2444e+00 7.1131e-01 -1.4371e+00 -1.2998e+00 -1.4302e+00 4.2263e-01 +#> 3.9920e+00 3.3123e+00 -1.0602e+01 -8.0142e+00 6.2564e+00 2.8434e+00 +#> 1.7837e+00 5.2434e+00 -2.8463e-01 -2.8071e+00 1.8934e+00 -2.3110e+00 +#> -6.5888e+00 1.2508e-01 -1.4155e+00 -4.6978e-01 2.1861e-01 7.5095e+00 +#> 6.8007e+00 -1.5046e+00 -6.1754e+00 4.5389e+00 7.0344e+00 -4.3467e+00 +#> 9.0569e-01 -3.4367e+00 -3.2700e-01 6.2864e+00 7.2274e-01 1.6950e+01 +#> -5.0609e+00 4.0758e+00 -2.7577e+00 -2.0745e-01 -7.6169e+00 -2.4548e+00 +#> -1.1708e+01 8.7182e+00 -5.7260e-01 1.0934e+00 7.7361e+00 1.3312e+00 +#> 1.1148e+01 1.1147e+00 8.2015e-01 -6.9233e+00 -5.7323e+00 -2.5504e+00 +#> 3.9960e+00 -1.5409e+00 2.2143e+00 4.3206e+00 1.9112e+00 1.4457e+00 +#> 7.5939e+00 4.7057e+00 7.9776e+00 8.1253e+00 -9.5355e+00 7.5308e-01 +#> 6.5773e+00 -2.1522e+00 1.3564e+00 7.1834e+00 2.6551e-01 -4.0239e+00 +#> 1.2855e+00 2.5885e+00 1.2113e+01 -5.2584e+00 2.6398e+00 -3.7213e+00 +#> -7.6153e+00 4.0957e+00 -8.6184e+00 -7.9122e+00 -5.0349e+00 -8.7189e+00 +#> -8.2374e+00 1.4646e+01 3.1650e+00 -6.6946e+00 -3.0146e+00 -1.6389e+00 +#> -2.8947e+00 3.4498e+00 -1.5436e+00 -1.0615e+00 2.4674e+00 -4.9741e+00 +#> 1.4443e+00 -3.8404e-01 4.1229e+00 -3.5863e+00 -1.1883e+01 4.5949e+00 +#> -8.0185e+00 -4.4125e+00 6.3695e+00 5.6233e+00 -6.2819e-01 2.6117e+00 +#> 5.9543e+00 3.3249e+00 2.6931e+00 1.4289e+00 3.7362e+00 5.3448e-01 +#> -5.1573e+00 8.1121e+00 1.0221e+00 -2.4819e-01 -4.6568e+00 8.5596e+00 +#> -2.2423e+00 3.8916e+00 1.3687e+00 1.6066e+00 -2.5599e+00 -6.8182e+00 +#> 4.2081e+00 4.8813e+00 -5.4527e+00 1.7301e+00 -9.5751e-01 -6.8657e+00 +#> 7.3364e+00 4.9294e+00 9.4030e+00 1.2489e+00 -6.0483e+00 4.7255e+00 +#> -5.5124e+00 3.6873e+00 -2.6349e+00 3.2796e+00 2.6952e+00 3.8045e+00 +#> 3.3623e-01 1.3803e+01 5.5073e-01 -3.7316e+00 -9.7750e+00 7.7576e+00 +#> 2.6992e+00 -3.1129e+00 -8.2849e+00 4.5016e+00 5.2788e-01 1.0252e+01 +#> -1.2969e+01 4.4722e+00 -7.5532e+00 8.1663e+00 6.2965e+00 1.1526e+01 +#> 1.1833e+01 1.8134e+00 -3.7881e+00 3.1859e+00 1.8822e+00 3.6497e+00 +#> -3.2139e+00 1.1189e+00 -3.3406e+00 1.9521e+00 -3.6090e+00 
-1.6607e+00 +#> 5.7041e+00 3.0485e+00 3.8008e+00 3.1866e+00 1.5065e+00 1.7498e+00 +#> -4.0508e+00 2.9718e+00 1.2703e+00 -5.9761e+00 -3.8426e-02 -1.6752e+00 +#> 4.4547e-01 5.9566e+00 -1.0320e+00 -5.8180e-01 -4.0027e+00 5.1696e-01 +#> -1.9607e+00 5.9724e+00 -3.7509e+00 6.8757e+00 3.2013e+00 -5.5150e+00 +#> +#> Columns 37 to 42 -6.4819e+00 9.0894e+00 -7.8052e+00 -3.7613e+00 1.2816e+01 2.1493e+00 +#> 1.2992e+00 -3.9447e+00 3.9124e+00 5.9837e+00 -4.9299e+00 -5.7494e+00 +#> -4.2350e+00 5.8432e+00 -2.7184e+00 -9.1419e-01 8.4152e+00 -2.3614e+00 +#> 1.4124e+01 8.9136e+00 5.3466e-01 -1.3477e+00 -4.4206e+00 -9.8608e+00 +#> 8.3651e-01 7.7631e+00 -8.7169e+00 1.1039e+00 -9.6138e-01 6.6820e+00 +#> 8.6026e+00 -1.0605e+01 3.6970e-01 4.2069e+00 -2.0269e-01 5.4052e+00 +#> -5.2573e+00 1.4911e+00 9.9503e-01 -3.4705e+00 -1.9677e+00 8.3456e-01 +#> 1.4981e+00 1.1222e+01 -4.0447e+00 -6.1750e+00 1.3435e+01 -4.3735e+00 +#> 4.9577e+00 -7.6459e+00 1.1665e+01 2.9231e+00 -8.0358e+00 -1.6056e+01 +#> -7.7867e+00 -1.0212e+01 5.6575e+00 -1.1990e+01 4.8936e+00 1.1337e+00 +#> 7.9012e-01 5.6039e+00 -1.4291e-01 -4.8736e+00 1.1421e+01 4.9406e+00 +#> -9.1965e+00 7.6237e+00 3.1152e+00 6.2133e-01 1.2733e+01 -5.3896e+00 +#> 4.9871e+00 -5.2846e+00 -2.7870e+00 -1.3440e+01 3.1454e+00 5.3618e+00 +#> -6.3547e+00 2.7333e+00 -1.3748e+00 -1.8790e+01 -1.3703e+01 -1.8334e+00 +#> -3.5498e+00 1.4475e+01 -6.4595e+00 -8.0995e+00 7.7304e+00 -2.0251e+00 +#> 1.1512e+00 5.1444e+00 1.3033e+01 1.1946e+00 1.0004e+01 -2.5338e+00 +#> -3.1957e+00 4.4541e+00 -2.5417e+00 -2.4648e+00 1.3660e+01 4.8155e+00 +#> -4.0732e+00 -4.5985e+00 2.8042e+00 -2.7015e+00 4.3687e+00 2.9475e+00 +#> -7.9163e-01 -2.3531e+00 2.4937e+00 7.7356e+00 -1.3997e-01 -1.5672e+00 +#> 6.6296e+00 8.4515e+00 2.2220e+00 -1.7747e+01 -1.0495e+00 1.1736e+01 +#> -1.3087e+01 -4.4062e+00 8.5306e+00 -3.4212e+00 -5.2333e+00 7.4359e+00 +#> -1.4756e+00 1.2122e+01 -1.4518e+00 3.6727e+00 7.2365e+00 -5.3126e+00 +#> 2.8248e+00 -1.4126e+00 1.9080e+00 -1.7101e+00 7.1504e+00 -6.1774e-01 +#> 1.3368e+00 1.2437e+00 -1.8364e+00 1.5488e-01 7.3460e-02 2.2573e+00 +#> 7.9123e+00 1.2507e+00 5.1374e+00 -1.8863e+00 -7.5174e-01 -1.9342e+00 +#> 5.6390e+00 -1.0565e+01 1.2107e+01 -5.1673e+00 -4.0482e+00 7.2103e+00 +#> 5.0608e+00 -1.0691e+00 3.8633e+00 -1.1259e+01 3.7769e+00 5.3623e+00 +#> 1.6280e+00 -4.3256e-01 3.8420e+00 2.7479e+00 5.3882e+00 -5.7460e+00 +#> 1.6793e+00 -4.8205e+00 8.6883e-02 -5.4335e+00 -4.3271e+00 2.5010e+00 +#> -6.4518e-01 -7.1352e+00 2.1878e+00 -8.7050e+00 5.3164e+00 4.5168e+00 +#> 8.7106e-01 -4.2204e+00 -2.2179e+00 -8.3934e+00 4.2304e-01 -5.3060e+00 +#> -5.8044e+00 2.0624e+00 3.9457e+00 -1.6974e+00 1.7185e+00 -1.8120e+01 +#> -1.8008e+00 7.9389e+00 -2.0292e+00 9.9911e+00 -4.1638e+00 -9.4970e+00 +#> +#> Columns 43 to 48 8.4844e+00 -1.0993e+01 1.8227e+00 4.6265e+00 8.9364e+00 5.3606e+00 +#> -2.1073e-01 -4.0747e+00 -7.3497e+00 -9.5296e+00 2.0769e+00 4.8151e-01 +#> 5.1941e-01 -7.9306e+00 -5.7831e-02 2.7548e+00 1.1401e+00 1.6086e-01 +#> -4.2443e+00 -4.9963e+00 -1.8685e+01 -1.0191e+01 -3.2068e+00 3.8290e+00 +#> 4.5830e+00 2.9600e+00 2.3919e+00 2.4379e+00 2.1759e+01 5.7938e+00 +#> -3.6696e+00 -9.7537e+00 -1.6006e+01 1.6218e+01 9.8195e+00 5.1813e+00 +#> 4.2435e+00 1.1194e+01 3.7096e+00 -1.1631e+01 -1.0264e+01 -1.0055e+00 +#> 1.0176e+01 -1.1411e+00 -5.0666e+00 -5.7718e+00 -1.2300e+01 -5.1066e+00 +#> -9.5501e+00 -1.6391e+01 -1.4565e+00 4.7620e+00 -1.5637e+00 2.0570e+00 +#> -8.3540e+00 8.5855e-01 2.4663e+00 2.7962e+00 -1.3909e+00 2.4639e+00 +#> -3.8378e-01 2.8382e+00 4.2586e+00 -7.7955e+00 -1.3444e+01 1.4908e+00 
+#> -3.2816e+00 1.4829e+00 -9.8738e+00 2.7608e-01 -1.1751e+01 1.8662e+00 +#> -1.6035e+01 -1.1692e+01 -1.5651e+01 -1.1390e+00 -3.4574e-01 -4.6043e+00 +#> -1.1310e+01 4.9410e+00 -9.6355e+00 -6.4729e+00 -1.0246e+01 2.7205e+00 +#> 1.1236e+00 -1.0729e+01 1.0165e+01 -7.4414e+00 7.2387e+00 6.5626e+00 +#> 7.2329e+00 -1.1892e+00 5.7062e+00 -2.4368e-01 -5.6294e+00 -9.3834e+00 +#> -3.2415e+00 4.3069e+00 3.3141e+00 3.5591e+00 4.4030e+00 -1.5876e+00 +#> -2.2057e+00 -5.5644e+00 4.4626e+00 -2.2278e+00 5.4963e-01 3.4175e+00 +#> 6.0468e+00 2.5824e+00 -4.5323e-01 1.0158e+00 2.1676e+00 -3.2730e+00 +#> -3.4195e+00 -5.4786e+00 -9.8319e+00 1.1339e+00 -2.3785e+00 7.5867e-01 +#> -1.6855e+00 4.9282e+00 -2.3620e+00 -3.9824e-01 -8.8625e+00 -6.2472e+00 +#> 6.7280e+00 -1.2828e+00 -2.5061e+00 -8.0683e+00 3.2559e+00 1.0335e+01 +#> 3.1020e-01 3.3637e+00 1.4163e-01 -6.4928e-01 -1.0285e+01 2.7480e+00 +#> 9.8778e+00 -4.2855e+00 7.4078e-01 9.6354e+00 8.0766e+00 7.8449e+00 +#> -3.0624e+00 1.1307e+01 4.1798e+00 -1.0048e+01 -1.5100e+01 1.7636e+00 +#> -6.1965e+00 -5.9411e+00 -4.2883e+00 2.8164e+00 -4.2836e-02 1.2683e-02 +#> -5.2290e+00 -1.1419e+01 -2.5172e+00 9.8837e+00 8.6055e+00 -2.7170e+00 +#> 9.8374e+00 4.8829e+00 5.9017e+00 1.3813e+00 1.0165e+01 3.8461e-01 +#> -4.0441e-01 2.4763e+00 1.3952e+00 -7.5240e-01 -6.3666e+00 2.2227e+00 +#> -8.6997e+00 -2.0698e+01 2.4564e+00 1.0931e+01 7.2985e+00 7.0016e+00 +#> 3.6372e+00 -4.2753e+00 9.1222e-02 1.1007e+01 6.9259e+00 -6.6803e+00 +#> -4.3660e+00 -3.8195e-01 7.0135e+00 -2.6839e+00 -7.3719e+00 -5.8731e+00 +#> 2.5388e+00 2.1216e+00 1.3341e+01 -3.4585e+00 -7.8873e+00 1.6598e+00 #> #> (8,.,.) = -#> Columns 1 to 6 1.6767e+01 7.3242e+00 2.2688e+00 -1.1885e+01 -4.0734e+00 3.3318e+00 -#> 1.1166e+01 1.7645e+01 -6.4430e+00 -4.1363e+00 -2.6064e+00 -1.7742e+00 -#> -5.5551e+00 8.0560e-01 -4.0773e-01 6.0480e+00 2.0295e+01 -3.6027e+00 -#> -1.4078e+00 -2.3412e+01 1.2144e+01 5.2885e+00 7.3681e+00 2.1041e+00 -#> 2.5697e+00 -6.0522e+00 1.5430e+00 -1.5490e+00 4.9428e+00 7.4056e+00 -#> -1.9167e+00 -6.7456e+00 7.2300e+00 1.8173e+00 1.1700e+01 4.0283e+00 -#> -4.1001e+00 -1.3626e+01 7.4635e+00 -2.5361e+00 -1.2605e+01 4.6264e+00 -#> -3.6862e+00 -8.2448e+00 -1.7109e+00 -8.2455e+00 -6.6692e+00 -1.0588e+00 -#> -1.0147e+01 -5.4059e+00 -9.8364e+00 1.0415e+01 1.1184e+00 1.0907e+01 -#> 6.4281e+00 6.9009e+00 1.4842e+01 -2.3334e+00 -2.5984e+00 -9.1516e+00 -#> 6.5785e+00 -3.3714e+00 -1.7497e+01 4.7739e+00 7.6406e+00 1.7642e+01 -#> -5.3044e+00 2.2508e+00 -1.0266e+01 4.9423e+00 -1.1276e+01 5.0438e+00 -#> 5.4556e+00 4.8300e+00 -1.0827e+01 -8.4740e+00 2.4237e+00 2.5224e-02 -#> -1.2140e+01 4.3315e+00 1.0711e+01 1.6573e+01 7.7428e-01 -8.4217e+00 -#> 4.2287e+00 -1.9173e+00 -5.8150e+00 -7.3568e+00 1.4070e+01 -4.5091e+00 -#> 5.3989e+00 6.2967e+00 2.2752e+00 -7.8721e+00 3.6903e+00 -4.5436e+00 -#> -3.9085e+00 -6.5628e+00 6.9691e+00 -1.7535e+01 2.0640e+00 2.8361e+00 -#> -1.1818e+00 -1.7437e+00 -7.5470e+00 1.0356e+00 3.6821e+00 -9.2207e+00 -#> 4.5794e+00 -5.4332e-01 -1.0079e+01 1.0920e+00 -7.0514e+00 -6.2585e+00 -#> -1.0217e+01 7.5117e+00 -1.0014e+01 -1.2400e+01 1.1899e+01 -6.6089e+00 -#> -2.2282e+01 -5.4358e+00 1.4162e+00 1.1134e+01 3.8861e+00 -6.9433e+00 -#> -4.4862e+00 -4.4780e+00 3.9681e+00 -8.0532e+00 -1.5911e+00 -2.8791e+00 -#> 3.1209e+00 2.8594e+00 4.0225e+00 -2.8797e+00 1.2730e+01 -3.6122e+00 -#> 8.4232e+00 -6.2612e+00 1.2269e+01 5.9408e+00 1.1985e+01 -5.5113e+00 -#> -4.0361e+00 -4.6405e+00 -1.3339e+01 6.2889e+00 9.8516e+00 -7.8917e+00 -#> -6.0944e+00 5.6074e-01 6.4615e+00 -2.0297e+01 4.6597e+00 -1.1273e+01 -#> -8.1253e+00 
-1.5362e+01 1.7911e+01 1.8621e+00 5.5949e-01 2.8031e+00 -#> 2.0541e+01 2.3189e+00 6.7424e+00 -4.7597e+00 -6.9405e+00 -1.1465e+01 -#> -1.7620e+01 1.1792e+01 -7.7298e+00 5.4394e+00 -2.1601e+00 -4.3825e+00 -#> 1.9074e+01 1.3653e+01 -1.3568e+01 -1.2175e+01 -1.7947e+01 1.0558e+01 -#> -1.0617e+01 -1.4844e+01 -1.3629e+00 -3.0530e+00 -4.5964e+00 -8.6009e-01 -#> -6.2399e+00 1.8821e+01 -1.3541e+01 6.2537e+00 3.8780e+00 2.3052e+00 -#> 1.0811e+01 8.7568e+00 -6.4851e+00 1.1809e+01 -4.8284e+00 7.9604e+00 +#> Columns 1 to 6 -2.1991e-01 1.6673e+01 -3.4127e+00 -1.0752e+00 4.6394e+00 -1.1888e+01 +#> 1.8849e+00 3.4665e+00 1.0323e+01 1.4550e+01 -1.1867e+00 -5.2968e+00 +#> -2.2804e+00 7.7377e+00 4.6782e+00 2.2482e+00 1.8199e+00 8.1519e+00 +#> -5.7545e+00 1.5397e+00 -1.7574e+00 -5.6203e+00 -6.7317e-01 -1.5588e+01 +#> 9.1743e+00 1.5010e+01 6.8509e+00 1.1166e+01 -4.2284e+00 4.9119e+00 +#> 5.7229e+00 -5.1175e+00 -9.0236e+00 5.2948e+00 -6.2189e+00 8.7238e+00 +#> -5.9541e-01 2.5129e+00 1.7511e+00 -7.8496e+00 1.3394e+00 2.0330e+00 +#> -3.7668e+00 7.1366e-01 -3.9535e+00 -1.3764e+01 1.6906e+01 -5.6384e+00 +#> -5.5460e+00 -4.9393e+00 2.0917e+01 1.6005e+01 -2.2261e+00 -1.0742e+00 +#> -9.8901e+00 -2.7049e-01 -1.5903e+00 -2.6724e+00 1.6916e+00 6.9718e+00 +#> 9.0495e+00 6.0712e+00 1.5977e-01 4.7806e+00 4.8632e+00 -1.1852e+01 +#> -1.1590e+00 8.4845e+00 1.4225e+01 -8.9999e+00 8.8118e+00 -1.1974e+01 +#> -9.8565e+00 -1.9384e+01 1.1098e+01 -3.7382e+00 3.4371e+00 1.3571e+01 +#> -1.5114e+01 -5.4141e+00 -3.7146e-01 -1.1213e+01 -1.2086e-01 -1.0811e+00 +#> -3.7298e+00 1.7604e-01 8.6379e+00 -4.3931e+00 1.6817e+01 -4.1764e+00 +#> -1.0573e+01 -1.9006e+01 -8.9126e+00 -3.1380e+00 -1.1939e-01 1.0582e+01 +#> -1.0518e+00 8.2879e+00 1.0026e+01 5.0245e+00 1.1725e+01 -1.4497e+01 +#> 8.8632e+00 -1.3124e+00 -4.7036e+00 8.3910e+00 1.0005e+01 3.0517e+00 +#> -5.9235e+00 -7.5394e+00 -7.5574e+00 1.8704e+00 -2.7083e+00 2.5931e+00 +#> -2.4205e+01 -6.4595e+00 8.2175e+00 3.9585e+00 -6.7921e+00 1.4578e+01 +#> -1.1433e+00 -1.5799e+00 9.5144e+00 4.8407e+00 5.2871e+00 -6.1489e+00 +#> -2.5805e-01 8.9328e+00 1.7978e-01 -5.2197e+00 7.0472e+00 -2.5667e+00 +#> 6.3651e+00 5.0874e+00 6.1812e+00 -4.6365e-01 -4.4905e+00 5.4756e+00 +#> 6.1972e+00 3.8184e+00 3.5411e+00 -1.1327e+01 4.2856e+00 2.0127e+00 +#> -1.9146e+01 -1.0122e+00 2.7957e+00 1.4016e+00 -1.2378e+01 2.9539e+00 +#> -4.9825e+00 -1.1890e+01 -5.8867e+00 2.0944e+00 -7.1109e+00 5.0450e+00 +#> -7.1506e+00 -4.4769e+00 -3.5039e+00 -7.2694e+00 -9.4921e+00 3.4874e+00 +#> -2.1425e-01 -8.6799e-01 -1.4497e+01 -5.6408e-01 -1.3404e+01 9.1972e-01 +#> 4.8577e+00 -3.8483e+00 6.9185e+00 -1.1345e+00 6.2568e+00 2.4765e+00 +#> -3.0364e+00 -1.3705e+01 1.0795e+00 -2.4379e-01 9.6982e+00 1.8064e+00 +#> 3.8231e+00 -8.2187e+00 1.3277e+01 2.2467e+00 -2.2437e-01 5.5701e+00 +#> -1.3268e+01 -6.1761e+00 -9.3416e-02 -1.0753e+00 -1.5743e+00 -5.4825e+00 +#> 9.7482e+00 1.4574e+01 2.8919e-01 -7.1233e+00 2.7676e+00 -1.8234e+01 #> -#> Columns 7 to 12 8.9290e+00 1.3536e+01 1.6096e+01 -7.1693e+00 1.2483e+01 1.1455e+01 -#> 5.2199e+00 5.6739e+00 1.1925e+01 6.1331e+00 3.9429e+00 -4.3705e-02 -#> -5.7627e+00 -6.5053e+00 -3.2291e+00 8.3001e+00 -7.4037e+00 -3.3170e+00 -#> -8.6931e+00 -1.4302e+01 -5.0089e+00 -8.2270e+00 -6.2948e+00 7.2968e+00 -#> -3.5095e+00 -5.2325e+00 7.1503e+00 1.1552e+01 -9.3108e-01 -7.8535e-01 -#> 9.4466e+00 4.1475e+00 6.0636e+00 9.1754e+00 6.2806e+00 2.5567e+00 -#> -1.3670e+00 -1.1705e+01 4.5845e-02 -9.9646e+00 9.0778e+00 1.5874e+01 -#> -8.5052e-01 -9.3265e+00 7.1684e+00 1.2459e+01 -7.0863e+00 -1.5340e+00 -#> -1.0967e+00 -6.8202e+00 
-4.1483e+00 4.5109e-01 7.2468e+00 -3.0846e+00 -#> 1.8900e+00 -2.9275e+00 1.2501e+01 -2.6666e+00 -1.0941e+01 7.5141e+00 -#> 1.2399e+00 -6.7585e+00 -3.3682e+00 9.8080e+00 -3.6451e+00 -5.7484e-01 -#> 2.3451e+00 3.7670e+00 -4.2033e-01 -5.0846e+00 4.7556e+00 -1.1006e+00 -#> 8.3667e+00 3.6975e+00 -1.0243e+01 -6.9477e+00 1.0840e+00 8.2389e-01 -#> -1.3075e+01 3.0193e+00 2.7116e+00 -1.9629e+00 5.1973e+00 -4.0322e+00 -#> 6.1567e+00 -2.2938e+00 9.9950e+00 -3.3615e+00 8.2121e+00 -7.6035e-01 -#> -2.1567e+00 4.1321e+00 2.0549e+00 -5.1987e-01 7.5985e-01 -2.0032e+00 -#> -2.2345e+00 3.1456e-01 7.0497e+00 3.9596e+00 4.0817e+00 -7.9889e-01 -#> 1.2495e+01 -6.3476e+00 -5.9824e-01 -5.4388e+00 -8.6041e+00 -4.8455e+00 -#> 1.6624e+01 -1.1884e+01 -2.9734e+00 1.5225e+01 -7.3970e+00 5.7698e+00 -#> 9.8040e+00 4.3454e+00 7.0386e+00 5.5264e+00 -8.3666e+00 3.2071e-01 -#> -2.4268e+00 -1.5725e+01 9.1409e+00 -4.2469e+00 -2.6802e+00 3.1987e+00 -#> -6.8343e+00 8.6813e+00 -9.9641e+00 1.4666e-01 6.9162e-01 -8.6080e+00 -#> 9.1200e+00 6.2205e+00 4.8410e+00 -2.9351e+00 4.7084e+00 -1.6139e+00 -#> 1.8724e+01 3.2264e+00 -6.2778e+00 6.9400e+00 -2.6209e+00 9.7244e+00 -#> -2.7610e+00 -5.3795e+00 1.8431e+00 -2.5759e+00 4.9165e+00 -7.0577e+00 -#> -3.9320e+00 -4.4469e+00 1.7978e+00 -2.6139e+00 -2.1787e+00 -2.3098e+00 -#> -9.8916e+00 -1.7884e+01 6.5494e+00 1.5247e+00 9.8584e+00 4.3776e+00 -#> 1.6337e+01 -9.0102e-01 -2.9395e-01 1.1285e+00 -1.3418e+01 -2.7005e+00 -#> -5.5151e+00 4.3336e+00 -3.2838e+00 1.0588e+00 6.9050e+00 -2.5221e+00 -#> 1.2281e+01 2.1200e+01 1.0232e+01 -9.2227e+00 3.8279e+00 3.2877e+00 -#> 6.6581e+00 -8.2416e+00 -1.6229e+00 -5.3015e+00 4.4514e+00 2.3358e+00 -#> -4.5829e+00 1.2662e+01 -1.1380e+01 2.7443e+00 4.3965e+00 -4.6810e+00 -#> 1.2336e+01 1.2162e+01 8.9652e-01 -1.9086e+00 1.8242e+00 -3.1326e-01 +#> Columns 7 to 12 6.0727e-01 -1.7450e+01 1.1066e+01 -1.9776e+01 -9.8348e+00 1.0180e+01 +#> 2.9319e+00 2.8190e+00 3.7577e+00 7.5215e+00 2.1050e+00 -9.6115e+00 +#> -1.2627e+00 2.1427e+00 5.3874e+00 3.1262e+00 5.5355e+00 4.2578e+00 +#> 6.3264e+00 -7.9869e-02 3.2069e+00 6.9090e+00 -1.5014e+00 -3.4749e+00 +#> -7.2077e+00 -1.1029e+01 -1.0292e+00 -6.8186e+00 6.8771e+00 2.7332e+00 +#> 1.2670e+00 4.8276e+00 -5.7310e+00 -1.3312e+00 3.3062e+00 -4.3557e-01 +#> 2.1689e+00 1.1304e+01 2.6170e+00 1.7815e+01 -3.4710e+00 -1.7491e+00 +#> -3.0858e+00 -2.5723e+00 -2.0404e+00 7.3582e+00 -1.0074e+00 -6.5688e+00 +#> 7.2729e+00 9.6421e+00 5.5380e+00 1.1065e+01 9.7760e+00 8.9167e+00 +#> -5.9246e+00 -3.4873e+00 -1.1045e+01 9.4299e-01 5.3879e+00 -2.0889e+00 +#> 4.6444e+00 -1.8008e+00 -1.3506e+01 -7.0103e-02 -4.6622e+00 -1.5777e+00 +#> -1.1722e+00 5.8696e+00 -4.5934e+00 3.6396e+00 -4.0691e+00 7.9076e+00 +#> -1.1641e+01 8.5029e+00 1.3203e+00 1.4275e+01 -1.2416e+00 1.5976e+00 +#> -5.9534e+00 -3.2740e-01 -3.0885e+00 1.0430e+01 9.7626e-01 -3.2039e+00 +#> 9.0722e+00 -5.2142e+00 4.9964e+00 3.0354e-01 5.1905e+00 1.5381e+01 +#> -3.9549e+00 4.7760e+00 -2.9696e+00 -1.2167e+00 -1.2935e+01 2.0562e+00 +#> 7.0679e+00 -1.2820e+01 6.6629e+00 5.4050e+00 1.1801e+00 4.4521e+00 +#> 9.7868e-01 8.8204e+00 -4.0238e+00 7.7560e+00 3.2546e+00 1.4237e+01 +#> 1.0233e+01 3.4770e+00 -3.6457e+00 -1.6441e+00 6.4198e+00 2.2741e+00 +#> 3.6677e+00 -6.1098e+00 -5.5732e+00 -7.4927e+00 5.7719e+00 5.5598e+00 +#> -1.0086e+00 1.4438e+00 -5.7456e+00 3.3578e+00 -1.4560e+01 -1.2021e+01 +#> 1.4861e+01 6.8248e+00 6.8424e+00 -9.8675e+00 7.4436e+00 1.0655e+01 +#> -8.8432e+00 1.5606e+01 9.1526e+00 2.3035e+01 -1.4971e+00 7.7885e+00 +#> 9.3633e+00 1.5908e-01 -1.1322e+00 4.9238e+00 1.3105e+00 7.8418e+00 
+#> -3.7211e+00 1.3144e+01 9.0097e+00 4.4444e+00 -5.7108e+00 4.3818e+00 +#> -1.1863e+01 -2.7300e+00 -9.1255e+00 -6.2644e+00 -1.3065e+01 -1.0623e+01 +#> -8.3062e+00 -9.0573e-01 -7.3139e+00 -4.5755e+00 -8.2685e+00 3.2447e-01 +#> 1.5757e+00 3.8333e+00 -4.7134e+00 -6.4036e+00 5.3045e+00 3.4003e+00 +#> -1.6621e+00 -4.7173e+00 -4.3790e+00 -3.9702e+00 -1.6218e+00 -2.1331e-02 +#> -3.9922e+00 6.5052e+00 -6.6319e+00 1.3835e+01 3.8747e+00 1.1379e+01 +#> -9.1570e+00 9.7005e-04 -9.3493e+00 2.5065e+00 -8.8862e+00 -5.2180e+00 +#> 2.7796e+00 2.4641e+00 -1.2143e-01 -1.6348e+00 7.7353e+00 -7.5575e+00 +#> 8.0157e+00 8.2147e-01 2.1710e+00 -3.7390e+00 2.7682e+00 1.3253e+00 #> -#> Columns 13 to 18 -6.5864e+00 -1.0712e+01 1.9184e+00 -7.9737e+00 8.0341e-01 -6.6428e+00 -#> 3.9817e+00 -4.6648e+00 -2.4271e+00 1.6329e+00 3.4401e+00 5.3616e+00 -#> 2.2465e+00 -5.6628e+00 4.1712e-01 2.4878e+00 8.9113e+00 1.2079e+01 -#> -1.0932e-01 -1.2413e+00 -1.4202e-02 1.0870e+00 -3.4432e+00 1.7965e+01 -#> -1.1225e+00 -3.2036e+00 7.4699e+00 -1.8162e+00 3.0432e-01 -2.7530e+00 -#> 6.3094e+00 1.5723e+00 -4.2568e+00 -1.4982e+00 1.3841e+00 -1.3459e+01 -#> -1.0469e+00 -3.9008e+00 -1.3422e+01 1.2605e+01 -2.4243e-01 -6.4081e+00 -#> 4.0135e+00 5.0065e+00 7.7655e+00 -3.5427e+00 -7.3873e+00 1.6300e+00 -#> 5.7995e+00 1.1139e+01 -6.9868e+00 3.4718e+00 3.2083e+00 8.6517e+00 -#> 3.8380e-01 -4.6602e+00 -5.0067e-01 -4.6188e-01 -1.0016e+01 7.1506e+00 -#> 8.1005e+00 -8.0245e+00 1.1189e-01 2.7982e+00 1.0693e+01 2.4759e+00 -#> -4.8095e+00 4.7866e+00 -5.4752e+00 -2.1411e+00 4.1932e+00 -1.1467e+01 -#> 4.3658e-01 -5.7922e+00 -3.6600e+00 4.0172e+00 -4.1100e+00 -3.1343e+00 -#> -6.9230e+00 1.6722e+01 1.2365e+01 -1.2803e+01 4.0601e+00 -9.9373e+00 -#> -5.9905e+00 -4.2004e+00 -3.8107e+00 1.3444e+01 -6.3351e+00 1.8679e+00 -#> -7.2217e+00 1.9600e+00 1.8366e+00 -9.0836e-01 6.3407e+00 2.5531e+00 -#> 5.2419e+00 -1.9273e+00 2.6189e+00 -1.6239e+00 -5.9923e+00 -8.3155e+00 -#> -5.4557e+00 1.4535e+01 -1.5117e+01 7.2507e+00 -4.4490e+00 -1.6060e-01 -#> 1.4703e+00 9.8438e-01 4.8113e+00 -8.3850e+00 8.4060e+00 7.6364e+00 -#> 2.7511e+00 1.2935e+01 1.8571e+00 -2.6687e+00 -1.7088e+01 4.8235e+00 -#> -3.5270e+00 7.8432e+00 -3.4736e+00 2.9558e+00 -4.3210e+00 8.6346e+00 -#> 7.3180e+00 4.5550e+00 2.7375e+00 8.4454e-01 3.2313e+00 -8.0284e+00 -#> 3.5681e+00 4.2156e+00 -1.3016e+01 -2.6686e+00 7.8514e+00 -1.1472e+01 -#> 4.8989e+00 -3.6281e+00 -3.1882e+00 -9.6628e-01 -1.1654e+01 3.9920e+00 -#> -6.1137e+00 -1.6611e+00 7.9596e-01 -2.5879e+00 3.8247e+00 5.4991e+00 -#> -3.2835e+00 2.8457e+00 -4.2449e+00 5.1501e+00 -8.8733e+00 -4.2499e+00 -#> 2.7270e+00 3.2537e+00 -7.4448e+00 -3.9524e+00 6.0253e+00 -2.9487e+00 -#> 8.9148e+00 1.3280e+00 -4.6664e+00 -1.0784e+01 3.8554e+00 -8.9095e-01 -#> -5.5961e+00 6.6477e+00 8.5598e+00 2.1061e+00 6.4038e-01 5.2681e+00 -#> 1.1707e+01 -1.2155e+01 -5.7153e+00 2.8509e+00 -1.4552e+00 -1.5719e+01 -#> 2.7364e+00 8.6112e+00 -1.3925e+01 -7.8262e-02 -3.3054e+00 -1.2692e+01 -#> 1.5652e+00 -1.6843e+00 1.4623e+01 -5.7382e+00 1.3702e+01 -6.5341e+00 -#> 8.5536e+00 -2.3798e+00 -1.0280e+01 6.6825e-01 7.5417e-01 4.4648e+00 +#> Columns 13 to 18 -7.6920e+00 5.9269e-01 -7.1614e+00 9.5002e+00 7.1864e+00 -7.5563e+00 +#> -6.5451e+00 8.7649e-01 -5.2288e-01 1.6775e+00 -1.5374e+00 -5.7273e+00 +#> -3.5930e+00 -1.9869e+00 7.3439e+00 -5.8190e+00 -2.2919e+00 -8.5922e+00 +#> 5.2737e+00 6.9455e+00 3.0809e+00 4.3869e+00 4.8120e+00 -6.1846e-01 +#> -1.2605e+00 -4.5768e+00 -1.2679e+00 5.4245e+00 7.8463e+00 8.2616e+00 +#> -1.1359e+00 -1.6792e+00 -1.2544e+00 -3.9346e+00 -6.1791e+00 -6.8598e-01 +#> 
-#> [ lengthy printed tensor output elided: slices (9,.,.) and (10,.,.), Columns 1 to 48 of previously rendered values ]
+#> [ lengthy printed tensor output elided: slices (9,.,.) and (10,.,.), Columns 1 to 48 of newly rendered values; the numeric values differ only because the example output was regenerated when the site was rebuilt ]
-8.4910e-01 -1.2214e+01 -3.6696e+00 9.2911e+00 -#> 1.4515e+01 -2.6906e+00 -7.7254e+00 4.5797e+00 1.1218e+00 -2.0977e+00 -#> -4.4800e+00 2.6482e+00 6.9065e+00 3.1507e+00 2.0082e+00 -5.3859e+00 -#> -1.4128e+01 1.3816e-01 2.9477e+00 -1.9106e+00 -3.6697e+00 -2.2761e+00 -#> 5.4178e+00 -5.8320e+00 -5.7231e+00 -1.5806e+00 1.2152e+01 -1.0140e+01 -#> 1.9179e+00 -2.3483e+00 5.8366e-01 -6.8551e+00 -3.2098e+00 8.6950e+00 -#> 3.8399e+00 9.9482e+00 1.5313e+00 6.8653e-02 4.2482e+00 3.6826e+00 -#> -2.5470e+00 -1.4484e+01 -9.0480e+00 -4.8119e+00 -1.6745e+01 -4.4600e-01 -#> -5.5114e+00 1.3359e+00 -1.6710e+00 -1.4465e+00 8.3191e+00 -9.3249e+00 -#> 6.0060e-02 9.4988e+00 6.0412e+00 8.8024e-01 6.9324e+00 1.2031e+01 -#> -5.6851e+00 2.5318e-01 -9.3610e+00 9.1362e-04 9.4072e-01 -4.5883e+00 -#> 1.7162e-01 9.5146e+00 5.0873e+00 7.1316e+00 3.6845e+00 7.4398e+00 -#> 2.5172e+00 7.7000e+00 1.1956e+01 4.7511e+00 -2.8945e+00 1.5428e+01 -#> 5.9333e+00 7.9293e-01 -7.8763e-01 9.9137e+00 -6.2309e+00 -1.2783e+01 -#> -4.6147e+00 -1.4190e+01 2.4994e+00 -2.6133e+00 -7.3028e+00 5.2317e+00 -#> 1.6406e+00 9.9635e+00 -6.1104e+00 -1.0154e+01 9.4588e+00 -2.3062e+00 -#> -8.0695e+00 1.3592e+01 1.2976e+00 -1.0431e+01 -1.4089e+00 -6.0822e+00 -#> -1.1015e+01 1.5045e+01 1.5913e+00 2.8774e+00 -3.9416e+00 -1.8998e+01 -#> -1.2227e+01 -7.4663e+00 -8.8610e-01 -7.8272e-01 4.3430e+00 -3.1237e+00 -#> -1.0315e+01 -1.3596e+01 -8.0018e+00 -2.9016e+00 6.0070e-01 -3.3632e+00 -#> -3.8378e+00 9.5447e+00 5.4928e+00 -1.2036e+00 5.7343e+00 7.3284e+00 -#> -1.5390e+01 -8.8993e+00 5.3606e-01 6.2705e+00 -1.0778e+01 2.5031e+00 -#> -4.9752e+00 -3.1720e+00 -2.2769e+00 -7.5106e+00 8.8023e+00 3.0209e+00 -#> 8.2876e+00 -5.6669e+00 -6.2541e+00 -5.9230e+00 7.6724e+00 -6.0533e-01 -#> -9.2757e-01 -2.7814e+00 -4.3140e+00 -1.3984e+01 -4.1415e+00 1.2586e+00 -#> -4.3848e-01 1.1996e+01 3.2413e+00 6.3810e+00 -1.3565e+00 -5.3411e+00 -#> -1.7962e+00 -6.0509e+00 3.4097e+00 2.9417e+00 1.9023e+00 -1.4072e+01 +#> Columns 33 to 40 -9.8567 7.6175 -1.8756 -8.4411 10.6488 -8.1493 6.0528 10.1028 +#> -0.9945 -0.7705 -4.7930 2.1427 -0.1435 -2.7452 -4.7158 -2.5213 +#> 0.6202 3.9948 -5.6909 -6.4643 0.3004 0.9035 7.0773 3.1519 +#> 10.1589 2.7722 1.8978 -4.3193 3.9828 10.7195 3.5876 1.9366 +#> -4.4238 1.0247 0.5953 -1.6051 0.0304 -11.7576 13.4233 -4.1562 +#> -3.1859 -5.7735 19.4141 4.7993 -0.6175 -8.4062 -16.4626 3.1498 +#> 6.9534 5.0277 -2.9447 -0.0850 -2.2145 6.2498 -15.7354 1.9657 +#> 13.0054 4.9416 -3.7534 -0.1095 -3.8238 2.8011 0.8824 -4.2184 +#> -0.0062 -17.3774 1.0912 5.1841 -2.2457 2.5393 13.0165 -10.6976 +#> -9.7258 -3.5219 6.1499 -12.4502 -1.0758 -9.1970 -2.8117 -3.9221 +#> 2.8484 5.5359 -3.9454 -10.4596 3.0916 -3.4259 -2.1721 -0.8166 +#> -1.3490 1.1040 3.8767 -3.0998 -12.2260 -2.0939 1.1307 2.4352 +#> 11.8101 -0.2954 -7.6868 -4.6325 -3.1428 10.8741 -10.9336 16.3077 +#> 4.8506 -13.5014 -2.7887 6.8250 15.0096 9.8246 -8.1045 8.1444 +#> -10.8244 -3.0767 1.9881 6.4509 0.7621 5.1647 -8.5194 13.2671 +#> 6.9365 -3.5365 3.8543 12.9086 -2.8667 1.2013 9.6631 6.2177 +#> -9.3565 15.8733 0.6735 -0.6567 7.3761 -2.2755 3.4360 -2.6279 +#> 0.3020 -1.3872 -2.5643 -7.3403 3.0948 -4.3499 -6.3204 -3.7817 +#> -4.7272 6.5900 5.1556 -0.6818 -1.1616 3.2967 -1.8406 2.3409 +#> -4.6962 -5.2557 5.7527 -0.5302 -8.6238 -0.9163 0.3526 0.5857 +#> -4.2859 -4.8647 -4.0140 -6.6039 -9.6733 -6.1373 -8.1921 3.4339 +#> -8.1660 2.6921 11.1481 -5.1359 -4.8097 0.5925 6.0313 9.1418 +#> 11.4851 2.3422 2.0015 6.1188 2.0525 9.4460 -10.7253 -8.3846 +#> 0.3583 -7.9371 -0.4494 5.2844 0.9358 3.5774 -1.1877 0.7038 +#> 11.8828 -4.3449 
12.6311 1.8897 1.4277 8.2822 3.1796 -4.4574 +#> -7.6745 -4.8668 13.4500 -5.0439 -1.6162 -8.7502 -4.2821 9.2119 +#> -0.5675 -2.4104 14.9981 -5.2886 3.6040 -0.5608 -3.1493 5.8028 +#> -1.7977 1.2788 15.8534 -9.6430 1.1484 -11.0741 4.0682 3.3635 +#> -10.4483 -9.8690 5.2062 -6.3103 1.8113 -7.4166 -2.6534 2.8004 +#> 0.8338 -3.5295 4.4940 5.8538 6.6255 9.1051 -1.6787 3.8386 +#> 0.6720 -7.0088 -12.6543 6.1760 -7.6490 5.6031 -6.8800 2.8252 +#> 3.3358 -1.0577 8.0275 -11.5627 2.7493 -5.3755 16.5066 -0.5246 +#> -1.5817 0.4956 -9.7349 -7.9308 -9.1433 6.4206 7.8914 -5.4136 #> -#> Columns 37 to 42 -6.2390e+00 7.4627e+00 1.5723e+00 -2.5190e+00 4.9774e-01 3.5490e+00 -#> -1.8892e+00 9.1216e+00 7.8881e+00 -6.1767e+00 4.9554e+00 8.8803e-01 -#> -7.0721e+00 4.0161e+00 -1.0343e+00 1.2696e+01 -6.3369e+00 -2.1948e-01 -#> 1.3684e+01 -9.2964e+00 -8.0246e+00 1.0011e+00 -2.0440e+01 -2.1537e+01 -#> -6.7623e+00 6.8869e+00 -2.3903e+00 5.0027e+00 -1.3824e+00 7.4134e+00 -#> 1.7012e+00 2.9455e+00 6.5387e+00 -2.4028e+00 -3.4351e+00 -9.1958e+00 -#> 3.7916e+00 -8.9537e+00 -7.2207e+00 -4.9574e+00 8.1806e-01 1.7337e+00 -#> -3.7733e+00 1.3423e+01 1.3992e+01 1.0672e+00 -1.1168e+00 -5.3264e-01 -#> -2.9634e+00 4.7297e+00 -5.1181e-01 2.1821e+01 2.2174e+00 2.6987e+00 -#> 8.9396e+00 -7.7852e+00 -8.7303e+00 -2.0843e+00 -5.4726e+00 -3.1127e+00 -#> -1.3309e+01 1.7285e+01 -3.7094e+00 -1.1378e+00 3.7256e+00 -7.7327e+00 -#> 4.6996e+00 -1.4067e-01 -3.8312e+00 -6.2441e+00 7.1122e-01 2.4115e-01 -#> -8.7191e+00 -2.7296e+00 -3.7690e+00 -6.6840e+00 2.8123e+00 -9.0216e+00 -#> -7.8271e+00 -1.4785e+00 1.2006e+00 -3.0301e+00 -3.6380e+00 2.2138e+01 -#> -1.8020e-02 -7.3946e+00 -1.8668e+01 4.9143e+00 8.0434e+00 -7.5144e+00 -#> 2.9670e-01 -6.9659e+00 6.4073e+00 3.5793e+00 -4.7447e+00 -6.0572e+00 -#> -7.2437e+00 -8.0995e-01 -1.1089e+01 -2.2674e+00 -4.6293e+00 4.8363e+00 -#> 7.0735e+00 -7.6543e+00 3.0502e-01 -4.8940e+00 -2.1813e+00 -1.7265e+01 -#> -2.1917e+00 9.3046e-01 4.0582e+00 7.7997e+00 2.3249e+00 2.7131e+00 -#> 6.4485e+00 -1.8380e-01 -2.9445e-01 2.0924e+00 7.4262e+00 -8.2395e+00 -#> -2.2302e+00 1.7512e+00 -3.4890e+00 -2.6745e+00 3.4653e-01 3.6789e+00 -#> 8.9544e+00 -6.1406e-01 9.0091e+00 -2.5289e+00 -6.6100e+00 -9.2875e+00 -#> -8.6285e+00 -7.6416e+00 -1.2472e+01 -3.9108e+00 1.2114e+01 -3.0555e+00 -#> -3.7552e+00 -1.0725e+01 -1.0359e+01 1.1363e+01 -4.7689e+00 -6.3023e+00 -#> -4.3483e+00 -2.4968e+00 -1.6712e+01 -2.3206e+00 9.0312e+00 -4.3267e+00 -#> 1.7978e+00 1.7474e-01 4.3791e+00 -5.9808e+00 4.6256e+00 7.4821e+00 -#> -3.0482e+00 -7.1479e+00 1.4285e+00 -4.9415e+00 -5.8376e+00 -5.3080e+00 -#> -1.7139e+01 -5.7809e+00 -8.0277e+00 -5.4555e+00 -2.2411e+00 1.5101e+00 -#> -1.6970e+00 -5.6225e+00 -1.0386e+01 -4.1191e+00 -4.6791e-01 1.1010e+00 -#> -2.1864e+01 1.1266e+00 1.2700e+00 -7.2086e-01 9.0356e+00 1.3241e+01 -#> 1.5384e+00 -1.5492e+01 -2.5981e+00 -2.3097e+00 -7.5876e+00 1.6444e+00 -#> -9.8640e+00 7.9875e+00 7.5978e+00 -5.3028e+00 7.6840e+00 9.5088e-01 -#> 4.4981e+00 2.9509e+00 -3.7114e+00 -5.6748e+00 -2.7754e+00 -9.2440e+00 -#> -#> Columns 43 to 48 7.5252e+00 -4.7240e+00 2.8171e+00 3.8666e+00 1.0257e+01 1.6760e-01 -#> 1.4654e+01 -1.1176e+01 6.4938e+00 -4.7155e+00 1.6589e+01 1.9941e+00 -#> 2.4068e+00 3.9986e+00 2.0345e+00 -5.9613e+00 1.1134e-01 1.2031e+01 -#> -4.1788e+00 -2.1450e+00 -1.4720e+01 1.2275e+01 -1.3673e+01 8.7555e-01 -#> 6.7612e+00 1.4690e+01 -7.0012e+00 -4.7893e+00 3.9739e+00 7.7816e+00 -#> 5.9188e+00 3.5156e+00 -8.0060e+00 -4.2263e+00 1.0632e+01 4.4382e+00 -#> -8.1016e-01 3.7534e+00 4.6450e-01 -2.3587e+00 -3.2603e+00 9.0285e+00 -#> 1.1181e+01 
6.6565e+00 -1.0311e+00 -1.6246e-01 2.7070e+00 7.7686e-01 -#> 2.6272e+00 2.6926e+00 -2.1784e+00 -3.2185e+00 -2.2183e+00 1.5972e+00 -#> -5.3338e+00 -1.2750e+01 -4.8989e+00 2.2367e+00 -7.1979e+00 -5.7989e+00 -#> -8.5229e+00 -2.5259e-01 1.8121e+00 -1.6339e+01 1.4385e+01 3.1480e+00 -#> -5.5123e+00 -2.9095e+00 9.6932e+00 -7.3890e+00 -2.1089e+00 -5.4664e+00 -#> 5.6571e-01 2.2835e+00 2.0445e+00 -3.1459e+00 -1.8582e+00 1.9847e+00 -#> 1.1227e+01 -3.7863e+00 2.6117e+00 -2.0254e-01 2.9419e+00 3.3584e+00 -#> 1.8669e+01 -2.5709e+00 -7.3639e+00 -1.0516e+01 3.2457e+00 1.2949e+01 -#> -1.4009e+01 -1.4490e+01 1.3553e+00 2.0864e+00 -3.2283e-01 1.0447e+01 -#> 6.4845e+00 1.0992e+01 -8.9764e+00 -4.2840e+00 -6.8353e+00 2.0669e+01 -#> -4.7237e+00 -1.5023e+01 5.2116e+00 2.4891e+00 -5.8575e+00 7.5567e+00 -#> -1.1441e+01 -7.2402e+00 1.1403e+01 3.5053e+00 -9.5773e+00 8.1672e-01 -#> 1.3615e+01 -6.2740e+00 -7.0639e+00 3.1193e+00 -4.7688e+00 -3.8634e+00 -#> 5.3512e+00 2.4257e+00 2.7007e+00 -7.2868e-01 -1.0831e+00 -1.0120e+01 -#> -1.9115e+00 -3.5195e+00 7.7107e-01 -1.0501e+01 -9.2194e+00 -2.2617e+00 -#> -1.2850e+01 -8.3718e+00 -2.8517e+00 -3.1152e+00 1.5854e+00 -4.3248e+00 -#> -7.8824e+00 1.0903e+01 1.5360e-01 4.4607e+00 -6.1502e+00 -5.7983e+00 -#> -4.9865e+00 -7.0381e+00 3.4789e+00 -6.8242e+00 6.5550e+00 1.0764e+00 -#> 1.3635e+01 1.7434e+00 -1.0626e+01 2.4001e+00 1.0004e+00 4.2354e+00 -#> 5.1739e+00 1.3836e+01 -7.8074e+00 -1.8637e+00 9.1099e+00 3.1453e+00 -#> -8.1692e+00 6.1423e-01 -4.5621e-01 1.4270e+00 -1.0939e+01 1.0901e+01 -#> 5.8934e+00 -7.4564e+00 4.9315e+00 -8.1992e+00 -4.8292e-01 -2.0732e+00 -#> -4.3536e+00 1.4776e+00 5.8203e+00 -8.0869e+00 6.1614e+00 9.3853e+00 -#> -1.3184e+01 -1.1708e+01 -2.4239e+00 -1.0928e+00 -7.8648e+00 7.4512e+00 -#> 3.7807e+00 4.5977e+00 1.4227e+01 4.8748e-01 5.5077e+00 -3.9448e+00 -#> -1.1252e+01 -1.0866e+01 3.1634e+00 4.5651e+00 1.2367e+01 -1.0874e+01 +#> Columns 41 to 48 -8.1875 8.5302 10.6741 -4.4169 5.6607 -3.9774 -15.0424 -4.5615 +#> 5.8743 -4.6295 8.2095 -7.0607 2.1486 6.9981 9.3540 -1.5149 +#> -3.6533 6.1376 -0.4473 -10.2394 -2.5592 4.9061 -0.0012 14.1799 +#> 3.1752 2.6807 -6.0404 5.3171 -2.9454 2.4533 2.5071 -3.0980 +#> -0.3000 -1.1908 3.2271 -8.3158 5.7898 -0.3585 1.7213 0.7952 +#> -4.8271 6.2385 -2.0346 7.9903 -7.1636 7.2280 -9.0562 -7.4104 +#> 3.2648 -8.7601 6.2029 -11.8512 -13.2813 11.5996 10.7559 2.0016 +#> 0.6342 4.8021 -5.3250 2.9570 -2.0764 10.3792 12.6361 7.9569 +#> 11.1874 -6.3212 10.1186 2.2684 -1.2053 -16.2543 -0.4046 -4.7055 +#> -2.0598 -5.7966 -0.0920 -2.3383 -0.2680 -1.0659 2.0296 3.0285 +#> -2.9306 -5.7754 2.7468 11.0579 -0.0750 -12.9258 -10.6918 -17.1543 +#> 3.9201 12.7043 7.3187 -2.8003 -6.3074 -0.3992 -2.8109 4.0858 +#> -17.7045 3.9539 -9.2181 -1.6945 0.3697 9.2919 -5.8179 8.4449 +#> -3.6705 3.6850 -0.4694 -0.4929 -20.8587 8.9568 -6.7975 1.9337 +#> 0.5909 3.7837 11.8904 -10.8828 -4.3004 -8.3260 -17.8307 7.4843 +#> 6.9089 -1.5590 1.6277 3.2958 10.1334 0.6770 2.4188 4.8049 +#> -4.1183 -12.6335 14.2806 -1.3766 -1.6701 -13.4123 -14.5154 -0.8717 +#> -3.5142 -2.7452 4.8765 -6.7347 -3.3675 8.8471 -17.2389 1.0827 +#> -8.4277 -2.8826 -5.3520 7.4995 -4.1673 -0.2969 -6.6840 -3.0668 +#> -3.6709 -9.5021 7.6367 2.4663 -5.0088 -12.1178 -3.3172 10.5893 +#> 4.2535 -4.5195 3.8522 4.4381 0.0149 10.4340 -4.3466 -13.1328 +#> -3.5570 13.2433 -7.2976 0.5737 8.0422 0.1279 3.7678 4.0912 +#> 0.6259 -10.8859 3.5943 -9.6395 -11.6260 8.6014 4.0807 0.4150 +#> -5.6781 0.0669 1.6359 -6.9778 -10.9852 -0.5076 -8.5315 -4.4731 +#> -3.2479 -15.9125 -0.9775 0.9561 -12.3057 3.4267 6.1202 7.7863 +#> 
8.0905 -2.5483 -1.8191 2.7612 -1.0365 12.4728 4.0729 4.6249 +#> -5.0980 -2.8091 -0.0682 4.9691 -12.2233 9.7946 -9.6531 5.6263 +#> -7.1407 4.0348 -12.3803 -0.7324 8.3051 2.3644 -3.1617 5.3572 +#> -2.0959 -0.8399 -2.7117 8.7299 -14.4823 -8.4834 -3.7089 -7.1273 +#> -4.4759 1.5071 -0.0862 -1.5696 -11.0392 -0.3375 -21.2326 -4.0626 +#> -1.9187 0.8044 0.2210 6.2557 -3.0297 -1.9610 0.5112 1.5936 +#> 4.0483 -0.3336 -21.5891 -2.8182 5.2686 -3.8660 5.1294 6.1881 +#> 4.3781 2.6013 -7.9393 10.4154 -5.9459 -6.2546 6.0568 0.0836 #> #> (11,.,.) = -#> Columns 1 to 8 -2.0044 4.3699 -2.4127 4.2359 4.1969 -0.2678 3.8926 7.5892 -#> 6.1867 0.1656 -4.8839 -1.8466 -4.4712 -5.9437 -10.9038 4.0003 -#> 8.4201 -8.1247 -1.8980 -4.0356 -6.8123 4.3460 5.1257 -9.9883 -#> 0.9669 8.8183 8.4483 -4.3425 0.9247 6.5621 12.3047 2.4185 -#> 6.7167 -2.4476 -1.9954 -3.5758 -6.7449 -8.1649 -5.5353 -1.2549 -#> -2.4296 2.2598 14.8066 4.2901 -8.1575 -0.0060 -10.8620 15.7208 -#> -3.4516 -10.6639 5.1614 -7.7433 2.2696 2.9458 3.3822 0.3909 -#> 6.6280 -11.6794 0.7623 7.4987 3.8300 1.4233 -13.8746 -11.0928 -#> 11.9090 -16.2859 -2.0074 -5.6302 -6.8406 -7.0040 -0.6425 -3.5537 -#> -8.4572 1.7583 -2.4892 1.3537 4.7256 -3.4651 3.8739 0.7713 -#> 9.7891 -11.9684 -0.2066 15.0137 -7.3798 -1.7238 11.3389 -16.5909 -#> -2.2045 8.7826 3.2816 0.7466 -5.2149 -2.3559 -3.3869 1.5777 -#> -2.5540 7.1872 11.5563 -2.1551 -0.1032 12.4840 6.1519 3.8442 -#> 1.0610 13.9169 -5.9530 -3.8892 -4.4112 -5.2429 9.1333 9.5078 -#> 5.8089 -4.8836 15.1818 -1.9421 -10.6217 6.9401 -3.8909 -1.3724 -#> -7.8908 5.5054 -8.3785 -8.3396 -3.5264 1.3435 18.2884 6.9249 -#> 6.6423 -7.1030 1.7219 7.1863 7.9274 -0.1598 -4.0641 -9.6481 -#> 5.8048 2.5447 14.5549 6.4658 -1.0197 6.5001 7.7019 -2.9925 -#> -2.3873 -14.1483 -8.2676 0.0137 7.7786 6.5895 6.4276 -6.6812 -#> -8.9539 -10.5986 8.0475 3.8106 -2.6754 0.6577 -6.8355 4.2199 -#> 9.1807 -1.5445 11.4027 3.9223 3.0354 -2.9551 -15.0646 0.2942 -#> -1.2010 3.2711 -18.3424 -2.1797 -5.7391 11.5237 13.7566 -7.5049 -#> -1.2460 -2.2535 1.8274 10.3330 4.5104 -2.5318 4.1912 -5.8933 -#> -2.9833 -2.7955 13.6504 4.9941 5.8426 3.9711 12.0044 3.4093 -#> 4.5080 0.0392 -7.4748 10.0702 2.1613 6.0438 -0.6968 -12.2627 -#> -5.8455 -4.4407 5.9451 -1.1177 7.1119 4.5139 -9.7403 2.9613 -#> 5.2632 2.9664 3.9723 2.8500 3.8921 4.1397 4.4269 -3.3525 -#> -4.6684 2.3957 6.1306 16.3635 10.9679 -0.5797 -0.5639 -2.7507 -#> 9.3602 -2.3128 -11.8529 3.9773 -1.8450 2.9816 14.0366 -6.2402 -#> -6.2591 -1.8565 -0.4631 14.7339 2.1109 -2.1565 -12.1677 6.6745 -#> -1.3769 -11.6554 5.5780 5.5344 12.0577 7.3156 -3.7152 -3.5623 -#> -7.9856 10.3130 -1.8823 2.1432 -0.4400 -4.6726 -6.2517 1.1903 -#> -0.9517 -2.9278 1.0489 7.3569 0.5211 0.1772 -0.7285 13.3707 +#> Columns 1 to 8 -3.1056 7.1014 -1.7520 -5.2867 -2.6633 -6.5338 5.3263 -15.7428 +#> 0.4713 -3.7744 2.3758 11.3051 -0.9939 5.6276 5.1841 7.1862 +#> -2.2767 7.0200 -0.0728 2.2325 -3.2260 7.8706 3.6194 4.8128 +#> 5.3578 0.0121 5.4959 7.4852 7.2253 3.3149 -6.7105 -8.1140 +#> 2.7616 0.8186 -2.4662 -0.6929 -9.9915 0.4618 -1.7305 -9.5296 +#> -5.8322 7.4398 8.4307 2.1667 -1.6221 -4.0324 -2.7911 -8.0866 +#> -5.8750 -6.1321 8.3655 -6.7013 1.1581 9.8018 3.4853 12.6581 +#> -9.6312 16.3625 -14.0993 -1.0879 6.6407 6.3807 -9.1717 2.6844 +#> 2.7172 4.4788 1.5058 2.4096 -1.5733 10.3485 -0.8834 2.2941 +#> 7.0686 -8.5086 2.7823 6.2788 -1.9113 6.3283 -3.2110 6.8253 +#> 1.9269 4.3827 5.4367 5.6315 -4.0407 -2.6768 -4.7427 3.3134 +#> -4.0706 12.0355 1.4931 1.4809 1.0108 13.7341 0.7601 5.6928 +#> -7.4500 13.5813 5.3255 0.8900 16.8893 20.4150 -8.8246 0.1538 +#> 
3.7011 3.4544 2.9602 9.5964 -2.5339 17.3166 -3.7979 -6.1747 +#> -10.3421 6.0965 -6.9043 -6.9519 6.8128 10.0486 -2.5503 5.5157 +#> -1.1109 7.4610 -18.2971 -4.9955 5.7766 -4.3632 -3.2210 0.2086 +#> 7.9126 -12.5112 13.7044 -6.3423 -0.1060 3.5485 -7.7820 -2.0850 +#> 4.6018 -6.4806 6.5563 -7.7512 -0.2436 11.1964 -7.4478 12.9881 +#> -3.8200 2.3284 -11.0287 7.9292 -3.1138 2.1129 3.8329 7.1461 +#> -0.2347 -13.2690 10.6711 2.7179 6.8641 -3.2205 11.2996 -5.7390 +#> 1.3354 -0.1358 -7.1959 3.9022 1.3122 5.8613 4.3540 16.2754 +#> -5.4127 7.2560 -15.3921 4.6897 2.4843 2.6917 19.3003 -7.2927 +#> -9.1813 -1.6702 24.5728 -6.3445 -0.2350 -2.5025 2.0758 2.5542 +#> -11.9177 10.7019 -10.7787 -4.1487 -13.7692 8.1852 -6.5000 6.6651 +#> -1.7072 -8.3519 8.5500 3.7614 -0.6362 -3.5548 16.1865 3.6186 +#> 3.0541 -6.4272 1.8470 -0.4228 2.4739 -5.1297 2.7983 0.3182 +#> -2.3400 0.9296 2.0187 1.1432 -5.7968 -2.0452 -5.9075 -11.6250 +#> -0.0323 -0.5476 -1.7421 -1.9274 2.6061 -1.9536 5.5033 -9.6515 +#> -6.3654 1.6957 -10.5462 3.6596 -0.6410 -10.5334 11.2728 3.3740 +#> 1.4619 9.2676 -2.7505 3.3931 1.0666 7.7865 -15.5857 4.3630 +#> -4.1392 11.9711 1.6731 -5.9038 1.9859 -3.8609 2.0448 -8.1301 +#> -0.7937 13.8198 -18.9691 10.3603 24.2892 -14.3045 2.7017 2.4326 +#> 0.7147 -0.5399 -12.1657 8.7173 -8.7910 -9.3547 10.9389 9.0797 #> -#> Columns 9 to 16 6.3823 3.0219 -2.8446 -4.6026 -3.7400 -4.1116 -7.1448 -7.5928 -#> -3.3294 -4.4999 -5.0383 -3.9010 -2.0349 -6.5806 -0.7017 0.5485 -#> 0.2619 0.9938 -6.3134 -3.0559 3.3365 1.7051 -0.3802 2.8995 -#> -8.0614 -0.8129 0.3033 20.1874 1.7606 0.4109 5.9526 10.6269 -#> -4.0922 -5.5850 -13.6079 -5.6689 -11.0693 -8.8210 -3.5062 -6.0664 -#> 5.8566 2.6684 -5.3008 -9.9224 0.8673 -11.0890 7.2668 5.1669 -#> -3.4305 4.8934 -13.3550 1.1970 -0.1186 4.9312 0.2421 3.2356 -#> -0.8893 -1.7562 -4.0922 -1.5024 -24.6047 1.4579 13.1380 -5.0338 -#> 0.1682 2.2131 1.6275 -7.8510 7.6663 -6.3077 -14.3870 22.1853 -#> 5.7356 -5.8148 2.0537 10.2324 2.7517 10.0743 -1.6487 -6.4380 -#> 12.8964 -4.1159 -4.4931 2.6539 3.5004 1.7731 12.6290 -4.6256 -#> 5.3116 -3.7887 4.4995 -14.8593 -4.2049 7.0560 -3.3320 -7.5902 -#> -4.0621 6.1609 -4.2993 -0.3039 -8.4894 -1.1391 14.2142 -10.8272 -#> -0.8747 -6.0277 1.8341 -16.5662 9.2631 9.6649 0.5672 -3.7222 -#> 6.7275 -13.1337 -7.5456 -12.3858 -4.7681 9.5557 -5.8113 -4.4115 -#> -1.1570 -7.7482 -1.9757 2.1171 -0.6067 -3.9330 0.0538 3.5047 -#> 1.0995 1.2456 0.8121 -7.7823 -8.1074 -3.2819 6.8381 -3.0345 -#> 2.5230 1.4343 -1.5845 4.0496 -7.0159 -7.2508 -0.9545 9.0573 -#> -9.3421 -1.2291 5.5133 4.4957 -4.7450 1.2507 -0.0840 3.7238 -#> 5.9721 0.4973 -0.8971 1.9492 -11.6507 -6.1942 -0.0588 3.0740 -#> 0.6651 8.9529 1.2361 -3.9383 -4.2929 6.1735 -1.6646 4.5894 -#> 9.7561 -8.0153 2.2952 1.5529 0.2461 -0.9306 7.2547 -2.9878 -#> 9.1697 4.8139 13.1205 0.1607 11.1355 -3.9690 -2.9050 9.5772 -#> 13.4256 7.8929 5.7852 -1.8729 -4.2819 12.1169 4.5292 -1.7870 -#> 7.7253 -4.2477 13.1721 -6.5561 10.9189 1.2953 1.1537 -0.6233 -#> -8.4916 -0.1692 3.4664 7.2991 -1.1835 3.4548 -1.7278 0.9493 -#> -2.8162 -4.2595 -1.5781 -6.4060 2.9334 -7.0955 12.3795 2.8014 -#> -10.3906 6.7465 3.1686 -0.3980 -2.3234 -0.3691 9.6803 -14.8288 -#> 4.0592 -10.3295 -3.2436 -3.0166 2.3703 -2.3240 -4.1811 8.5929 -#> -1.2070 10.0707 -0.4461 -7.8677 -11.1716 -5.1939 14.7600 -11.8722 -#> -5.8241 4.7446 7.1372 -0.0313 -4.6634 1.8151 -5.5978 7.6861 -#> -3.1263 5.6811 13.5624 -14.0331 6.8948 -7.2957 8.0425 1.4875 -#> 6.3600 5.2668 0.5438 -0.3248 15.2348 4.9118 -5.3705 0.7440 +#> Columns 9 to 16 -7.4168 11.0759 1.1003 6.0576 -9.2876 -8.4411 
-3.5381 9.7936 +#> -5.1028 8.0608 3.7483 2.6942 3.2878 1.6583 2.9900 2.8672 +#> 0.7359 6.1439 -0.2405 -0.4419 -3.0154 -1.7064 -8.4121 1.4606 +#> 3.8276 -2.2371 -3.4983 1.0718 4.5416 -16.9983 -3.7466 2.8815 +#> -3.8746 11.3641 7.9679 -4.7086 -4.7910 1.2206 -3.7255 2.6025 +#> 2.8650 -4.5575 -4.8067 -10.0891 -5.6783 11.3451 8.9307 -22.3631 +#> -7.2190 -2.3230 -2.5787 9.3403 8.6515 -2.5033 5.2275 4.6794 +#> 15.8816 3.1307 7.7790 12.0086 -5.4551 -15.1441 3.1261 9.8844 +#> 7.3713 12.6069 -12.5193 14.9959 9.2254 5.9878 -1.8536 -2.9194 +#> -2.3693 1.9804 -1.8490 -2.7110 -1.7631 18.8069 -2.5947 -7.2365 +#> 11.1356 7.7725 2.9667 9.3270 -2.9968 -0.2738 -3.1082 10.0524 +#> -0.4463 7.3109 4.3322 -0.8646 -2.2492 2.0766 -3.0842 -5.9032 +#> 6.6210 3.5389 1.9736 9.0659 2.1851 -11.8417 0.1917 2.0172 +#> -1.2623 -3.3925 11.4168 2.3249 -4.8050 -9.3300 5.1964 13.0922 +#> 3.8708 5.5887 5.2564 5.0277 -11.3091 5.1434 -1.6130 3.2994 +#> -2.0875 -8.1905 -3.9017 0.8841 -12.7772 -1.0022 6.7764 -5.8629 +#> -3.3820 -2.0941 -4.3281 -14.3973 5.4035 2.8416 -8.3787 -3.1533 +#> 8.8659 9.1797 1.2592 -4.6483 -12.7040 10.8555 11.3138 -2.1920 +#> -3.7947 -4.4181 9.3954 0.4649 7.3404 5.9480 3.3579 -2.8093 +#> -6.8092 7.5523 -1.2000 6.2166 5.8215 9.4886 1.9996 -9.5033 +#> 0.3237 6.0587 12.7698 7.3901 16.7772 -2.5261 4.7448 1.9454 +#> -2.9198 -1.9480 1.3946 -4.9714 -0.1512 6.7602 -4.6601 -12.6422 +#> 0.8079 -1.1571 -5.7779 3.2545 -2.1266 0.3567 -1.2619 -5.9750 +#> -5.4052 7.6162 -9.3153 12.9787 -1.5731 -3.2355 8.6699 1.3725 +#> -5.8511 -18.1395 -8.7816 -1.3586 4.7561 2.3052 -4.5350 -5.8648 +#> -4.7533 -2.0937 -1.9807 -3.3829 -1.8970 4.3063 4.9850 -6.0642 +#> 1.5990 -5.0245 3.8857 -2.0606 -4.4603 -0.3299 4.1605 -9.2712 +#> -0.4386 -5.2443 -1.9523 5.1703 2.6962 16.3616 -5.8466 -4.4763 +#> 7.1911 -7.5282 14.3286 -1.2862 7.9797 0.7726 8.6860 -4.6079 +#> 5.9871 6.4725 1.2052 7.9906 -16.6894 3.8827 18.7941 -7.3567 +#> 11.7971 1.0183 1.1445 5.7499 7.6029 -11.9659 -3.6406 1.5794 +#> 7.0029 -8.7005 6.3045 5.2256 -1.0162 7.7319 -1.4217 -4.6253 +#> 8.2087 -0.9443 1.8711 3.2622 2.5441 0.5841 -3.8732 1.5598 #> -#> Columns 17 to 24 0.5236 6.8693 -0.4034 -9.7509 -4.5863 18.9446 12.2359 -1.6541 -#> -2.9956 4.0989 -10.3061 1.1723 -1.5345 7.7239 14.5010 -0.1832 -#> 13.2186 0.7272 -8.6150 3.3184 16.5073 1.1210 1.6489 -2.2074 -#> -3.4138 -16.0043 17.6044 1.4957 -10.2243 -4.1157 -0.2742 5.9374 -#> -4.2704 0.2206 -4.0499 8.9308 1.4830 -1.8442 10.6688 -4.1980 -#> -0.2917 -13.9340 6.4268 12.3239 8.9711 7.4519 7.6545 1.0119 -#> -2.5605 -1.7786 5.2483 -10.9202 -9.1474 -3.8889 -4.1318 -4.5254 -#> -10.1291 -4.8287 -14.1620 -6.3614 -11.1534 -8.4878 12.9382 -1.4245 -#> 16.6430 -8.2832 -4.1078 14.2384 13.0596 1.6659 -0.0978 6.2452 -#> -9.2022 6.4704 15.6910 -2.6631 0.6269 1.7004 -1.7806 0.7099 -#> 3.6032 7.8300 -24.2912 12.1176 9.7138 -0.3804 15.4161 -10.8723 -#> -10.0752 9.4278 2.1156 -8.8559 -2.5048 6.6876 -4.4726 5.2917 -#> 4.8320 10.4034 0.9745 -12.8283 1.6226 -0.0745 -9.1379 -3.6713 -#> -5.8681 14.4700 12.2456 1.0197 -7.4787 0.7518 7.1122 3.2161 -#> 13.5318 1.5475 -6.2782 -10.4522 3.0410 8.6182 -2.5363 -7.1108 -#> -4.6763 0.1332 9.1238 -3.5555 -2.0220 -1.4984 7.1836 8.1871 -#> 1.5535 -12.9286 2.1660 -0.5209 -7.5615 -4.8321 -1.2277 -3.6797 -#> 0.9089 -10.6601 10.4946 -1.6091 6.9751 -8.0629 -4.4556 2.6121 -#> 6.1206 4.4565 -4.0688 -2.7858 5.5975 -1.0423 -3.8477 0.0219 -#> -2.9188 -6.3728 8.4461 -3.2103 -7.5373 5.3769 -0.4173 -6.6054 -#> 0.5667 -12.7742 2.3528 1.9002 1.5721 -12.6775 0.5521 -3.3713 -#> -0.7820 4.8531 -1.1454 -4.0201 8.4258 3.1378 5.1369 4.2380 
-#> 14.7232 -0.8633 -7.0142 3.2225 -2.5383 -9.4545 -2.2383 -4.1793 -#> 4.2805 1.1091 9.2722 3.3933 -5.3543 0.2697 -3.0848 8.4100 -#> 10.8122 -9.1854 -14.0378 1.1658 8.9828 -7.5437 -14.8283 -1.8276 -#> 0.2647 -4.6193 5.6080 -3.8388 -7.6890 3.8130 -2.9010 -7.1018 -#> 11.1065 -1.7242 2.5281 -2.9828 -4.6451 -11.2156 -1.1177 10.7715 -#> 4.6313 5.9269 6.4877 -5.0038 -7.0321 -7.7048 0.2439 -3.5283 -#> -3.2314 3.8181 -0.7659 0.2947 3.5189 -0.3271 -1.7176 -9.7372 -#> -7.1887 16.1351 -6.9254 -10.8912 -5.4141 3.3091 14.0301 -5.9519 -#> -1.1507 -7.1501 2.9040 -11.2942 -8.9464 -11.5441 -7.8685 8.8295 -#> -11.5279 10.8920 -2.8373 8.6184 -10.8648 6.1828 -5.2552 8.0270 -#> -6.4354 -0.0202 2.1212 10.6847 1.9121 10.6659 5.9482 3.1849 +#> Columns 17 to 24 3.0514 -1.7865 -2.6677 -13.7430 -4.6591 0.2537 -1.3093 -12.1497 +#> -3.6335 3.2847 0.2427 4.6065 -3.5308 -5.7190 14.7120 3.3639 +#> 10.5363 -0.8013 7.5259 2.2988 5.0467 -5.5617 -8.1138 2.5266 +#> -13.9912 -3.8696 2.9843 -2.2994 -2.6520 1.3427 1.4171 -5.3636 +#> -8.2402 5.0633 -0.3580 1.5580 -20.1335 -14.2977 -5.0227 -1.5824 +#> -5.3244 17.7564 -11.2515 -5.4180 6.8312 17.0223 -1.9325 -6.5925 +#> -6.7305 1.7371 -0.5783 6.5701 3.2058 1.1611 9.8399 20.0164 +#> -6.8536 4.8541 11.9151 1.9440 -1.5002 0.7527 5.2115 -0.6330 +#> -2.0804 11.0721 5.3992 -3.6796 -5.1200 2.0026 6.9874 -17.9426 +#> 7.2199 6.7778 -0.8521 1.0804 1.8044 6.0031 -2.9563 -0.3860 +#> -6.6783 1.3658 0.8428 7.0860 6.9103 1.2721 -2.1449 -9.8141 +#> 1.8483 -5.3921 9.4492 -1.6840 1.2464 3.8513 -0.4600 -12.6121 +#> 8.2959 1.5687 9.5056 2.2983 -2.0890 4.1798 10.1808 -4.0232 +#> -0.7005 3.6303 0.8530 -2.4843 -10.8943 -13.2525 11.2986 14.8797 +#> 1.8221 -13.6809 4.9042 -2.2119 4.3699 -5.6280 -8.7255 -5.4939 +#> 2.5839 -4.6317 -5.0155 11.8297 -3.2862 10.7527 4.1126 0.1293 +#> 12.8436 -15.2097 -0.2136 -11.3170 -3.1281 -0.3734 -1.6421 -1.0410 +#> -16.1127 12.7949 -2.7832 14.0396 -1.4200 5.4328 -2.1606 -4.6819 +#> -4.2130 8.1218 -0.1096 7.9438 9.6171 13.0847 -0.8271 -1.9071 +#> -5.3947 4.8697 7.9126 9.0326 -1.9985 7.4682 3.2385 7.7796 +#> 7.7056 9.0513 -0.6917 -0.0706 6.3404 7.3673 13.3844 2.8799 +#> 8.9054 -3.3671 6.1026 -3.1914 16.4169 10.8542 -6.9700 -15.3057 +#> -0.5790 -7.5054 -2.7670 3.4779 -10.5296 -1.6088 1.4685 11.3029 +#> -22.5003 21.0097 4.5520 -7.8701 -3.9777 -4.1955 6.4590 -7.7822 +#> -13.9366 -6.3445 0.4268 12.9550 -8.9029 7.3922 9.0724 22.1141 +#> -6.6576 6.8693 -14.1005 1.7424 -4.5490 3.0694 7.3529 8.7343 +#> -7.2280 10.5721 -8.0667 -2.8534 -6.6502 13.8471 -3.8542 0.9561 +#> -1.7408 5.3414 -7.1313 2.6918 -0.6835 16.9315 -4.9303 -2.7553 +#> -6.0870 4.1576 -2.2776 4.7928 1.5814 -7.6875 -0.2116 4.9794 +#> -2.8338 12.3172 -4.5041 -8.0548 -3.9249 -0.1538 6.8541 -18.6167 +#> 7.8449 1.0817 8.0695 1.4604 -14.7621 -3.2426 -2.7435 3.1915 +#> -4.2422 0.4061 -5.7550 2.7467 -0.0334 2.0099 -1.6942 -0.1959 +#> -8.5752 -2.8829 8.5170 -6.1580 11.2926 2.9038 -16.6995 -8.4883 #> -#> Columns 25 to 32 -5.0729 -4.2956 2.7086 6.4407 15.8604 -0.3588 3.0230 -8.7129 -#> -7.9630 -0.9580 5.0442 -2.5717 1.9893 5.3521 6.3859 5.3545 -#> 2.0941 1.9097 -12.0480 -7.8120 -1.1532 11.4927 -8.4720 0.4449 -#> 4.2617 -6.9544 -8.1263 -3.9371 -0.5503 -4.5696 19.1867 7.3988 -#> 5.5247 0.5356 0.7781 1.2835 1.0388 -1.3812 2.8845 9.4449 -#> -0.6169 10.7332 -4.6661 3.8535 -1.2422 -5.3036 2.2425 9.5974 -#> 4.5023 -8.9653 -4.7451 6.3774 -3.6699 -1.5664 1.6872 4.6048 -#> 3.5291 2.9527 8.3790 -1.0565 -9.1058 1.3912 -4.0224 12.2775 -#> 6.6163 11.7969 -0.0560 -6.3300 -6.5183 -7.4860 -8.6395 9.1613 -#> -4.0831 0.6745 -8.1844 3.2280 -3.4047 
-1.9166 0.7233 4.1844 -#> 6.8935 3.1620 -0.2730 -8.7033 -3.1915 8.9787 -0.6695 3.8883 -#> -4.3835 -1.0841 5.7311 1.6674 2.7202 -10.2040 6.5731 -3.4295 -#> 6.2861 -4.3858 0.8793 5.7060 -0.1589 6.7821 -7.4484 -0.8987 -#> -11.4815 -1.8563 8.6766 -5.0828 -0.2160 -10.1367 16.3202 -15.2852 -#> -0.9740 4.4800 4.1621 11.5320 4.9337 2.3787 -13.8811 13.0770 -#> -2.3275 -6.4719 -5.6679 -0.2408 3.0308 2.7526 8.1827 -7.3714 -#> -7.6305 -1.1428 8.5583 6.8418 -1.1626 6.2628 2.1209 7.9571 -#> 1.3825 -4.8033 -1.8967 3.3390 -5.9439 1.8516 11.8852 4.2061 -#> 6.7047 -1.5728 1.9421 -6.5667 -3.1609 2.4729 0.5374 4.6061 -#> 3.9707 13.9025 12.1335 6.5156 -2.7861 0.3097 -8.4294 4.0610 -#> 1.8200 10.6741 -6.9567 -3.4479 6.3126 3.0632 -9.6933 5.1884 -#> -6.5620 1.9818 -3.6081 -0.1819 -10.5116 -3.7730 3.1047 -5.6763 -#> 11.4688 8.5353 3.8160 13.8754 5.2576 -0.4056 -7.2265 -6.5845 -#> 6.4180 -1.8389 -0.7055 8.5180 -8.2785 7.0835 -19.0100 15.5229 -#> -6.8713 7.3240 -1.0499 -1.4583 10.3664 5.5127 -1.9181 -5.3814 -#> -10.7972 5.2820 -1.3866 2.9625 1.5788 10.2621 -6.9272 0.7044 -#> -9.1831 -3.3115 1.0362 -3.3940 -4.2613 5.9859 9.9480 3.7198 -#> -8.1137 3.6621 8.2821 1.7979 -1.2668 1.4004 4.5593 1.2181 -#> 1.0208 -0.7449 2.9976 5.4707 2.6559 -3.3447 1.3942 -0.3096 -#> -4.3125 10.7800 7.2531 9.5339 0.3425 -1.7535 -7.0209 -12.0271 -#> 1.3050 2.2791 6.5436 9.1550 -10.5545 -6.5303 4.5502 9.5093 -#> 8.7469 -4.9613 2.1701 1.9083 9.6481 -6.2135 -2.7381 -14.1108 -#> -4.4900 5.6005 -2.7029 -1.0956 2.2024 -1.2472 7.6779 1.9417 +#> Columns 25 to 32 4.5461 -1.7874 6.9406 -6.1462 0.8465 -3.4333 -0.1287 -1.2245 +#> -10.6198 -10.2090 6.8449 9.1826 -8.7153 0.3717 11.9924 2.0144 +#> -0.2221 -3.0370 -1.6119 -5.1413 0.4188 2.3011 -1.1918 3.7638 +#> 0.4165 -3.6168 -3.3551 9.3613 3.7573 0.0487 14.0333 3.4898 +#> -7.1865 2.1603 -0.6283 3.7277 -2.4244 5.4873 -4.1391 0.3937 +#> -14.6706 6.0137 4.3979 -6.1696 4.8594 9.4999 -3.8605 -12.1960 +#> -10.8756 3.3342 -2.3423 13.3649 -8.9117 2.1469 -1.4568 -2.2630 +#> -1.9746 -2.0341 -1.2929 -4.8138 13.0213 0.6383 7.1013 1.6213 +#> -0.9603 -12.5348 4.4349 -3.1326 -6.6191 3.7959 8.8567 18.0121 +#> 8.1876 1.0164 -3.2779 -4.2590 -4.0202 -0.4069 -5.3847 -10.4643 +#> 1.8676 -4.2744 -1.2372 -12.6805 16.6786 -0.3825 1.8035 -1.3214 +#> -0.8577 1.7369 1.8565 -9.1353 10.9804 3.5655 -1.0358 2.5880 +#> -8.1819 4.4556 4.6743 -3.0216 -2.0720 8.6054 -10.6734 -0.2614 +#> 5.4568 10.4090 0.3215 -1.2129 6.4001 -2.3890 8.2217 0.7159 +#> 2.0578 -3.1233 -6.9508 -2.6483 4.2655 10.3881 16.7621 -1.3654 +#> 5.0270 2.8696 -3.2183 -6.5008 0.4007 -7.3746 -4.1468 1.1954 +#> 8.9372 -10.4825 -1.0310 6.3987 -4.7489 1.2082 -4.1153 -4.4258 +#> -1.3007 9.6133 -13.8906 9.1863 -5.9332 5.5027 14.8598 0.9152 +#> -8.4679 5.6421 -2.6120 -5.4634 -1.2663 -3.8618 4.9288 0.1378 +#> -4.5856 -9.3817 2.7296 2.4823 -7.1148 14.9971 2.7313 -3.3222 +#> -4.9916 8.9191 -5.7191 4.3404 -9.1205 -8.4813 4.2785 -1.1341 +#> 5.8225 -0.9654 -0.7333 -10.1825 11.3850 0.4820 1.1737 2.0676 +#> -8.7471 -4.1528 -4.5781 5.6589 -1.0593 4.3626 -8.3686 3.9424 +#> -8.3945 7.3017 0.5110 -0.9259 -0.0898 1.5115 13.4798 -0.3394 +#> 0.0846 -2.1283 -3.7992 12.5121 5.7055 -9.6746 -0.0973 -8.4925 +#> -1.9461 0.7274 -1.2698 13.6316 -8.7979 0.6258 -5.7439 -10.4442 +#> -8.3608 7.2671 -2.9568 4.3619 -8.1883 11.5379 0.0437 -15.2504 +#> 4.9201 -3.8489 1.7777 -1.0836 -1.1132 -3.6810 -5.9686 -1.6751 +#> 4.2006 4.4060 -4.5429 8.2122 3.8339 1.5789 7.6177 4.1010 +#> -6.1299 6.6651 -0.9906 -6.2403 -5.7599 3.3714 3.9567 -3.8084 +#> -3.2045 5.2026 4.9811 -5.1504 4.5886 2.0285 -10.9803 3.6086 +#> 
14.0256 1.5228 9.4775 -4.2767 0.6166 -8.6033 -3.7829 -5.1891 +#> 8.1207 0.6480 -6.2109 -0.8614 8.4112 -2.7397 10.1298 4.2593 #> -#> Columns 33 to 40 -0.0019 0.2802 11.2920 -9.1013 -5.6047 -0.0040 5.0107 6.8514 -#> 6.9784 -4.2512 -0.7175 -4.0739 8.1430 -6.1468 2.5069 8.5195 -#> 1.0547 -0.3367 -7.9803 -4.1151 -1.3741 11.1988 1.6343 -2.4667 -#> -8.8407 -7.3048 0.8817 1.6348 19.7482 -2.2385 -11.9681 -0.4096 -#> 14.3348 8.9444 6.1513 -3.8608 -4.9776 0.7029 4.0575 0.0101 -#> -1.0902 -10.9092 -6.6897 -5.8111 5.5139 9.2579 5.1977 9.8372 -#> 0.8237 -4.5022 10.2385 12.2472 -1.3605 5.4792 -4.8483 -9.0908 -#> 12.6671 3.5102 6.4837 5.3618 6.4916 -10.6185 -7.3910 6.6741 -#> 1.3334 -0.3487 -7.4179 1.1611 -6.2333 0.7965 3.6717 0.0761 -#> -6.9966 -4.8677 1.2862 -3.7916 0.1066 -16.2524 6.9257 9.2883 -#> 14.2929 -15.8576 0.3450 -4.3398 3.7223 11.3818 3.6238 -7.7689 -#> 0.7186 3.9697 -1.0347 8.9312 -9.0946 -4.7356 6.0722 -1.6076 -#> -6.7104 -2.8066 -1.2940 -9.2257 6.4690 11.5947 -4.0898 -7.5930 -#> 9.8058 13.1059 -4.8653 12.2930 -12.9543 -8.7284 2.0773 -2.7334 -#> 10.1541 -2.2709 -0.0826 -13.8402 13.5996 10.3662 -8.8221 -7.8720 -#> 3.1338 3.4794 -8.2790 -1.3139 8.1712 5.2529 0.6040 -7.0523 -#> -2.3543 9.7280 -3.3076 1.4279 0.8343 -0.5399 -0.3940 -7.1317 -#> 1.3395 -0.0886 -14.3154 -3.6316 4.7655 11.4265 -6.9424 -0.0195 -#> -7.2402 0.8065 -6.1732 5.3272 -10.5435 0.5907 7.7513 6.7939 -#> 0.6357 3.2251 -4.7789 -8.3201 4.2994 -3.6608 -10.4161 14.7643 -#> 1.0850 5.2056 -1.1094 9.1666 -9.4409 -8.2777 0.9586 8.9043 -#> 9.5923 -4.4406 0.3631 -4.2734 7.5759 5.4691 -4.3900 -6.4848 -#> -4.8792 -13.1105 -9.9396 -6.0821 -4.2209 12.5849 -2.5196 -7.9711 -#> -23.4039 -9.2183 -8.1744 -8.4877 4.3403 -1.6354 1.4089 2.3033 -#> 4.3382 4.4030 1.3752 -6.4615 -2.0714 0.8318 4.8986 2.2077 -#> 2.5029 10.4185 1.9057 -7.3488 5.1091 -2.3736 -5.1177 8.5054 -#> -11.5271 1.1920 -1.4756 -4.4427 8.2370 5.6577 4.2550 -7.8401 -#> 3.8904 4.7149 -14.7242 -5.9000 -12.1250 -3.5753 9.7209 3.8768 -#> 11.7669 3.3826 7.0856 2.0755 -1.0698 3.9380 -8.9544 -1.4195 -#> 18.2892 -11.3806 0.7879 -2.0373 -8.2313 -9.0982 10.7094 0.3240 -#> -4.2813 1.9703 -0.1238 16.0385 -7.7890 -12.1289 -10.8846 4.2699 -#> -8.0051 -3.0747 4.6354 1.3537 6.1540 -0.7435 -3.5279 -8.5859 -#> -1.3912 -15.9602 1.5367 -5.4228 -1.3131 -4.3742 7.9185 9.7122 +#> Columns 33 to 40 9.2424 5.3477 -3.3071 -8.9944 3.3546 -6.3851 16.6293 -8.3728 +#> 0.5228 -5.2786 -3.1655 10.0735 -4.0833 -6.4511 -3.5643 11.1066 +#> -6.8805 -6.3628 0.5653 -7.7253 -2.9464 -0.7135 -1.6304 -0.9286 +#> -4.5952 -5.4674 -6.6472 2.5320 4.3021 -12.3713 -3.1266 3.0358 +#> 3.6776 5.2333 4.1299 1.3047 2.6016 -4.1254 3.0028 -12.5456 +#> -16.5943 -16.6593 -5.0624 5.2969 -5.4644 -0.2972 4.1406 12.2008 +#> 12.8467 13.4065 -1.4311 -7.0946 -4.9756 11.1617 -8.7335 7.9459 +#> -2.1967 15.2606 1.1168 -2.2982 14.1499 -6.3651 0.1412 -5.4841 +#> 3.8924 -7.0436 -3.8106 15.5210 -5.5157 -0.7474 0.7868 6.6903 +#> -4.6871 -8.0278 4.5998 10.6388 -5.5684 5.9090 0.2474 11.6656 +#> -1.9133 7.0300 17.2314 -3.8331 -4.5409 5.6244 8.5037 -6.6785 +#> -1.5067 1.8693 6.3062 -4.7347 0.0889 3.0335 -3.5098 -0.5445 +#> -13.4363 5.1465 -11.8876 -8.6249 2.7343 7.8906 4.2040 11.1498 +#> 2.5616 16.8276 -12.9997 -3.1459 -0.2504 -0.3913 -1.4329 3.8785 +#> 1.2087 -10.3873 -18.7725 -10.7496 10.2618 1.0601 -11.6176 1.2907 +#> 6.6043 -4.2279 -4.2960 -6.6874 11.4313 2.0871 -1.8794 -10.9946 +#> 8.9732 -17.4081 3.0446 -6.6103 -9.8251 3.8942 -8.6507 2.8292 +#> -5.3151 10.3033 -21.6493 4.1621 -4.6132 2.6497 -9.7478 -8.4951 +#> -7.8956 -1.2738 3.8441 -2.3730 4.7064 3.7897 
4.9676 6.5962 +#> -7.6967 -14.8281 -8.7461 -2.9554 -6.8432 -2.1544 0.5735 12.7185 +#> 9.7002 5.3942 5.0005 3.4487 0.1106 4.1346 3.3380 13.2634 +#> 14.0664 -20.6510 -4.8552 -4.3232 4.7416 -4.5424 -2.4989 1.3249 +#> -6.9674 11.2402 -1.6994 -10.3309 -2.8903 4.0565 -6.2305 -4.8603 +#> -2.9008 10.0424 -18.4889 -8.2101 -2.9165 -9.3681 -0.6713 0.0844 +#> -7.2360 -2.5705 -11.9855 -13.4294 1.0608 -5.7634 -6.5146 -6.0410 +#> -9.9475 -2.0874 -6.5069 14.7216 -12.8900 -3.6905 6.6518 3.1321 +#> -8.3534 -3.0493 -4.1314 7.4558 -6.2577 0.7444 12.5636 9.9604 +#> -0.3961 -2.7503 12.8154 18.9382 2.4836 -0.2249 14.2536 0.8238 +#> 1.2639 6.7575 -12.8442 -0.0376 -8.2375 -12.7026 -0.5518 -7.0981 +#> 0.4576 7.1450 -7.4989 -0.1666 -6.9255 12.3310 5.8064 5.1844 +#> -7.9315 7.3573 1.4095 -1.3109 7.9046 -4.1940 0.4275 1.4821 +#> 0.9128 1.9522 5.4874 9.3033 6.4007 2.2500 1.5040 -12.2080 +#> 7.1595 8.6311 11.8110 0.6751 2.7331 -8.2961 2.1748 -9.6275 #> -#> Columns 41 to 48 11.9842 -0.5593 -1.7805 -0.0418 2.9324 4.4272 10.7479 -5.3032 -#> 6.4636 -7.0260 -2.1067 -2.6697 -0.7586 -0.1127 -1.9880 4.2114 -#> -7.8123 4.6202 -1.8848 2.7081 7.1877 -24.3626 2.2723 5.8324 -#> -3.6324 2.1598 1.4967 2.0073 2.5986 1.4709 8.2080 8.4126 -#> -3.0173 -2.4682 1.8973 -0.4220 11.2470 -8.9623 -0.4299 1.1831 -#> -11.3260 0.8946 -11.9878 8.5653 -13.7869 2.4784 -3.6501 3.3985 -#> 0.7377 3.5998 1.3043 -4.0607 3.9259 5.5577 13.7190 -4.1788 -#> 3.7997 -7.6852 -2.1255 2.1282 16.8586 -0.1670 -0.5216 -5.6847 -#> 0.0417 0.4327 3.9841 3.7029 -3.3788 -1.5266 -3.1260 9.1985 -#> -7.9677 3.4836 2.0614 -9.5991 -4.0841 5.0656 12.3368 -3.2452 -#> 6.2654 -4.9748 4.1019 -9.4819 30.9802 -20.2084 1.4996 -1.2812 -#> -2.4100 -1.8367 -0.7233 1.9043 -4.4144 7.0083 -4.5828 -3.8193 -#> -1.2719 7.6519 -12.3358 -1.2329 2.1559 -0.8138 -0.6092 0.2082 -#> 1.6161 6.0475 0.1815 -5.6616 -2.7878 -1.0785 -2.3120 0.9993 -#> 5.1240 -1.1110 -8.6762 8.2783 -3.1611 -8.0870 3.4626 2.6431 -#> 0.5291 3.3617 0.6551 -8.7125 11.2056 5.7662 5.2713 -2.7741 -#> 11.6409 -2.7260 -4.5710 -3.0022 7.6567 -9.3038 -0.0798 5.8057 -#> -5.5851 3.2143 -1.6160 -3.7551 -2.0959 1.3541 5.3000 -3.7737 -#> -3.2166 -2.7181 7.8093 -9.5603 8.7075 7.0108 5.1225 -11.7553 -#> 6.5461 -7.2921 -1.2451 7.5864 6.0977 -1.2823 4.6137 -0.9353 -#> -13.1320 10.1831 2.5122 6.5948 -8.3315 -9.3659 -1.7092 1.8071 -#> 1.1201 -0.9914 -2.0848 4.5972 2.9202 -1.4931 -2.1520 3.0542 -#> 7.4057 5.9809 -4.3698 1.5064 -2.8317 -2.2117 2.1553 -0.2234 -#> 3.6103 7.1969 -12.8206 -7.6817 -0.5296 -3.9748 5.1967 -1.6486 -#> -5.1564 6.3437 4.0816 13.9357 -10.6232 -6.3037 -5.1245 4.8427 -#> -0.6854 1.5132 3.3787 9.7651 -1.6342 3.9888 1.7152 -3.4456 -#> -2.9518 -1.5337 -3.2749 -7.5476 -2.8363 0.7673 -4.6810 4.3637 -#> 3.4508 5.3307 0.0274 -18.9641 10.7559 0.6545 -2.6214 2.7683 -#> -0.3699 -2.1346 5.4093 -2.6155 -1.9389 -6.1243 9.4638 -2.6488 -#> 4.5731 9.7016 -3.4944 -14.8443 8.1975 8.4388 -7.5679 -0.7393 -#> 1.9754 2.5775 7.9926 3.1289 0.2847 18.3038 10.5872 -6.6662 -#> -1.1725 5.3218 -12.1124 0.4054 -11.2314 9.3301 -8.0784 0.4656 -#> 5.2721 1.0929 3.0305 0.9061 -9.7753 12.5669 2.3650 6.1578 +#> Columns 41 to 48 -3.1392 6.3236 0.7073 1.5120 -1.8391 -5.7788 4.0804 -8.3241 +#> 2.5106 -6.4301 -11.1320 3.6476 -0.7345 0.6073 2.4868 -1.4135 +#> -3.1169 -2.2953 -1.0181 -6.0643 -4.7081 -3.0841 7.2758 4.2352 +#> -6.0995 -1.8265 -8.3836 -10.2064 -4.8300 6.6835 6.6905 1.2455 +#> 1.8673 -12.9154 6.3378 -2.2437 2.5361 -11.8875 10.1140 0.9790 +#> -0.1967 -6.7477 7.1395 -2.1479 7.5633 -7.3922 -13.5113 -1.1924 +#> 3.2614 -3.0319 -0.5722 2.5267 -7.7651 -0.8806 
-9.4327 8.8522 +#> -3.0833 1.5224 -0.3071 -1.7744 2.4594 -3.0523 5.2288 0.7627 +#> 7.5373 -12.6389 -15.4380 -19.1049 2.9447 4.7457 10.5029 -20.2439 +#> -0.6883 3.6972 10.2625 7.4831 12.7269 0.2402 -2.7910 0.3031 +#> 5.8815 4.4874 1.6697 -2.4667 -2.5506 -6.5303 5.3810 -5.0018 +#> -1.9833 1.0399 -1.3247 -2.0429 -9.2600 3.3040 2.0330 -5.5974 +#> -14.7912 11.8963 1.3233 3.1297 -2.8905 6.8713 -0.6342 4.2857 +#> 4.1237 2.4904 -2.5805 10.9582 -2.2437 -3.8681 2.7818 -10.3735 +#> -4.5889 -2.3082 -4.3875 -8.3914 -11.7653 14.0941 -7.1736 -0.7640 +#> 5.8828 3.9000 1.2275 -4.7927 1.9082 -2.4038 4.7111 -1.7195 +#> -2.2195 4.5441 -2.2650 -5.1624 -14.8150 3.0868 -3.3167 0.7638 +#> 6.2045 0.6251 -6.5964 -0.9374 -15.1725 -9.3577 2.6756 -1.7007 +#> 4.1191 2.1594 6.4082 6.6128 8.0440 -6.0048 -17.7271 5.2561 +#> 10.8162 -15.8923 -1.5520 -2.8257 1.6919 0.5887 -0.5257 -1.1731 +#> -0.3497 8.7414 -2.5578 7.8543 6.5660 3.1135 5.4449 4.9379 +#> -12.8605 -2.5898 1.3534 -3.8071 -6.5426 -5.0482 -10.5402 8.2086 +#> 5.5201 1.8604 -4.7878 -1.7672 -8.1637 3.5020 7.9703 -11.6711 +#> -1.9367 -7.4007 -6.7922 -8.3268 -3.9128 -3.5023 4.7860 -6.6200 +#> 15.3472 -6.4593 -7.5412 -2.4014 8.0834 5.2980 13.1981 -14.9130 +#> -0.4958 2.8085 -1.0653 8.3809 19.6020 6.4079 5.1017 -5.3585 +#> -5.0959 -3.7581 0.4790 3.6858 13.8312 -7.0175 3.4018 -0.0525 +#> 11.7728 -6.3824 14.8093 3.0395 11.0097 -2.1108 -10.7993 -8.3392 +#> -4.4606 5.5990 -12.7177 -3.5385 0.6170 -5.5415 9.0312 -2.2260 +#> 0.9364 10.8148 -0.4495 9.6610 0.5725 -8.4637 -8.2022 -12.4599 +#> -2.4467 2.0941 -3.6358 1.4687 0.7493 5.3189 10.9268 1.3595 +#> 6.6599 -0.7239 3.0088 -4.1595 18.0101 2.9459 6.7269 -3.7759 +#> -3.1599 -0.8773 -10.0258 -3.6227 -1.7192 0.7841 0.9650 7.5942 #> #> (12,.,.) = -#> Columns 1 to 8 4.5001 -1.2295 -1.8164 0.6520 3.2763 5.5859 3.9245 -4.6731 -#> -3.8160 2.2018 5.1452 2.9829 7.7092 2.6458 -3.2690 1.8272 -#> 10.2061 5.9445 -5.4832 5.7572 -13.4972 -9.2555 -4.7143 1.7513 -#> -7.6837 8.4554 -6.2225 -7.9215 -15.1581 4.2338 -4.3397 -8.5616 -#> -1.4925 0.9310 0.1745 7.8605 -10.1225 -9.7224 -10.6454 -5.1328 -#> 5.1166 -5.5563 -3.4195 -7.5142 -6.0150 3.4729 -12.7218 -4.4432 -#> -0.6968 1.9179 -1.0473 -12.1094 1.7510 -3.5513 -10.3687 3.9037 -#> -8.8777 -8.4637 9.1328 7.5304 2.5435 -0.7738 -4.5382 7.4405 -#> 3.4902 -4.5657 -6.3298 -8.0692 6.9725 -9.0220 -4.4722 -9.1183 -#> 4.3632 7.8274 7.3080 1.6116 -3.2386 -2.5663 -2.4155 2.2608 -#> -6.8906 -7.7736 -3.5131 5.5850 -7.2101 -7.0828 -3.4648 -1.2636 -#> -4.9135 7.8486 6.0787 9.6393 6.3337 2.7853 -2.4782 -2.6175 -#> 12.2056 11.0097 -5.6749 6.6335 -0.6023 5.6280 -3.9017 10.6491 -#> -3.4025 -15.7746 -15.6751 -16.0670 1.3937 -4.1239 -5.8367 -6.8215 -#> 12.1459 3.2775 7.3165 -6.2589 -5.5058 -3.6283 -6.4149 -9.3527 -#> -1.2078 -0.0522 3.6285 0.1165 -5.0967 3.7390 -1.1201 -9.5000 -#> -3.5308 2.7058 -3.9890 3.4945 -5.7438 -0.8554 -8.4195 2.9762 -#> 6.6982 3.4786 8.0461 -9.3455 -1.4272 6.8840 -7.2722 1.4466 -#> -4.4001 -0.1649 2.9268 8.8747 -1.2902 5.5666 2.5303 0.6259 -#> 8.3606 -7.1928 13.2341 -0.3941 18.6431 6.5971 -1.3029 4.8651 -#> 1.9562 5.0652 1.6687 8.4165 -2.4939 3.4391 -3.4970 2.5919 -#> -6.0691 -18.4595 -1.0337 6.6505 -1.8230 -0.6532 -1.8843 -6.8945 -#> 8.5778 1.2562 13.4635 5.7836 6.2390 6.4734 13.7121 -1.7506 -#> 16.3378 12.4865 -5.7148 -9.5783 1.0756 6.7436 11.4067 -5.2211 -#> 3.4916 0.0112 0.4477 5.7018 -9.1030 -0.5422 2.6076 -5.5469 -#> -4.8885 -7.4376 2.1646 -15.9880 -2.2590 1.2272 1.8984 -7.8221 -#> -8.0032 14.9478 -12.9263 1.4746 -10.2535 2.0050 -9.9589 1.8762 -#> 6.5848 19.4712 -4.4186 1.4986 -9.8512 
7.1145 -0.2163 10.5178 -#> 0.1889 -11.4429 -1.6276 -2.4710 -0.2683 -0.3900 -5.8692 -18.0839 -#> 9.9172 7.0181 7.4458 3.8973 1.2154 -1.8324 -1.7741 17.6313 -#> -3.3293 10.3854 7.7157 -2.8775 -0.8746 0.2266 -1.4422 -2.2013 -#> 13.3809 -2.5503 0.5669 -8.6428 12.1443 6.1498 5.0442 -0.9521 -#> 0.7790 -5.2123 4.2130 -6.1849 10.4151 0.1508 3.2000 -3.6462 +#> Columns 1 to 8 -4.7494 11.6914 6.2275 -2.5189 -9.1071 1.7811 8.3844 -4.6822 +#> 7.3511 -1.7579 -5.5751 3.7172 -2.1976 -2.1622 -1.7208 1.7950 +#> -4.0640 5.4961 -5.4718 -9.3183 5.4233 0.9490 -5.1374 -2.9941 +#> -3.8344 -3.7589 1.6410 -0.5586 -0.2092 -5.3019 -0.4378 3.5170 +#> 0.2908 3.2402 5.5140 0.5267 1.3939 -9.7284 2.4027 -6.8657 +#> 6.4532 -9.8105 4.2592 -3.6302 -12.7820 -1.5951 3.8619 -2.7962 +#> 5.3662 -3.8730 -14.0795 -1.0919 8.4617 -4.6664 -2.9468 0.8340 +#> -3.8975 -4.4485 -9.7173 -6.4734 -4.1037 -13.0983 -0.2139 7.3276 +#> 5.7682 -6.4684 9.7062 14.8237 16.2986 -2.7680 16.0024 -4.0138 +#> 3.4810 -4.3589 9.3481 1.0873 -2.2977 0.8925 -6.1126 2.3789 +#> 1.4736 -3.9429 2.4021 8.4457 -5.0132 9.4390 5.1010 4.0382 +#> 0.5604 2.8878 -1.6742 -5.9348 -8.8134 -5.6354 0.0670 1.6170 +#> -7.7194 0.8488 11.2531 3.9743 -4.1372 -13.6285 4.5616 12.9156 +#> 0.9943 8.7256 4.8928 10.7819 -2.6758 1.5081 -6.8332 1.9871 +#> -3.2482 17.7955 10.7015 2.5543 -0.5682 -9.8350 -3.6823 -4.3511 +#> -7.0937 -8.6378 -0.5751 -2.0821 -2.4636 11.8386 8.7885 2.4834 +#> -6.0151 12.8042 9.7828 4.2838 -5.9085 12.9451 2.2958 -4.7117 +#> 5.5618 -4.4812 0.2074 -10.1383 11.1834 -7.6623 1.9025 4.2811 +#> -6.7463 -8.3927 2.1827 -0.8423 -6.8870 5.1483 9.9108 6.1397 +#> 4.0850 3.3250 -0.9212 2.1076 11.6495 -3.4480 -6.3162 -4.4809 +#> -6.7212 1.8606 -0.1036 7.6388 -1.1124 2.6191 7.7316 9.8316 +#> -13.5180 -7.1545 -1.3133 2.3878 -3.2021 8.1715 5.6784 -1.3819 +#> 12.5770 3.9169 -2.9238 -13.0430 1.9886 -11.6656 -8.8397 0.6372 +#> 2.6831 -0.3313 -7.7377 4.8823 10.0838 -15.3990 2.4189 -9.2835 +#> 0.8459 -3.4268 -11.0981 -4.7562 2.3583 1.1973 -2.7463 -0.1310 +#> 6.0014 -5.1915 -0.9598 -8.1805 0.9224 -12.0693 -3.3295 -2.6039 +#> -1.9592 0.8019 1.9063 -14.8500 2.9979 -12.3182 10.9789 -0.3586 +#> 2.7076 -9.6546 10.1328 -7.4256 -0.0473 -0.6829 2.6728 6.6606 +#> -3.8981 4.1951 -2.3174 4.1387 5.6621 -5.3716 -3.5363 -13.7948 +#> 8.4676 3.2628 8.0927 3.1231 -5.4016 -13.5442 6.5253 5.4612 +#> -5.4619 5.2149 9.5666 -3.4838 -2.8675 -0.5318 -2.1331 4.0017 +#> -2.9222 1.2974 7.2555 3.4485 3.1459 -7.7185 12.1555 0.9251 +#> -4.2126 4.7496 -9.8861 -9.8656 14.1192 -4.2276 4.6448 -7.6168 #> -#> Columns 9 to 16 -4.0860 6.4213 -2.5722 -2.3741 4.1568 -5.8059 1.7523 -1.2796 -#> -3.1006 -3.4590 -16.6207 -8.0393 -1.1310 -3.5510 4.8503 -2.9037 -#> 3.0810 -3.6914 -6.7095 2.1614 -2.9226 -10.6996 15.6299 6.2988 -#> -11.1525 -16.5291 9.1431 5.3459 3.2410 -10.4744 -6.6682 1.0263 -#> 1.0383 1.9176 -2.6346 -2.2827 -3.5935 -3.7414 1.7668 10.5311 -#> -0.7062 -1.7054 0.5477 -2.0877 -7.5597 -2.5377 1.7576 -6.5614 -#> -8.4782 12.9652 11.9687 -3.9546 9.3618 9.1582 -10.6152 -4.3985 -#> 2.9258 5.9505 1.6781 -2.0285 -6.0790 2.2242 6.5264 6.0665 -#> 12.6039 -2.1076 4.9537 -11.3343 2.3227 1.4610 -2.6695 13.5633 -#> 6.5875 -11.4847 6.2436 2.8999 0.8432 -0.0396 0.9859 -1.0960 -#> -2.4116 4.0544 -15.8423 -0.0828 0.6131 -5.3123 -0.8512 3.7520 -#> 2.1860 -3.6086 -0.5867 -3.6484 0.5793 10.9506 -6.7126 -4.9702 -#> -13.6129 5.2558 -3.1411 4.7320 0.3424 7.0105 4.9324 -6.7802 -#> 4.0216 6.1902 5.0901 10.2227 -0.1422 -11.2877 -6.1075 3.0605 -#> 5.2406 -1.4246 3.9030 -2.4046 0.8196 -8.4962 4.4973 6.3254 -#> -7.6489 6.6798 -7.4386 3.0880 -0.5156 
-8.3741 3.2873 6.4475 -#> -3.4557 14.1308 -2.1692 -4.5955 7.5381 3.6648 -0.5949 12.6182 -#> -8.7020 -7.9468 -3.3009 -5.2928 -1.0027 7.5290 9.5723 -9.5589 -#> -1.3027 5.1076 8.0577 0.2842 -11.1052 4.8339 6.8218 -0.0609 -#> 2.5851 0.7047 6.0073 9.2696 5.6514 -10.3622 10.6630 1.7014 -#> 0.3271 -7.3651 4.2371 0.7115 -2.7260 4.1920 1.4280 -10.3795 -#> 3.9296 8.8945 -0.0389 -0.4414 -3.4227 -8.3822 15.2850 20.2510 -#> 5.6511 4.8389 2.8384 -2.2936 2.6809 3.3876 -9.1269 -3.5801 -#> 8.9159 -13.7750 14.9805 15.4946 2.3645 4.7642 -14.0470 -10.4856 -#> 8.8515 -4.1713 -1.1083 5.2987 -3.5451 1.7682 8.2087 -5.3819 -#> 4.3616 9.0161 6.3545 2.6342 -3.4428 -7.2009 6.3231 10.2266 -#> -1.8823 7.2042 4.8943 -3.7700 -0.2878 4.5386 -11.0202 4.4571 -#> -10.8001 2.1424 10.2672 8.6709 -4.4365 3.3502 -5.4543 -4.8962 -#> 6.6005 10.0152 1.3169 2.4047 -1.9600 -12.1608 11.7453 8.2295 -#> 0.8924 8.1780 -7.9939 -4.9859 5.4768 8.9285 5.1992 -9.6600 -#> 9.9078 7.4913 11.5323 -4.9031 8.4436 12.0946 -1.5957 -8.7517 -#> 2.2561 -3.1106 -7.4184 4.5571 -2.6696 2.9912 -1.4644 -10.2755 -#> 6.9493 -20.7305 -2.5021 0.0686 7.6799 0.9227 -8.6592 -16.3700 +#> Columns 9 to 16 1.1053 3.5929 3.0596 8.0787 -3.8845 1.6366 6.9312 -3.5006 +#> 6.6065 -0.7426 -8.6614 -0.8896 -1.1389 -4.3423 -2.0040 -4.8005 +#> 5.8483 -3.6555 -8.0874 0.7958 -3.8505 -1.4514 -5.1474 -1.1738 +#> -16.2258 4.3450 5.8417 6.3028 4.4347 9.6735 9.1234 -0.5046 +#> 8.5183 -0.5802 -0.7787 5.9788 -12.6460 0.7465 -13.3365 -3.4691 +#> -2.7279 -0.3538 -13.0109 -1.0451 3.0467 12.7585 3.3072 12.5092 +#> 6.4983 9.7885 -4.6848 -6.7536 10.7469 -6.1712 0.0495 -11.3812 +#> -4.7200 -0.9540 0.8806 9.6815 -3.0431 8.7748 4.5873 -8.6234 +#> -0.4974 11.3381 -3.9772 -3.7338 -3.8782 -3.2250 -18.5084 -7.4382 +#> 4.1628 -3.9405 -8.6841 -6.2948 -3.6343 -7.3230 0.3029 1.8037 +#> -12.4989 2.2490 13.7228 10.4315 1.7481 6.1588 15.8546 8.0660 +#> -3.2981 -0.5289 9.9215 10.8434 -1.1319 -0.6649 -4.8565 -4.5528 +#> -2.8793 -5.5543 -11.6374 1.7953 -8.5499 -0.5389 -5.6380 13.0501 +#> 5.3041 6.5157 2.8312 -0.8540 -9.9015 0.3593 -7.3609 1.7051 +#> 2.2576 11.1360 -5.2332 -7.1754 0.3668 -13.2184 -8.1626 -12.7167 +#> -4.5291 -5.2401 6.8047 2.6375 -3.2612 9.4454 8.7669 3.9668 +#> 2.7251 -1.5767 1.8422 -9.4165 12.5574 -10.6839 13.6745 6.8854 +#> 1.3396 7.8490 0.4546 -6.4737 -4.5701 2.1387 -16.8663 -12.9710 +#> -4.6366 -7.5210 -7.7491 4.3944 -11.0267 6.6839 11.0491 3.2236 +#> 8.8870 5.1582 -9.1483 -21.4267 -3.1668 -2.6182 10.5676 -2.0491 +#> 4.5671 -0.6424 -0.2153 -0.7346 -2.4687 -11.5683 5.5841 0.1110 +#> -9.0368 -7.6692 -4.8518 4.1419 -3.9568 -3.0074 -3.5510 -0.1042 +#> 1.9658 1.7985 3.5889 -11.9282 8.5851 3.2788 -2.9930 -6.4279 +#> 5.5135 15.1663 -12.9999 2.7351 -12.1934 2.2852 -5.7878 -13.8305 +#> 1.4929 -2.5567 -7.4677 -10.8425 4.2928 6.5082 11.4283 6.1736 +#> 2.0748 2.6545 -4.9670 -5.5344 3.1958 -0.7656 9.6221 10.2205 +#> 2.7918 8.2074 -15.2004 0.7522 2.1662 8.3776 0.4561 7.9721 +#> -4.3121 2.1842 -7.0578 6.0894 -5.7502 6.8616 1.7681 14.7573 +#> -1.8653 14.6449 0.8759 -5.7035 -1.0147 -4.5710 -0.3629 -9.3235 +#> 5.2758 8.0527 -1.0049 0.5178 -10.0698 6.3923 -8.6552 -4.0547 +#> -7.1736 3.9217 7.2714 -5.0306 2.6258 -0.6003 -7.0254 -5.5973 +#> -5.0572 4.0888 0.1128 6.7492 -7.8908 -2.0775 9.7692 -0.4405 +#> -7.4791 11.3341 6.2327 6.9447 -2.4409 -0.2219 -9.6524 -15.3869 #> -#> Columns 17 to 24 -2.0726 -8.5104 7.9019 0.8095 1.3915 -1.9690 6.3407 -4.2864 -#> 4.3306 -6.2841 5.3894 -0.8770 -8.1014 -1.7010 -6.8155 7.6810 -#> -6.7169 -8.4859 -10.5882 3.0183 2.8342 -0.5263 -1.2124 -0.5939 -#> -1.4485 10.2153 -6.5924 0.9652 
#> [ elided: diffed tensor print output. The removed (-) and added (+) lines are
#>   the rendered values of a large 3-D float tensor, printed slice by slice
#>   ((13,.,.), (14,.,.), ...) in column blocks ("Columns 1 to 8" through
#>   "Columns 41 to 48", CPUFloatType). The numeric values differ wholesale
#>   between the old and new renders, consistent with an unseeded random
#>   example being re-run, and carry no documentation content. ]
-11.4321 8.6949 6.1002 +#> 0.0771 -6.0239 -15.3810 -10.3905 1.2433 -7.9479 1.7519 8.1579 +#> 3.7030 7.0018 3.8837 -18.1678 2.3074 -12.9184 -6.3444 -7.0652 +#> 0.2832 -12.5081 -1.6503 -12.7904 -0.7906 4.5055 -2.0274 -7.0966 +#> -1.1560 7.0276 2.4356 10.6392 11.2842 3.4248 4.4802 -1.5989 +#> 3.3875 -1.8682 6.3087 -3.9602 1.7666 2.2462 5.6006 2.6341 +#> -14.4572 -10.4930 -8.8586 -1.9750 -12.9727 -4.7007 3.8266 8.8408 +#> 1.6228 -5.1363 -4.4258 -10.8498 -7.5557 -5.3916 6.5801 8.8889 +#> 2.8427 7.2956 -8.0200 -1.5578 -5.0323 5.3388 -5.9133 -0.9252 +#> 8.8019 1.7589 4.1468 -3.7787 -1.4694 13.9980 -1.7290 -9.1751 +#> -2.4335 19.8203 4.8795 10.1656 11.7867 11.2572 -12.0224 -6.1928 +#> 13.4877 2.4026 0.1878 -1.2813 5.0872 -6.6813 -0.9762 -5.9967 +#> -5.5528 -9.3455 -2.1656 10.8958 0.7286 2.3748 -5.7412 -9.6578 +#> -6.0045 -8.5685 -0.5930 -5.4759 -1.8792 -7.3016 -14.8040 -2.4047 +#> 1.8737 -7.0144 -2.5284 -11.7359 -6.8526 6.9042 9.9385 1.6060 +#> -12.9047 -0.9142 -7.1893 1.6719 -5.1746 1.1802 7.3469 0.9788 +#> 10.8455 1.7521 8.3575 9.0212 9.4134 -4.4178 -9.7777 8.4185 +#> 13.0378 -2.6627 11.0654 -2.6261 -1.4146 -16.2292 4.7065 -8.3873 +#> 9.6175 -8.6752 -2.4736 5.6130 13.7771 -4.7949 -15.8744 -0.6387 +#> -1.2921 -11.4872 -1.7810 -10.5419 -5.9407 2.5456 1.6647 -4.7831 +#> -1.9197 -10.4179 0.3066 -2.0262 -6.5656 -10.9171 7.0313 -0.0362 +#> -5.0607 -11.0022 -4.7930 4.4554 6.1903 4.9043 2.5441 -6.7533 +#> 3.5607 5.2236 0.7005 -2.0445 -6.8758 -6.7025 1.6693 -9.5756 +#> -2.7160 10.0189 9.0864 0.9660 -1.2260 4.2049 2.8616 -6.1965 +#> 3.0842 13.8484 -4.8341 -2.2035 -10.7796 -2.2567 -3.8369 16.7756 +#> 1.0556 -1.7092 -3.7212 -7.8805 2.5508 1.4942 -2.1015 -7.4245 +#> 5.8052 -1.8614 -13.0850 1.6800 4.2464 -4.5503 -0.7407 0.9004 #> #> (15,.,.) = -#> Columns 1 to 8 0.8048 -7.1302 -5.1982 4.7184 2.3049 0.1458 -2.9323 0.9045 -#> -6.2981 8.2727 2.8437 3.7729 10.3314 2.1071 0.9795 7.1802 -#> 5.0801 1.4905 -5.0455 -4.1448 7.4240 -0.2848 -4.4704 6.6526 -#> -4.3386 -0.6268 -12.2354 15.0675 -11.1815 7.1535 0.6211 -2.3970 -#> 6.0735 2.6415 -7.8096 0.1928 -6.0289 -3.4852 -1.8490 -6.5341 -#> -5.5020 -4.1683 -21.8447 -7.1531 -10.1589 -3.6331 -6.6435 8.8253 -#> -5.4820 1.1505 -1.7518 13.0789 5.8027 -6.2200 -4.9524 6.7853 -#> 3.2055 10.5159 -3.1530 -4.1114 -8.2848 2.9294 9.1991 -6.8600 -#> 0.7853 -0.2040 -2.6513 -8.2570 4.7800 -6.0429 -1.0372 4.5380 -#> -10.0597 -1.5282 3.6515 4.6160 -6.1629 7.2925 1.3604 -2.4042 -#> 5.5991 8.1511 -5.1106 -12.7090 6.2902 -14.5277 8.0083 -2.9098 -#> -1.2806 -1.3863 8.2275 2.2868 7.8467 1.3516 -3.8672 -3.2717 -#> 1.9597 3.3709 -8.9261 7.0613 12.2671 -7.5790 -3.4860 9.1355 -#> 0.7325 -5.4724 6.2299 -8.6116 -21.3077 3.4404 -13.1769 -1.2893 -#> -4.7072 7.5819 -2.1514 10.3851 2.1876 -4.8796 -9.3614 3.8329 -#> 13.7107 5.2684 6.4132 -3.1331 -7.7471 9.2193 3.7722 -10.6658 -#> 6.7938 -13.5632 -3.4073 -6.4364 6.0194 -6.4552 -3.6073 -2.3002 -#> -3.2604 7.2644 -6.9982 3.0262 0.3497 6.5678 -3.8303 -2.5425 -#> 9.5488 12.8055 10.9672 -4.7713 -4.8149 2.6481 14.4671 -0.8024 -#> -0.1062 4.0740 0.1192 -2.8975 -3.5445 -4.3332 3.5695 -5.8257 -#> -5.9688 10.4781 -3.7207 0.8464 3.3031 -2.0499 2.5568 2.4314 -#> 6.8408 -2.4286 -2.0812 -13.5789 -11.9642 1.9043 1.6696 -5.7336 -#> -0.3294 -4.0051 4.4825 8.7400 4.2389 6.1872 -4.8511 3.3600 -#> -7.3160 -2.2606 0.7169 -5.0866 -0.0417 -2.8604 -12.7086 11.6121 -#> -1.7032 -1.4316 11.1245 0.1705 9.2713 1.7239 -2.6730 3.3785 -#> 0.5772 -2.7065 -2.8858 -4.2171 -10.9184 3.6095 -0.5072 -4.4745 -#> -8.3830 -5.4281 -6.8301 -2.5117 11.5640 -9.2411 5.6765 8.3588 -#> 2.6178 11.9343 
9.2576 -3.4401 -3.8225 -3.3271 -12.3327 1.9006 -#> 5.9909 2.2253 5.8577 -8.6208 -2.9306 -6.7949 1.7518 -8.0359 -#> -3.3776 4.6775 6.3503 -5.2518 6.9190 -4.8524 -8.8138 -1.2051 -#> -6.7259 -4.4902 7.2270 7.9758 4.6113 2.5120 -4.4357 -6.3322 -#> 7.7574 -13.7695 3.9471 -4.6606 5.5693 7.0922 -10.3325 4.2199 -#> -10.8634 1.3126 2.9160 2.5213 6.1139 -5.9908 -5.1855 2.8433 +#> Columns 1 to 8 -4.2747 11.2209 5.3836 -1.4030 -5.9563 -0.9162 17.2545 4.5411 +#> 3.2806 -2.3690 1.9042 8.4447 8.6115 -5.9054 -2.2237 2.2706 +#> -4.5532 3.4310 -7.5214 -2.8339 -1.9221 4.4026 -0.9524 -4.1235 +#> -1.8922 -12.9170 12.2323 7.9637 1.5879 -1.4672 2.6015 -2.3395 +#> 1.7052 14.0402 7.7936 11.8928 -3.0232 2.0031 -1.2634 -8.4372 +#> -9.4591 -1.3585 -1.0784 -14.4314 -5.6974 7.1326 -9.7584 11.2040 +#> -6.7563 -3.1252 0.9994 -7.5600 6.6968 4.9823 -6.4021 -2.5075 +#> 1.2933 5.8467 6.7220 0.6309 6.9515 7.2260 -4.5833 -10.7179 +#> -14.1855 2.3998 3.8708 18.3877 10.7002 -5.7762 -5.2950 10.0083 +#> 14.0508 3.0589 -9.3336 -6.5368 -3.9655 -0.9708 -0.6588 6.5599 +#> -2.6133 -5.8309 -14.9207 -2.0724 0.5182 -0.1810 6.9787 1.1528 +#> 2.0274 9.1401 3.8578 5.4996 4.6371 9.2297 5.1012 0.9382 +#> 6.5537 15.8814 5.0876 -4.8474 3.5665 -4.4088 -4.2359 2.9431 +#> 10.6382 3.9061 5.5868 -2.7831 -1.9625 2.9950 -0.3136 -2.8474 +#> 9.9382 12.4973 10.1195 2.9316 16.6530 -1.8438 18.3835 -0.8610 +#> -3.9924 -2.8585 -17.5996 -3.5616 1.5926 -2.5963 -9.7335 -2.8453 +#> 4.4835 -10.9601 -9.5063 -6.7245 1.4008 -17.1639 9.7064 0.8067 +#> -5.6122 8.1782 2.0082 -17.2075 13.1548 9.5857 -5.4116 -9.3046 +#> -2.0921 -4.4457 -5.2193 -14.1155 -0.5663 -0.8361 2.7828 5.6409 +#> -4.9276 4.1458 0.8858 2.4367 -1.2432 3.1711 0.9293 -0.4187 +#> 0.0853 4.1620 2.4880 -4.1946 2.0009 -10.6949 1.2272 16.6223 +#> 1.4169 -2.6146 -0.2968 4.4078 2.2711 -1.5178 2.2351 4.9065 +#> -4.3045 0.0909 -1.2028 -0.6383 -3.2996 5.6568 -6.0963 -5.8985 +#> -20.9009 10.2937 9.3113 -3.3180 -3.0311 7.3176 -2.3713 3.4409 +#> -9.3364 -13.6321 -4.6866 4.3415 -9.5569 -4.7031 -13.1400 -2.8893 +#> -0.9151 3.0460 4.6602 -4.3565 1.2116 -0.4645 -4.1205 3.3254 +#> -6.5715 9.9600 14.7939 -13.0391 1.7620 4.8612 3.2664 3.8959 +#> 5.6417 -0.4934 -6.0265 -8.6849 -9.7323 0.3718 -0.3412 6.8578 +#> -9.4668 -4.3314 6.1339 6.5446 7.3414 5.5083 3.1474 1.9006 +#> 1.7123 24.7829 1.2456 -21.5154 9.4511 3.0373 -2.3033 11.0565 +#> -3.8991 1.1865 10.2420 7.7787 3.3465 -8.0858 3.4400 -1.7019 +#> 16.4395 6.8679 -12.3294 0.1245 0.7837 -6.7297 -8.5040 0.9146 +#> -8.2886 -5.1431 12.2854 10.0587 10.4088 4.0534 8.5897 -2.7896 #> -#> Columns 9 to 16 -2.6243 0.4328 11.1646 8.5593 -12.1996 -7.4265 -12.9429 7.7398 -#> 3.5453 -0.1579 -4.2605 -9.5878 -3.6614 5.4460 -3.9349 6.1239 -#> -3.2706 -6.6456 -13.3301 1.4546 6.8946 4.3316 8.8452 -5.5344 -#> 8.2053 -8.7390 13.9397 24.9905 4.2868 -0.4157 15.1577 1.2894 -#> 2.3995 5.4825 -5.9720 1.8767 -4.0682 -6.2251 -14.7602 -7.7949 -#> -2.3086 8.8263 15.2860 5.3245 -0.6444 -6.3166 -6.5798 -2.2219 -#> -0.9974 9.5613 14.1790 10.5027 -4.9540 -13.3136 3.9889 0.6169 -#> 4.0747 18.7485 -9.3609 -11.3180 -9.3307 2.9839 -9.8000 -4.7789 -#> -1.2814 -5.5160 5.1901 6.1370 10.5471 -2.6609 5.4535 -13.1699 -#> 8.7097 4.1678 -5.8543 1.4070 4.3947 -2.2228 -2.4664 8.3158 -#> 4.0793 5.4742 -15.4222 1.8686 12.2919 6.3672 -7.8565 7.6350 -#> 3.6448 6.1630 1.4843 -8.0472 -2.3761 -7.7439 0.7194 10.8694 -#> -0.4451 7.7842 -7.7088 -6.5198 -5.2164 -7.2539 8.5703 9.0153 -#> 10.1100 -9.1459 23.6299 -3.6850 -6.6768 -9.6936 -10.3292 -13.5748 -#> 9.9733 8.9384 -9.7610 8.3322 -7.7173 -10.8625 0.3825 4.6803 -#> 6.4227 -1.0801 
12.4142 1.6283 -1.2001 -4.3134 -6.8145 -0.7343 -#> 4.0345 9.0029 6.5919 -3.8529 -7.8487 -6.0298 -1.6113 -8.0233 -#> 0.3470 8.4495 3.4920 -5.8563 -9.9557 -10.6460 7.1741 9.1540 -#> 0.2047 10.9090 -1.1599 -6.6670 -5.3418 0.5331 -15.7860 -3.4584 -#> 2.2713 7.9691 -2.9790 -11.3593 -3.0517 5.9772 -3.5811 3.4882 -#> -1.9983 5.4185 -0.5448 1.1243 3.6822 -5.9691 1.3919 -3.8577 -#> -6.0237 1.4433 -0.1638 0.7065 -6.3657 2.9962 0.7895 -7.5776 -#> -7.1163 6.1097 -2.6729 16.1532 1.7291 5.4875 0.1021 5.3662 -#> 0.2978 3.3122 -3.9261 2.0635 -0.0566 -5.4560 10.9724 9.1025 -#> 1.3748 4.8877 8.7258 1.8500 6.0718 -2.0596 -7.8329 -2.4864 -#> 2.1345 1.4062 8.9350 -0.1680 0.5001 -3.2611 -15.8572 -11.3055 -#> 10.6560 20.8887 12.0689 -4.7632 -11.5247 -18.9330 7.3607 5.1821 -#> 11.8770 0.2722 -3.6730 -15.7264 -0.4359 -5.6856 -11.3708 0.7707 -#> -3.0294 9.8799 12.4789 0.2152 -15.0768 -6.8379 -9.5639 -8.6209 -#> 19.6157 -3.2185 -6.2283 -15.6918 4.8533 -2.8065 -9.8387 7.6355 -#> 3.2747 15.9104 18.6214 12.1918 -7.1561 -8.6773 5.4100 -6.6609 -#> 0.7128 -12.3952 4.7757 -10.6673 2.7547 13.9214 -3.6430 9.8891 -#> 6.0475 -2.3505 2.2329 1.5311 8.0533 3.1398 -1.4593 11.9712 +#> Columns 9 to 16 2.5876 -12.9979 7.7099 -6.2017 0.9518 10.2987 -6.6717 10.0881 +#> 1.4349 -0.8321 -0.5845 -1.1423 -0.3030 3.4329 -5.6098 -3.9766 +#> -11.3756 -3.2481 1.2326 0.2942 7.2329 12.1213 10.2287 3.0747 +#> 2.1961 -0.1428 1.1030 2.1117 -1.7190 1.1502 -2.0437 5.3869 +#> -2.5092 9.4117 10.9734 4.1672 3.3402 2.4821 -6.4350 3.0019 +#> 0.5083 -8.8514 -6.7338 1.5207 -2.0410 -9.5737 -1.7273 9.9099 +#> 8.3670 1.2663 -14.3858 -8.4964 -10.4892 9.1447 -3.2528 -10.1467 +#> -2.4563 2.7110 0.4576 -5.5571 7.6568 3.7240 4.5972 -0.6151 +#> 0.5692 1.1357 -3.2869 15.6175 -0.7590 -9.1329 10.9456 -2.4756 +#> -4.1670 -1.7299 7.5588 6.5154 -5.2341 3.7069 -1.7579 5.6123 +#> -8.6358 3.3087 11.5492 5.9000 0.7000 5.6203 9.5818 5.8684 +#> -8.3734 -9.0231 -7.2242 0.5823 -5.4014 -0.2007 6.0797 -7.0188 +#> 1.4658 -5.6203 -8.7261 -1.4989 7.8541 -9.6464 5.6319 -2.0984 +#> 2.3988 5.6698 1.9776 -5.4519 -4.6376 2.6983 -5.7701 -7.8565 +#> -3.5355 -7.5361 -6.0313 -6.8370 0.7085 -1.8911 -0.8698 -12.2137 +#> -10.3301 1.1440 -3.3687 0.3721 9.2846 -4.4569 5.2276 -4.2738 +#> 5.9532 -2.5153 3.9358 3.5199 -7.2063 -1.2784 1.4520 6.9584 +#> 0.8911 1.6841 -5.1877 -10.0137 9.5660 10.9914 -12.5710 6.1001 +#> -1.3461 -10.9042 -0.2153 -6.0340 1.0096 -2.3690 -2.5191 1.3392 +#> 5.1408 -5.5534 -0.6703 7.6078 -6.4556 -6.8282 -3.6198 2.6698 +#> 3.8401 -2.4434 -5.5857 5.3737 -7.8142 6.1887 5.7830 -3.0767 +#> -9.3553 -20.9373 -12.4017 4.6379 1.8046 -10.1554 -3.8313 -2.8380 +#> 2.2838 17.2925 -5.7846 -13.6586 -10.0679 6.2485 0.6100 -10.6211 +#> 9.4378 -0.9839 -5.7292 0.7477 -2.4847 6.3036 -1.3421 2.5304 +#> 9.0686 14.1414 -10.0648 -10.1142 -15.5967 -2.1761 -15.5058 -11.2852 +#> 3.1401 5.3112 2.2429 5.1124 -10.7240 -0.5680 -8.4167 2.0790 +#> -1.0588 -1.6112 -8.4336 8.7863 2.5908 -2.1235 -3.2482 6.9029 +#> -5.1237 -3.6513 5.2006 0.4776 3.2072 -6.1609 -3.1893 9.6612 +#> 9.7545 5.0189 -0.9617 7.2267 -15.4872 -0.7203 -3.4099 -4.8678 +#> -3.8151 0.4245 -3.3436 -6.5632 -1.0615 -2.0232 7.5643 -1.0563 +#> 3.0647 9.8594 0.1297 0.1789 11.1698 -11.7139 11.9365 -0.2432 +#> -3.7420 9.6675 8.3256 9.5601 -6.0883 -1.5649 3.6960 -4.0880 +#> -1.6746 2.7384 -0.1467 1.4261 1.1921 -2.6601 2.2365 -4.1107 #> -#> Columns 17 to 24 -10.7269 -7.0341 -4.5691 -19.3047 11.3635 2.1366 -11.1086 9.7337 -#> -1.3675 -0.4500 -11.2419 -17.4118 1.6026 -4.6512 -3.6940 12.8833 -#> 1.3415 2.8172 3.8470 -3.8784 -0.9783 9.3358 -1.6777 5.7717 -#> 
-15.0082 -2.9175 4.9480 -1.9858 -1.1864 17.0469 8.2187 7.8884 -#> -2.2238 2.6507 1.9936 -4.5235 -12.0722 8.0846 -5.2894 9.9181 -#> 3.4597 -5.0310 -18.5341 -7.4792 -1.1131 4.3999 10.0050 1.9466 -#> -10.2704 0.7731 1.3454 -2.3996 20.8438 -5.3910 1.9644 8.1886 -#> 6.5566 1.6228 -7.1471 -9.1481 -2.6575 -5.3536 -5.5653 5.4137 -#> -1.3179 4.3835 -3.4015 -1.2802 -4.4014 -6.5830 11.9631 -9.0388 -#> -1.6018 -4.5212 7.1977 7.7312 2.8460 4.3718 -3.1728 -3.9669 -#> 5.3512 -3.0614 -7.0596 -3.8677 -1.1902 2.3101 -14.7512 8.5772 -#> -4.0062 0.7587 -3.6666 -4.0924 1.1767 -12.3443 -1.2912 0.3650 -#> -10.4419 -1.7139 2.5798 -6.1205 3.7598 7.3134 -6.3118 9.1148 -#> 4.5062 6.7279 -0.1350 3.8005 7.8222 8.7671 3.8129 -8.1235 -#> -17.0484 0.8677 5.0648 -11.1856 18.0983 -2.6071 1.6093 18.9739 -#> 4.1095 -4.1844 -1.1886 -8.2871 -4.1897 15.7950 0.3164 7.1537 -#> 6.0536 -3.2139 3.1367 -4.3572 -3.2948 19.5925 -6.7900 7.3236 -#> 9.3605 -2.2390 -7.1153 -11.1644 2.4283 -5.9793 14.6581 4.1310 -#> 11.8628 -5.5734 -1.3126 -0.1468 0.7981 4.7050 -6.4429 -5.4245 -#> 3.4164 -18.6147 -5.6152 -7.5493 -2.3179 -8.2711 -0.4826 -5.3061 -#> 2.7276 0.1413 -2.7000 -2.6496 2.5378 -9.9883 7.9165 2.5381 -#> 6.2961 9.9005 -2.7543 2.1022 2.6281 -0.8926 -2.7783 -13.4023 -#> 0.7441 -3.5912 -10.0708 7.8162 3.0236 -3.2449 1.3602 -10.5133 -#> -7.4043 -8.2482 1.9733 4.5619 -1.3130 12.2857 -3.8850 -7.3048 -#> 1.8633 0.2468 10.0071 13.2876 2.1962 6.7229 6.5548 -1.1695 -#> 5.3615 1.4604 7.1541 3.6082 6.6734 3.7640 3.4410 -0.6691 -#> -11.0977 -0.3524 -5.1584 -6.6272 7.4835 18.1811 1.3001 18.6657 -#> 9.3068 -2.9136 17.1541 3.0779 -8.9603 21.3401 -16.4021 -6.1953 -#> -2.7839 3.9002 -1.7010 4.1217 13.8331 -5.2049 7.4136 -2.0544 -#> 4.3481 -7.7533 4.3164 -6.1902 -6.0672 8.6225 -16.4504 0.4909 -#> 1.8774 3.1294 -6.3901 2.4610 15.0536 -11.9390 11.1065 -4.4631 -#> 2.7281 -3.2621 -4.2025 0.7014 -20.1567 3.1274 -1.7161 -10.5776 -#> 1.0278 -7.0991 -9.2778 -4.2494 1.0259 -11.1924 -0.1329 -13.5445 +#> Columns 17 to 24 6.6184 1.1030 2.0648 -10.7212 5.4404 10.1149 -8.6124 2.8432 +#> -7.0805 -3.1861 9.1699 0.0404 -5.1002 -3.7227 4.1894 3.4530 +#> -1.3490 2.6690 -7.0640 -10.3683 3.5994 4.2780 3.5495 2.2408 +#> 7.7307 -7.0473 -3.1147 -1.7412 -2.3436 -0.6346 -2.6097 10.7840 +#> 8.1260 14.5299 11.3759 -5.2874 4.2734 7.3765 6.1671 -11.4377 +#> -0.7681 -0.0344 -5.9003 -3.9739 -0.6213 -8.2776 2.9905 -0.3785 +#> -15.7247 0.9331 1.6161 -1.8922 -3.6438 2.3390 -1.5774 -3.8441 +#> 3.8893 -4.3369 -10.8468 -10.6531 -8.9493 -2.2181 -4.9513 1.9359 +#> -14.2442 1.5807 16.8771 11.2701 3.9620 -11.5948 8.4619 3.7181 +#> 1.9457 1.4270 4.2615 9.2280 2.8886 -0.5203 15.4807 -4.5500 +#> -2.7279 -15.9951 -13.8036 7.8027 5.3600 8.5416 6.4683 0.5478 +#> -2.9861 4.3129 -13.5209 -10.1400 -4.1135 8.4483 -7.0568 -7.2461 +#> -0.2546 -2.3467 4.5596 -2.6388 13.6028 7.3492 3.1828 0.8961 +#> -6.7606 4.7067 23.2489 1.9132 -2.8624 2.3499 10.2202 1.2802 +#> -2.9727 4.5097 -0.6156 -8.9329 -8.0880 6.7563 -9.0279 -4.5531 +#> 5.1356 -1.5607 -5.4880 -5.0374 -2.8647 -4.0657 -3.0986 -4.5374 +#> -4.0223 -8.0928 -1.8474 8.2301 -0.7528 18.4746 -9.7340 -3.6031 +#> -0.9243 -18.7331 -3.9924 -11.7917 2.9637 -14.7175 4.8231 -9.0357 +#> 7.1258 -11.1329 -9.2663 0.7886 -0.6687 -2.3403 -3.1528 -1.9531 +#> -5.4994 1.4657 11.8501 -2.5899 5.3709 2.2074 6.4882 5.5628 +#> -0.0761 2.5887 5.3327 5.6399 -10.0858 11.0612 7.8867 2.7175 +#> 4.8096 8.3581 -19.6857 -7.2523 -8.9552 6.5619 -9.8403 2.6775 +#> -4.4180 -1.6368 6.2281 -5.5233 6.8090 -1.5552 -3.1684 -11.6280 +#> -4.9894 1.5259 -2.0899 -8.9529 -0.1386 -6.0879 -0.8879 -0.1137 
+#> 2.6106 -2.8868 8.2288 -1.8687 5.5861 -2.8573 4.1611 0.6215 +#> 7.9863 3.1406 11.3741 -0.3891 3.8970 -5.1409 12.6278 -0.3163 +#> 3.2934 -3.3294 9.9129 -8.2976 1.8597 0.2873 7.9849 -7.9432 +#> 5.7547 4.0399 1.2295 2.5538 18.4518 1.5167 9.7285 -6.8311 +#> -2.4178 -2.9201 4.9999 0.3869 -4.8894 -3.3738 -0.9356 7.1583 +#> -2.7385 -2.6651 -1.8665 4.7754 3.9906 -4.9644 3.7301 -10.9044 +#> 2.8843 6.5246 20.2371 1.0709 0.0653 8.1621 2.2304 7.0330 +#> 4.4385 10.2909 3.4981 21.4640 18.0731 2.7552 4.3355 4.4733 +#> 2.2048 -7.9052 -5.1967 10.6472 -4.4152 -2.3398 -4.9826 3.4552 #> -#> Columns 25 to 32 -1.7462 -1.6269 -0.7915 -0.8197 -3.9971 -10.4806 -10.2680 -2.7534 -#> -0.2765 6.7450 -2.0712 10.6486 -13.9731 16.2608 -5.7437 3.4435 -#> 7.3143 4.7691 0.7026 2.9372 -7.7866 -4.2075 6.0117 13.2946 -#> -5.4815 11.9231 6.8967 9.7563 15.3568 -1.2720 16.1653 -1.1744 -#> 3.4551 1.6304 4.1245 0.6843 4.5899 -6.6570 -6.3757 3.0269 -#> 2.4357 -3.9644 -7.9284 -0.0059 2.5904 4.4955 7.8125 -0.6694 -#> -5.8358 -7.0826 -3.6999 -0.5470 -1.9090 1.5121 7.6959 -6.5233 -#> 6.5241 -0.8224 -16.7985 -0.5287 10.2887 -3.1625 -11.5654 0.9984 -#> -0.1712 0.7338 -4.5138 -9.9664 -2.2649 0.5123 -5.3444 -5.0459 -#> -5.5832 -4.8549 5.5569 13.3486 12.9264 -5.5433 1.9425 4.5420 -#> 1.1549 -0.1853 -6.9617 1.7310 1.1553 11.2502 -7.4437 12.6783 -#> 0.9435 -6.3769 12.0910 9.3257 -1.9542 -4.7526 -1.3705 -0.8283 -#> 5.5251 9.6266 -0.4007 -1.7914 -9.0324 1.4803 18.9263 0.3953 -#> -8.7347 -4.0501 8.7921 11.5538 -12.4976 1.0826 -19.7760 -10.8927 -#> -2.3679 4.2283 3.5410 7.1489 -13.3737 2.6299 13.3520 -5.4859 -#> 2.0640 -0.0326 2.0723 10.3406 -2.1520 3.9185 1.0878 -4.0766 -#> 14.8852 -2.2712 -14.5972 -7.5811 -3.3774 -6.0105 14.9241 -8.3811 -#> 13.5728 11.6081 -1.0403 11.6453 -8.1370 3.6349 24.3273 3.0830 -#> 3.2080 5.0707 -9.2790 -8.8123 4.9685 -1.6484 -4.7333 5.8462 -#> 2.9742 -10.7546 -12.3970 -0.1404 0.0318 -1.9171 5.8307 -11.9025 -#> 1.2260 -2.8130 0.4420 1.9131 3.8702 -5.9096 1.6282 2.5812 -#> 1.4588 5.4399 -0.8877 -2.4271 -5.0846 -3.9904 -6.7946 1.9089 -#> -4.8377 -9.2531 5.5385 -5.5002 3.8937 7.2787 7.1057 2.6130 -#> -8.5997 -3.5724 -3.3184 3.1191 15.3997 9.4976 10.3288 10.3277 -#> 3.6108 -1.1064 -4.6679 -4.5344 -8.6885 -1.0672 8.9834 4.1420 -#> 9.4999 1.1589 -11.8681 -11.2911 -9.8568 -14.2836 -5.4645 -7.7589 -#> -4.2400 -2.4742 -1.2214 4.4757 1.6532 0.8790 14.6292 0.4674 -#> 8.4445 15.9800 4.0901 -7.3633 -3.6679 -3.6021 9.9330 12.6796 -#> -7.9000 3.8494 1.4938 0.4920 -3.1299 4.9682 -6.1952 0.9764 -#> 8.7349 -1.2985 -2.5513 -14.5349 -6.8205 -1.1748 -7.4282 -1.3516 -#> -3.6078 -7.8039 2.0198 1.2103 8.5414 -3.6797 -5.4959 -2.0035 -#> 8.1339 -1.8808 -8.5537 2.8481 5.4491 7.7438 2.7956 -0.6117 -#> -8.0023 -0.9707 6.7169 0.4876 6.8987 8.7232 -2.7651 8.5866 +#> Columns 25 to 32 -6.3050 4.7238 -17.8670 -0.5880 8.0363 -8.8384 6.7973 10.1670 +#> 0.7819 -8.7086 -0.5493 -1.5067 -6.3630 -3.1176 -3.8060 7.1644 +#> 12.4999 0.5950 -7.2478 4.9652 0.2354 -10.5477 -8.1457 -8.5105 +#> -9.0250 1.0340 -5.4598 -3.3421 -1.3794 -7.9057 -10.9088 0.5491 +#> 0.3745 -7.6168 -0.4208 -6.3234 -5.2446 -0.4377 9.6929 -3.7409 +#> -3.3252 9.8781 0.0665 -2.0162 1.1815 10.7428 11.6444 -6.6607 +#> 9.4381 -5.2888 5.7532 0.7087 -8.6443 5.2028 0.3598 2.4486 +#> -3.0277 3.3279 -12.2310 15.2771 4.9515 -13.7606 -2.0435 -3.9806 +#> 13.1074 -9.4906 -3.3137 -7.0977 -0.6819 -3.5135 -1.4732 13.4122 +#> 3.2798 4.9833 6.4333 -2.1565 7.3589 1.2299 -2.4760 -2.0664 +#> -5.1800 0.7813 5.0688 10.7303 -14.3422 -13.1902 -5.4290 -0.1870 +#> 5.4465 2.4001 -12.8189 -8.4338 8.4190 1.8156 0.8274 
11.9054 +#> -2.0857 10.1048 4.2619 -6.7065 -3.3476 -1.1451 -2.0377 -1.7887 +#> -5.3566 15.0664 -7.1205 5.9788 -9.1359 -7.9899 18.9300 17.3375 +#> 5.4603 -1.4439 -7.1876 -3.2696 -1.5934 7.0721 12.3014 17.3448 +#> 7.9303 1.3446 1.5933 12.3317 4.6062 4.2817 -4.2334 -12.1762 +#> 8.1221 3.9732 -0.2661 -15.1008 -0.9461 1.0207 -7.1194 -1.4479 +#> 9.7294 1.1051 7.1813 20.9401 -14.5844 3.9706 14.4483 -1.4965 +#> -3.9381 5.8195 2.3962 11.8602 4.9107 -2.3144 -1.4896 -12.3850 +#> 0.5819 5.5068 12.9107 -5.0659 0.3248 7.8091 -4.1615 -0.1643 +#> 6.0217 -1.3741 -3.2747 -1.1051 8.2488 -1.2873 -11.3907 4.4985 +#> -5.7118 4.0587 -0.3100 -5.7755 11.3952 11.5330 -1.9617 -9.1594 +#> 5.7893 -4.9262 -10.4986 -9.1852 -10.4091 -6.0330 0.7095 11.6377 +#> 2.5615 6.0328 -3.8714 -2.1092 2.2874 3.8614 -0.0351 3.4032 +#> -4.6734 3.5800 0.3835 -5.4194 -11.5737 6.3313 -6.8266 -9.3864 +#> -1.4295 -7.0318 -4.3435 2.8048 3.7325 0.2020 -0.6500 -0.7863 +#> -6.9036 -0.0105 -5.3543 1.8927 -0.5046 5.5343 10.8065 -1.0212 +#> -3.9819 0.0611 -4.3249 9.5066 5.6024 6.1746 -5.7912 -14.9529 +#> -2.0487 10.6287 1.2395 -4.4636 -0.7882 0.2121 5.6188 1.0587 +#> 3.3456 -3.7282 -4.3858 13.0972 -10.8682 -2.4497 10.0444 16.6966 +#> 2.6152 7.4653 -5.2857 -2.3116 -4.8892 -0.5763 5.0725 1.6763 +#> 4.0750 -2.5667 1.5579 1.7010 -2.0998 -4.2782 -8.7313 -6.9343 +#> -9.7400 -5.8549 -4.2980 1.2974 5.3345 -1.1588 -7.3566 6.2945 #> -#> Columns 33 to 40 3.4080 10.7186 5.5730 4.6531 5.7616 3.4705 -11.4040 12.9549 -#> 7.7377 6.0998 -1.5554 4.7076 -8.1016 -9.8182 3.8232 -0.1717 -#> -5.6604 1.0921 -11.5067 -0.9772 2.9314 -7.4592 -9.5098 -3.6219 -#> -2.3808 -4.4176 -0.4573 5.1452 -4.5173 2.6218 4.3377 -9.7166 -#> -3.7069 5.9415 -9.7631 -3.8982 -8.0165 -2.5303 -1.2263 0.7284 -#> -0.3598 -6.7104 -0.3492 11.4228 11.0449 3.6593 7.6630 4.0337 -#> -5.1844 -7.9579 -0.3715 3.9422 3.8204 7.4046 -11.1925 0.5115 -#> -7.0582 -0.8884 6.2280 -2.8426 -14.4377 7.6894 4.5536 2.4210 -#> -1.4600 -10.2162 -8.5577 1.7303 4.5618 -11.8549 1.7316 -11.9648 -#> -4.1447 10.3017 13.7237 -4.0415 -4.9265 2.7495 3.8622 -6.7376 -#> -11.5153 2.8323 -6.5427 12.0739 -0.6204 4.8624 -1.5153 3.3605 -#> 13.2823 5.9974 1.2621 -4.7486 1.5167 5.9744 -0.3291 0.6503 -#> -1.0654 0.5911 -4.0941 8.9973 1.0366 6.0193 0.5471 0.4281 -#> 0.2312 -10.8132 1.8276 -0.5442 -4.3519 8.0915 4.7438 -0.1606 -#> -0.0113 2.5344 -11.3341 -2.8751 14.1932 -4.1816 3.7660 1.8073 -#> 16.3061 -0.7111 -8.0503 2.8067 6.5130 -9.8521 -4.1031 1.1970 -#> -10.5519 0.9086 -8.5823 -0.0814 8.8444 11.8856 1.2298 4.7355 -#> 10.0114 -5.0213 -6.0665 -1.6630 7.1819 -0.7356 4.0499 -2.1666 -#> 0.6757 -0.3162 -3.5780 -4.7094 -2.8122 -1.5863 -1.3931 -10.5494 -#> -2.0870 0.9603 7.7240 -4.6320 5.4591 0.2743 14.8810 -2.1514 -#> -1.8110 1.1190 -2.2812 2.8365 -0.6039 1.2504 2.5801 2.9844 -#> 2.4130 -9.4174 -8.4426 -12.0919 7.1439 6.6067 -6.2722 2.3164 -#> -4.1834 11.1998 3.0447 2.6957 22.4190 7.6576 -5.2527 18.8177 -#> -20.2723 1.3253 10.8418 9.0811 6.4476 3.7484 16.8607 -14.8302 -#> -2.7513 6.6503 -12.5883 -1.9143 13.6749 -2.2026 -0.0576 7.5686 -#> 0.3314 -11.3378 -0.9537 -6.7157 3.6694 -7.5760 5.5724 6.0488 -#> -8.1499 -5.8161 -9.8587 19.9884 2.0863 1.5576 -4.2066 -0.4436 -#> -7.1017 1.5927 7.4132 -3.1600 -16.2266 17.7469 11.1453 -3.6420 -#> -3.5196 -6.1302 -17.1096 1.9950 12.6488 -7.2172 1.1653 8.3086 -#> 3.9420 1.1401 4.9253 8.3876 -3.8281 9.5859 8.0676 0.9702 -#> 2.5050 -6.1517 4.7386 5.9677 9.2732 2.9076 0.0195 -0.0915 -#> -10.9789 -1.3337 12.6203 8.5702 -4.3448 -3.6564 4.6815 10.8041 -#> 3.2875 -0.0805 18.5864 12.3418 1.3776 0.4639 10.9487 -5.0897 
+#> Columns 33 to 40 8.4627 9.6192 0.5056 -4.7102 11.6024 -2.0951 4.1913 -1.5111 +#> 2.8340 -6.4928 6.8795 2.9201 -2.1075 -1.8531 0.5551 2.0819 +#> -7.5933 -2.2260 -9.1166 -3.2124 4.8456 -0.2283 1.4762 -0.7194 +#> 6.2248 -8.5167 3.5322 3.2227 -5.6294 -0.3967 10.2963 10.5078 +#> 3.1985 6.7279 4.6912 6.3275 0.2253 1.7753 -8.2086 -12.8037 +#> -16.7505 -2.0902 3.7017 -5.0439 -2.6029 10.1196 -1.6105 -14.3003 +#> -15.0534 3.9078 -5.5869 -11.5304 -3.5092 -7.4240 -3.7589 11.6518 +#> 2.5050 -12.6827 -12.8706 -1.9666 -5.5736 -1.6785 1.3554 19.1239 +#> 1.7287 1.2349 14.3629 13.1778 -16.3137 9.5405 14.7577 -9.1000 +#> 0.4104 -5.2002 4.7632 0.3833 0.8436 6.1623 -3.5593 -4.0756 +#> -3.1540 6.8193 -8.8962 20.6007 1.0029 -7.6812 -7.1488 1.4358 +#> 6.6526 -2.8659 -10.8030 1.4553 -1.1354 -3.5005 -2.7119 9.1697 +#> 6.8097 0.8033 -10.0922 -8.6253 11.5531 7.4769 8.1791 12.6088 +#> 11.3124 1.5183 3.8205 10.4646 5.2921 0.5983 11.7163 18.4572 +#> 10.9357 2.8577 -6.7735 -3.1426 2.6630 -1.0412 5.0757 0.4454 +#> -13.6616 -7.8840 -3.0935 2.3106 -2.6839 3.9880 4.8708 1.0188 +#> 7.5740 5.5523 5.7893 3.9466 5.6309 -6.2144 -10.4352 -3.0217 +#> 8.2918 -6.7735 -13.7922 1.1264 1.9380 -8.5805 -1.6173 10.0197 +#> 0.9718 -16.4808 -4.5497 -5.4669 2.1980 -0.1691 -9.4305 0.6820 +#> -5.4696 1.2961 8.7297 -2.2501 -3.3067 4.1099 4.4256 -8.5122 +#> 4.1438 -5.0817 2.8268 -2.3660 1.0834 -5.4129 4.7049 8.0334 +#> -14.3751 -6.6880 -6.2389 -13.0992 0.7792 7.2334 4.0696 -1.9419 +#> -7.2310 5.2424 -4.8411 -1.7995 -4.1335 -13.3840 -7.6249 3.8943 +#> 3.3899 0.2378 -3.0897 -2.7173 -9.5046 -7.9173 2.6721 2.9327 +#> -6.8260 -5.3483 -3.2417 -2.5692 -8.2699 -9.4297 -3.3118 -2.2543 +#> -7.0935 2.9322 11.5962 -3.1335 -2.4853 3.7401 1.9402 -8.2695 +#> -0.4618 -1.1653 3.3451 -14.2864 3.5546 10.7668 -0.5330 -6.3669 +#> -11.9906 -2.9861 -3.1784 7.9281 -6.7173 -1.0408 -4.9628 -16.1489 +#> 11.3069 10.8252 8.9816 -5.3561 -4.4633 -2.5525 -3.4003 3.8927 +#> 9.1441 3.4465 -8.9527 6.7487 6.2379 0.9032 0.3956 0.1699 +#> -2.6357 9.3995 8.7429 1.9425 1.7303 4.3151 -0.6646 7.0418 +#> 0.5546 -4.8317 0.9649 17.4945 -7.4146 3.6819 -1.2988 -6.8112 +#> 14.4165 -3.7656 -0.7285 -5.3543 -3.2077 -15.4640 -3.7080 2.8010 #> -#> Columns 41 to 48 4.5416 -4.9024 8.8311 10.3463 0.2651 -8.4339 -6.6832 7.2281 -#> -10.1922 -8.5999 0.4567 1.2905 11.9904 8.3649 -0.2820 -0.5000 -#> 2.1755 -3.7282 1.3097 2.5777 7.8753 -2.0035 14.1423 6.6787 -#> 1.1655 0.5492 4.4501 0.3171 -5.4586 2.6379 0.0697 -9.4739 -#> -6.2303 -14.1027 0.0100 -3.9314 5.2509 -8.1181 2.6001 1.2095 -#> -6.8354 1.3839 10.1818 6.8880 -11.1387 -13.3290 2.5360 -2.4812 -#> 8.3362 9.4515 4.1199 -9.3917 -14.5917 1.4332 0.4153 -3.2342 -#> 0.0806 0.2949 -9.1927 -16.4414 5.4621 -5.5275 -6.3721 2.0919 -#> -1.5245 1.7350 -2.6612 9.0850 -1.4021 -0.8051 8.9440 5.5216 -#> 2.2669 1.4136 2.1153 1.3907 -1.9028 3.3937 5.5590 1.6864 -#> -9.5550 9.3133 -0.5917 -6.6588 8.5990 -9.0556 -1.1651 0.3522 -#> -0.2138 1.6576 -7.4984 -7.4907 -8.1598 2.7208 -5.3047 4.1849 -#> -8.7061 2.1911 11.3166 -7.0701 -12.3447 0.3809 -8.7935 -1.9093 -#> 5.8310 -3.7924 -14.1226 -0.9867 1.1463 -12.4631 -7.0898 -12.4898 -#> -4.3149 6.3384 -2.0539 -0.0828 -16.0012 -1.3459 -0.2730 -11.1067 -#> 0.2034 -1.7032 -7.7734 4.6387 -4.2106 -1.3526 -4.4212 -3.4719 -#> -12.2013 -1.7224 3.9268 3.3508 -8.4421 -6.3359 -12.8927 -2.1470 -#> -21.1804 6.3041 -4.5008 -0.5402 -20.8187 0.5318 2.9913 -13.6855 -#> -3.2797 7.9700 -0.3886 -4.6745 2.1036 -4.6107 -4.7608 7.6923 -#> -15.8307 8.7412 -0.0962 7.8266 -13.6592 -0.8376 -7.8022 -13.3426 -#> 3.4127 -3.5618 -6.9771 2.4669 -2.9717 
-8.7935 2.0971 -7.9943 -#> -5.0785 -3.3369 -9.3461 -5.6587 -0.6787 -3.1871 -9.3324 -3.0281 -#> 2.5578 21.7349 15.3899 10.4132 -2.5514 1.7782 12.5657 -6.7548 -#> 8.6913 11.6463 18.1946 0.7303 -4.4140 -0.7748 4.9841 -15.1897 -#> -11.2238 6.7093 4.6444 12.6038 -5.5794 -10.4248 4.1176 -1.7744 -#> 1.6525 -3.1180 -0.3574 17.7211 -0.1144 -12.3549 -4.0452 -5.5704 -#> -4.9651 13.9408 8.6249 -11.5936 -16.6941 -5.3931 -3.6881 6.0155 -#> -5.4065 -2.5217 1.4385 -0.3922 1.6123 -0.9188 -7.3587 -2.3773 -#> -8.6857 -5.0989 -6.1459 -7.0026 -8.0787 -14.5593 -10.4636 -12.8228 -#> 2.6677 -4.7651 1.0568 1.3425 2.7920 2.0309 -8.7960 8.8150 -#> 9.0281 12.5455 -2.5788 -12.3628 -11.9505 -4.1404 -0.8740 10.5614 -#> 3.1092 -4.0124 7.5946 5.5313 -0.2117 -1.3845 8.1836 4.1690 -#> 2.6316 0.1588 6.9737 10.9898 3.5931 3.1511 4.9429 -1.8327 +#> Columns 41 to 48 -10.8241 -12.5592 9.8794 8.1459 -8.0249 -5.3232 -7.3662 -6.6561 +#> -1.9464 8.5595 -3.7384 1.7510 15.6552 -1.0824 -5.7959 -3.1648 +#> 1.6102 -9.2710 8.5078 1.5640 8.5394 6.6464 5.9841 -0.9512 +#> -6.0430 10.6005 -3.3871 -0.5440 -7.9448 -3.5804 2.1058 13.3017 +#> 2.1819 -4.2182 -4.0471 8.4911 7.0756 -7.7260 -7.3000 -8.5854 +#> 2.1067 -7.5725 -8.9682 4.3029 2.1925 -1.4281 19.4983 14.4373 +#> 2.9312 6.4773 1.3713 -10.6200 6.4025 2.9545 -8.5119 8.9705 +#> -1.7926 5.3045 5.7370 0.5968 -10.0502 2.1086 -0.9249 9.5904 +#> -1.8064 14.1006 -1.9522 0.5508 4.0472 -1.6355 -2.1794 -10.7252 +#> 6.0747 -8.3286 -7.3287 4.2511 0.1022 4.0733 7.1620 8.2707 +#> -14.8316 0.0931 3.3676 1.4202 -6.7653 -7.9898 -6.8231 -3.1254 +#> 2.4596 -1.6326 6.1167 8.8923 -3.3707 -6.6921 -7.9243 -6.2614 +#> 0.7403 -8.1644 2.0377 8.2480 -6.3439 0.9885 20.8536 5.7158 +#> 1.1119 3.6225 0.1497 -11.9439 -12.5551 -5.3223 -1.1399 10.8148 +#> -0.8456 3.9264 8.1905 -1.7234 3.0473 3.4385 -2.2770 0.2595 +#> -7.0555 2.3944 -2.5763 -5.5024 -8.9512 6.8928 4.5017 -5.0469 +#> -6.1114 -7.5485 8.2868 4.4023 -10.1372 -1.9716 -12.8082 -6.7290 +#> 1.0195 11.0753 0.1624 5.6988 -9.9563 1.7751 -5.6086 7.5327 +#> 2.3055 -4.6038 1.0535 12.0757 4.9606 -2.2908 2.2219 3.1930 +#> 0.1517 -8.4319 -9.5501 5.2974 5.1445 13.2255 -5.7966 10.5574 +#> -6.7888 -6.4484 -1.8910 -2.4853 8.7156 -4.2482 -0.8505 -6.0441 +#> 3.6000 -9.1188 2.1149 2.0131 9.8010 -0.2025 16.7531 0.1040 +#> 3.8869 6.0433 3.6712 2.9468 -7.8337 5.9737 -9.2323 4.2390 +#> -2.8802 6.6061 3.0407 4.1871 10.1265 -10.4854 -5.2429 2.2165 +#> 2.8970 4.7978 -7.9631 -3.6446 3.4185 9.8806 -11.0710 5.4011 +#> -0.0199 -3.1293 -11.8688 4.2835 2.7631 8.6910 3.0006 7.1971 +#> 4.3071 -5.4259 -6.5849 11.6391 -6.4536 -0.3236 2.7658 6.5286 +#> -0.8604 -4.1629 -11.3665 7.3753 2.2787 0.6569 5.8722 1.9402 +#> 3.4090 6.8764 1.7934 -1.8218 2.9799 5.3587 -8.9751 -6.0064 +#> -2.4390 1.8613 7.0550 8.3929 -4.1681 -14.5310 10.5581 -1.6691 +#> -9.1057 -1.2082 -1.4793 1.9302 -7.6886 13.2929 -1.5715 -7.4436 +#> 10.1393 5.8404 -3.8208 -5.8047 -4.8160 -3.8602 4.4829 0.5871 +#> 2.3434 6.6981 7.1134 3.3371 1.3401 1.0295 -8.4443 -9.2548 #> #> (16,.,.) 
= -#> Columns 1 to 8 2.6537 10.0281 -1.3741 0.9900 -12.0867 -4.0296 -7.6636 0.1825 -#> 2.4319 10.9279 6.3170 -3.5904 -3.7944 -9.5351 3.8456 9.2553 -#> -7.8803 -3.3544 -1.3375 -3.0423 -1.6107 9.5581 -4.5955 2.3276 -#> -4.4451 -0.2926 3.6284 7.7152 -2.6448 14.0143 24.4411 -11.8132 -#> -7.9960 8.9860 -4.5813 7.6282 -6.9701 1.1402 -2.3306 -4.5071 -#> -4.4226 12.0704 11.6664 -6.9571 3.1467 -4.9787 5.0473 -10.9933 -#> 11.2864 -1.5729 1.7127 9.4951 -2.6290 8.0230 0.0378 -8.6601 -#> -9.4625 3.9488 -2.1094 9.5814 -13.2363 -5.8638 -6.0246 -2.4959 -#> -0.1048 -13.5206 6.7469 -4.3198 3.5434 -0.1837 -0.8146 6.3350 -#> -3.9342 7.8975 1.1074 10.9452 2.6244 -1.7462 6.9768 -5.0688 -#> 1.7446 -0.1351 -0.6821 2.8836 -5.9138 7.9013 4.5623 14.5249 -#> 6.4212 0.7026 -1.7461 -4.0589 2.0305 -1.0969 -1.0820 -0.8338 -#> 0.2226 10.2770 -2.7988 -6.5288 -9.7603 3.7378 -2.5321 -3.4819 -#> 3.0488 -11.7935 10.9536 -1.8877 11.9208 2.3212 -2.7537 1.0941 -#> 2.1382 5.3092 7.1833 -6.4094 1.9917 3.8928 -3.1746 0.3565 -#> 8.1730 0.7460 8.6897 3.0966 5.1147 -2.4684 -0.6720 -0.8584 -#> 9.6087 10.6647 -0.1727 1.2635 -4.7348 1.4423 -10.2314 2.2201 -#> 5.6906 -5.4915 6.5053 -7.2581 1.7664 -2.1022 2.1175 -0.0348 -#> 9.1430 1.9296 -7.5327 8.4665 -3.0810 1.2409 -4.6988 4.8817 -#> 1.9465 2.5159 1.7587 -3.4185 -3.5874 -3.4295 -1.3687 2.3190 -#> -6.6130 -9.0796 -1.8347 2.6115 -1.1207 9.5405 -3.5588 -4.4271 -#> -4.1785 2.8673 4.9747 -6.1164 12.9784 2.4537 -4.2924 3.0161 -#> 8.0232 4.7341 1.0049 -3.2322 8.3828 4.6395 -7.5344 8.6833 -#> -4.2088 2.6784 0.1928 0.3867 -0.6369 -3.5143 10.9183 5.2169 -#> 8.9835 -6.3049 -4.8523 -13.6846 18.6929 5.5845 -3.0672 5.3202 -#> -2.9213 -3.7387 5.5182 -3.6154 1.3478 -1.7320 -1.5216 -6.9537 -#> 9.2671 -3.2407 7.5714 14.1982 -3.3357 1.1475 6.5716 -10.1794 -#> -4.1577 8.2047 4.1041 12.3912 -11.5370 -5.1664 2.5799 3.8036 -#> 3.8338 -5.2952 6.7403 4.5124 13.3406 8.5143 -11.3196 7.2957 -#> 3.7297 14.7927 -2.2709 1.0681 -16.8867 -19.4837 -11.9069 5.0476 -#> 11.2957 -5.4608 0.8200 7.4871 -1.2263 -0.4746 -6.9441 2.1174 -#> -9.0418 0.4256 5.4519 -12.3165 6.7599 -6.1433 -4.6055 1.1761 -#> -1.4819 4.0796 0.9065 -0.9019 5.4088 -7.9440 14.5377 9.0100 +#> Columns 1 to 8 5.1261 -2.8450 -3.2382 -8.7989 3.0773 -7.4989 -4.2167 3.6598 +#> -1.0038 0.9312 11.1684 7.0661 -8.1220 6.7760 2.8751 -2.0495 +#> -2.1045 -2.1887 -0.3678 3.3220 -2.1537 4.2073 5.3580 -13.6370 +#> 3.2350 -4.1579 -2.9122 -5.1370 10.1330 -3.5221 -8.0732 1.8269 +#> -4.7509 8.8498 -8.5420 -10.2327 -19.4137 0.6527 3.6232 -10.9135 +#> 5.5381 -5.6384 3.9632 3.2175 -3.2445 3.2333 5.2998 8.4650 +#> 4.0765 -7.3636 4.8385 12.6403 -8.2223 10.1804 8.8325 -6.7026 +#> -3.9838 -4.9200 -10.5703 -2.4202 7.1313 -4.8207 3.3515 -9.3286 +#> -8.5621 9.8314 4.7020 9.0598 5.0971 -13.7285 -9.2223 13.1704 +#> -0.1157 -4.1365 5.5928 -9.7817 -1.4935 -5.3224 2.8611 15.7890 +#> -5.2081 -1.9686 2.7150 -3.7723 13.0990 3.6701 -0.1524 -6.8796 +#> 13.2258 8.9368 3.3121 -7.0790 4.7623 -2.0034 -2.6128 -1.4503 +#> -11.2120 7.4422 5.5518 -0.1588 4.8228 9.8168 -6.8340 -1.7672 +#> -3.4332 -3.6753 8.2436 -1.9458 -16.5379 -10.9161 5.3190 5.8212 +#> -1.1222 11.4132 9.4151 5.9887 -8.0350 -5.6568 1.0801 2.7832 +#> 1.9269 10.6649 -6.1271 7.9513 9.5201 0.8032 -1.9908 2.2831 +#> 8.1346 2.7858 7.9123 -6.5829 11.5281 -4.9781 3.8680 -2.0012 +#> 10.2440 -0.5996 4.3847 7.8699 4.6680 1.2474 -6.1953 -6.1151 +#> -2.1528 -5.0708 -4.6581 9.6572 9.5949 10.4371 1.1159 3.0433 +#> -1.8566 -12.8853 3.6096 1.2526 -6.7440 -1.4534 1.3010 3.9254 +#> -1.6968 -3.8088 -0.5291 2.9351 -5.4835 4.5089 0.3231 6.0435 +#> 7.1190 
-0.9704 -2.2517 -4.2518 -0.0369 -0.2925 0.0045 6.5996 +#> 1.3046 4.8211 0.8576 7.0307 7.2529 10.4599 8.3072 -6.0055 +#> -3.0841 -6.9428 -9.5165 3.0634 -3.5058 2.9692 -15.5600 -5.5499 +#> 1.9411 5.5068 -6.3341 9.3663 5.9734 8.1237 -2.7955 1.3979 +#> 3.1859 1.0869 0.1808 -3.4309 -8.1068 1.9914 0.2076 12.4701 +#> -4.9744 -1.5919 -6.1288 4.3464 -8.4726 -1.8149 5.4670 3.9777 +#> -6.8355 -5.9781 -11.1834 -9.5106 0.8818 7.5806 7.4133 11.7849 +#> 2.6733 0.1011 -4.3735 3.0455 -8.6630 -7.8654 -8.5211 -1.5228 +#> -7.2386 4.1866 7.5937 10.0426 4.5374 6.7831 -0.5036 6.4681 +#> -1.8181 5.4128 -2.0206 0.3848 -6.7987 -11.6497 0.2210 -13.1029 +#> 3.9093 13.9236 -6.4055 3.4992 1.0158 0.5141 1.9188 13.7409 +#> 3.8692 -3.9149 -11.5660 -3.1436 4.0488 -5.6748 -9.2204 -0.3520 #> -#> Columns 9 to 16 -6.8426 -4.7429 -2.6128 -4.6095 2.6961 -0.8184 -12.1658 2.0165 -#> 3.9022 5.0333 6.4491 -2.1842 11.7931 12.3021 3.1147 6.1278 -#> -6.9729 2.7342 4.6440 12.5455 -3.9127 5.4592 4.1639 1.4389 -#> 8.6323 6.5326 5.2528 -3.1248 -4.5754 1.5873 -5.0231 -3.1225 -#> 2.8939 0.1219 2.8283 3.6899 2.2707 1.9394 0.6548 5.5807 -#> 0.0017 -3.8882 10.1598 7.6993 -0.0444 -7.9066 -4.5745 -12.3957 -#> 0.7785 5.4766 3.5906 -7.7531 1.0911 -5.9815 -12.2547 7.7808 -#> 1.0523 -5.5980 3.0210 -0.2250 1.8311 5.0031 -2.5257 1.5206 -#> -4.9046 -6.8045 2.1573 10.7057 0.4956 7.5452 -4.8635 -4.9029 -#> 6.7992 6.8604 -8.2919 -13.6156 -2.8000 5.0487 -7.0615 -1.5223 -#> -12.9161 2.7160 8.2835 13.1736 3.0676 -4.2209 0.4369 8.7412 -#> -0.3872 1.1803 -6.1056 4.2021 1.5244 -6.7314 -1.8443 9.4874 -#> -3.6800 -10.8111 -3.7154 7.3191 -1.2210 -15.8530 4.8161 -7.8834 -#> 10.7230 0.1590 -2.8018 5.6006 -12.5767 -6.6004 -16.2261 7.2019 -#> 0.4843 -5.2169 -1.0586 4.1696 -10.5364 -8.4845 2.7401 3.2174 -#> 0.4920 -0.2655 -8.6780 -8.1274 -1.6072 2.6221 0.2542 -3.7309 -#> -8.9139 -4.1685 0.6980 -4.3601 -0.0753 -2.4713 -6.2920 -11.3251 -#> -2.9412 -2.9277 -0.9006 -1.0688 -3.6055 7.9323 -7.5708 -12.4720 -#> -9.2443 -2.1692 -6.7854 1.7093 -8.0463 6.3150 1.6444 -13.0162 -#> -6.2650 -1.9294 -7.3652 -5.3038 -1.9747 -0.5525 -2.9674 -6.6390 -#> 8.1219 11.4121 -0.0902 -3.8964 6.4237 -0.1665 -11.2574 -0.7778 -#> -2.7504 -10.8360 -1.8227 4.4445 -2.9894 -0.9454 10.1992 5.0257 -#> -1.9871 -8.9973 -1.5199 -7.0012 -4.1810 -9.0287 1.5581 -2.0986 -#> -6.9044 -1.8868 -0.8582 5.7889 -7.4550 -9.6949 -4.8682 -18.4330 -#> -1.8776 -1.4501 -3.1208 -1.5706 -8.5306 0.0210 -0.4809 -8.2916 -#> -2.3846 0.7277 2.7183 -12.9379 -4.1543 1.4221 3.2912 -10.6667 -#> -0.3494 7.2240 10.9552 -11.7152 4.5154 -6.7795 -11.9837 -3.2178 -#> -13.4474 -15.3883 -3.8726 -5.5541 -11.9312 -2.3773 1.6819 -5.5822 -#> 0.3630 4.3099 -16.3680 -0.3777 -2.2528 -4.0222 -1.3204 0.7607 -#> -4.4686 -19.8499 -10.0706 -2.9009 12.4972 -4.3342 -9.9224 3.8666 -#> -10.1040 -6.5496 -5.6094 -16.8005 -4.2389 -6.8117 -18.5713 -1.8870 -#> 1.7998 -1.0995 1.8782 14.4492 0.5544 -1.6617 12.2205 -10.4153 -#> -6.9645 -1.6298 8.3926 5.3486 -1.4503 2.8527 -1.8521 6.0394 +#> Columns 9 to 16 -11.0716 7.8926 -1.5192 11.1751 -5.6606 -1.6120 7.3872 -0.3674 +#> 3.3136 -6.1990 5.8662 -3.0578 -7.9727 -3.6502 -9.8288 -0.0878 +#> -5.7984 4.8727 -1.0836 -3.3400 -11.9352 0.2132 3.3643 -12.2731 +#> 4.5368 -8.4935 -0.8240 -5.3146 -0.0723 1.8665 -9.9431 4.7557 +#> -6.0098 12.1897 -0.4899 3.5732 0.6463 -1.9791 0.6883 2.7912 +#> -11.0578 -7.7644 0.7946 -0.5218 -5.7665 -24.0823 3.7099 -3.5103 +#> -2.8711 -14.5286 7.0938 5.6453 7.7450 -3.5059 -0.7206 -4.1606 +#> 9.2334 -4.7448 -4.0372 2.4442 0.9533 8.6785 -8.9648 -4.5411 +#> 14.7544 -5.0897 -2.8168 -8.4430 -5.6290 
-4.4548 -5.3709 -2.3346 +#> -8.9407 5.7096 2.4833 -6.0196 7.3999 -6.8425 1.6314 1.9350 +#> 0.5937 0.8597 -5.8163 -17.2911 -0.6945 7.6235 0.4653 -0.2495 +#> 1.1909 -5.4046 3.9597 0.5139 -2.2671 -8.6106 -2.2423 -3.8151 +#> 14.6383 -5.4473 8.4067 -3.0250 -4.9272 -4.6375 -13.0177 3.0851 +#> 13.9487 -3.8829 -2.5261 2.9440 10.4917 10.7231 -2.7004 -7.8963 +#> -4.8642 -1.9137 -0.4015 -0.3533 -3.2627 1.9848 -8.1712 -13.0388 +#> 13.6087 0.5360 2.8865 -5.0504 1.4859 3.3201 4.1823 -1.6874 +#> -5.5746 10.4820 -13.4285 5.2714 -0.5863 1.1479 5.2240 -0.3015 +#> -1.0061 -4.7373 5.6559 -6.7207 -1.5914 -5.0994 -4.4543 -3.2439 +#> 0.8013 1.1788 -2.2453 -5.3738 -8.8849 -3.0521 5.7252 7.9563 +#> -4.9584 -4.3350 -5.5550 -3.0845 -8.0257 -12.6339 -2.9030 -4.3063 +#> 2.3830 -0.0033 1.6715 -3.5396 -5.6952 -8.7490 -1.2171 0.9261 +#> -18.7848 -1.9961 4.5367 6.1949 -11.8174 -12.2647 -5.3147 -4.0750 +#> 9.4608 -6.6229 2.3834 2.1527 7.4591 1.0893 2.5343 -9.5999 +#> 3.7772 -14.9920 4.2505 -3.7771 2.8131 -4.4641 -0.6396 3.4831 +#> 10.6338 -7.9093 -11.4937 -4.3720 11.6796 -3.0354 -0.3707 -7.6393 +#> -3.8859 -1.4952 0.4492 -1.2558 6.8768 -9.6030 -5.7386 0.8783 +#> -7.3742 4.1089 -1.1013 3.4691 3.2846 -10.6651 4.2856 1.6132 +#> -14.8183 2.1003 7.7073 -5.5211 -2.1223 -9.9208 7.1001 5.6111 +#> -2.2851 -10.0254 -7.1918 0.0860 3.4937 5.2893 -8.3288 5.1660 +#> 10.0078 -2.0671 9.0152 -3.1767 -3.0365 -1.6561 3.9795 -1.5960 +#> 13.1446 -4.6746 -5.7532 3.5775 -5.9741 9.7639 -1.5420 -0.7079 +#> 0.7970 8.8009 -0.6542 -4.3686 -3.3261 1.8994 7.8499 5.4588 +#> -11.9677 2.4533 -6.6983 -8.2325 4.5343 5.4002 -3.8636 0.0917 #> -#> Columns 17 to 24 -3.1629 -0.0568 2.9113 14.5280 14.7589 3.1293 -12.1515 -1.2125 -#> -2.5092 -9.6162 4.5885 2.9099 8.3468 -4.3173 -0.8217 -1.2561 -#> -6.6908 -9.5136 -6.0419 10.0233 -3.2612 -4.1823 -1.2707 1.4180 -#> -12.0190 7.8139 -13.8348 0.5338 5.1126 4.1994 -21.2723 -7.0667 -#> -7.8274 -4.9705 -2.1304 4.1353 -8.3330 1.0937 0.1881 -4.3878 -#> -12.7670 5.6393 0.2593 22.5342 -10.9020 5.1398 -12.4250 -2.1257 -#> 5.8816 -4.6940 -8.5924 -8.4002 12.8787 -7.0274 -6.9082 -0.0870 -#> 7.1171 3.6475 9.1813 -9.7746 -10.4951 0.8722 1.6396 -7.0983 -#> -1.2520 -11.8828 -8.1181 8.3765 6.4687 -6.8427 9.5647 6.9806 -#> 6.4404 -2.9231 -11.3833 -3.3352 0.4295 2.9849 -0.1354 -1.9745 -#> -1.8990 -8.8391 14.1990 -4.9804 4.0018 -5.7063 6.9528 -12.5414 -#> 10.7867 -2.3682 -2.0609 -10.6862 3.9373 -3.7516 7.8622 -7.7914 -#> -3.6083 9.3223 16.5962 1.2439 -1.8558 3.5668 -11.2451 -6.5508 -#> -3.8310 5.8341 -5.7072 6.0198 7.0461 -4.2640 -0.8619 12.6715 -#> -13.2245 -0.7354 3.9689 2.9254 7.2269 -8.9177 -11.8530 4.4396 -#> -6.9530 -0.6109 2.6992 0.4603 9.3217 -0.9783 0.3191 0.5973 -#> -2.3645 -0.7434 14.0873 0.1423 2.8636 1.0353 -9.0741 0.3920 -#> -5.8839 1.9932 12.2294 0.8054 6.2002 2.5218 -4.2763 -7.6965 -#> 0.8513 7.0868 7.3695 -0.3998 7.6253 7.4863 5.9059 -3.1743 -#> 5.8895 11.1340 16.8882 2.2584 -4.4054 2.7326 -6.0199 1.4574 -#> 5.9178 1.1990 -9.1185 6.5885 -5.9489 2.0860 3.2412 2.0335 -#> 2.9457 -4.2214 2.5596 0.2399 -8.8712 3.9793 8.2923 -2.3617 -#> -1.2055 9.2519 0.7123 9.6024 -3.8783 -0.4447 2.7004 2.2629 -#> 10.3553 16.5592 -0.2477 6.4830 6.7097 1.7149 -4.0558 -19.0939 -#> -2.5081 3.0439 2.1285 8.5253 -10.4950 0.9928 4.8042 9.3855 -#> -12.9153 4.9772 3.6428 -0.2269 0.5490 2.9480 -7.1561 11.2938 -#> 3.8155 -3.4244 -3.3108 8.2549 9.9998 -4.2891 -21.9953 -8.6520 -#> -10.2267 11.0747 8.1871 0.2964 4.0452 13.9275 -8.5610 1.6311 -#> 3.8581 -6.7254 4.5158 11.5645 3.9171 -6.2779 7.3628 -2.6553 -#> -9.4337 -2.1890 12.6193 0.1468 -3.0054 15.4094 
1.0468 9.5217 -#> 2.4823 0.7290 -10.0452 -5.2096 9.9998 3.4954 -4.4458 2.5061 -#> 2.7536 11.1029 3.4073 -1.4913 -11.4818 0.4184 -4.7221 -0.0167 -#> 3.4516 1.3483 3.1828 2.0351 3.8541 7.9194 1.7277 -8.4165 +#> Columns 17 to 24 -3.7604 -2.1218 10.2643 11.3296 -4.1961 -7.8892 -2.9116 21.3383 +#> 2.7713 -2.3177 -10.6566 5.0730 6.5690 16.2669 1.7761 -6.2731 +#> 5.5539 0.2088 1.2515 -3.5379 3.1489 0.5724 3.5163 -3.9862 +#> -7.1346 0.8066 -8.0818 4.8729 0.9078 3.1405 -6.1009 1.4360 +#> 4.8925 -1.4863 4.0143 -1.4077 11.2512 3.4900 5.6795 4.4313 +#> -10.3108 -5.9039 0.1143 6.4796 -0.4888 2.5535 -3.9071 0.0379 +#> 1.7902 -1.7095 -12.2436 -4.1077 4.1913 -6.9233 5.3244 -20.7955 +#> -4.1053 5.2045 -4.0090 -0.8069 -0.7462 -3.5991 0.7425 -6.0462 +#> 6.1688 1.8786 1.8677 -3.9815 10.1993 7.9289 12.0169 5.9694 +#> 2.7438 -2.0493 10.2124 -7.8059 6.6905 8.8442 -0.7276 5.9513 +#> 6.3550 -2.4962 13.5204 -0.9947 -7.6063 2.3687 -4.9018 7.2481 +#> -6.2239 -1.4190 5.9511 -0.1006 -1.1228 0.9932 1.3151 -2.4219 +#> -9.0519 -8.6618 -7.7160 26.2304 -1.2801 -1.8879 -10.7122 15.0455 +#> -8.2650 -5.6845 -13.4935 10.5413 -7.0751 -0.2893 -5.5108 6.9336 +#> -2.0213 0.6318 3.9644 6.7782 -3.5172 -1.5176 16.9523 3.6875 +#> 0.4846 4.2697 8.7208 -8.7379 -18.5525 -11.8871 -2.2346 -1.8242 +#> -0.1416 -2.4817 2.6448 -4.6931 -0.7918 -11.4044 10.0879 2.1032 +#> -4.9870 7.9952 -0.6473 -10.9716 7.5003 6.8877 -1.5101 -8.8006 +#> -3.6111 -0.5425 -0.2632 0.7723 -5.4256 5.6935 -4.5463 6.0522 +#> -0.2661 -11.4720 -1.4190 -2.0422 10.2721 -5.9003 5.1691 -0.2372 +#> 5.8354 4.3365 -1.8049 14.4065 7.4103 2.3427 -2.9091 2.7755 +#> 0.3120 -4.0293 -6.8875 -3.5073 -9.2093 0.1938 1.9776 7.8692 +#> -10.2363 -2.0231 1.7242 -8.8492 0.3548 -5.0216 10.2280 -9.8666 +#> -2.4458 -3.1471 -7.7955 7.0580 3.7621 -2.7298 -3.7527 -1.7677 +#> 0.2750 3.0765 -13.2988 -14.2917 -10.4668 -8.8943 -6.4412 -0.5008 +#> 3.7819 -2.5216 6.3017 -3.1720 2.6112 4.1207 -0.2363 -2.5184 +#> 1.3873 5.3469 -1.5839 10.3203 12.6063 -8.8295 -2.7760 2.3282 +#> 8.4450 -2.8892 5.9488 -7.4181 -0.0546 12.6295 -1.2329 5.8607 +#> -11.4617 1.2900 -3.1312 3.5400 -8.6505 3.3382 0.1915 -2.8183 +#> -3.7683 -6.8829 4.0607 10.8703 -2.3494 2.1553 5.7061 10.8609 +#> -0.4346 6.6904 1.8583 9.9106 2.3102 -6.4464 2.3357 1.9055 +#> 3.1398 -1.9802 16.7641 -1.4764 -2.5059 2.7730 1.5432 1.2536 +#> 7.4855 18.2824 -5.1477 -13.7752 9.0591 5.4391 3.2000 -3.2036 #> -#> Columns 25 to 32 4.0505 8.9516 8.9033 -7.6845 -8.8602 1.5821 -8.4482 -6.3353 -#> 2.2318 11.5904 1.8198 3.8042 2.1653 2.6246 -1.8867 -2.0695 -#> 2.4190 6.7486 1.8535 -1.3479 5.0385 9.5143 5.8353 -9.7707 -#> 0.3160 0.1693 5.0241 2.3182 -5.7190 9.2947 -3.2825 9.5995 -#> 8.3221 0.3005 0.5396 -2.9437 -11.8083 7.2209 -2.4893 -14.6031 -#> 0.1779 4.2406 -7.3055 3.8547 -6.1461 -8.7149 -10.9975 -25.8239 -#> 5.6674 -5.5394 6.8157 -2.5307 -3.3665 -4.5521 -11.3748 10.3743 -#> 10.0272 -0.1362 -2.2936 -12.8996 -9.6990 -3.0641 5.2434 -3.1504 -#> -4.6833 -3.7969 -2.9460 7.8764 6.6890 1.9937 -0.5641 -7.1264 -#> 2.1160 -10.3251 14.0864 -5.1683 0.7573 -4.8519 -4.4410 3.3829 -#> 4.6316 -0.5813 6.1136 -6.9190 10.1993 -5.3839 3.2224 -3.4258 -#> 5.1394 -6.1845 -3.6422 2.5954 -3.9365 -7.0041 1.5143 0.0143 -#> 7.2168 0.3847 -4.3440 -3.7469 1.3919 -7.6400 -0.1277 -2.9737 -#> -19.3379 3.5289 -15.4244 12.3707 -1.3818 8.6222 -18.1597 -0.9059 -#> 10.2140 6.9681 -2.4340 -15.3828 8.3304 -4.1101 -3.6007 -12.4290 -#> -10.1915 15.1152 9.9963 -4.5331 -3.1466 1.2303 -2.1024 0.4509 -#> 1.8998 5.4893 1.7368 -15.3842 0.2894 0.6480 -8.8291 -15.4097 -#> -7.6948 5.5280 -9.6765 -4.6556 15.5769 
-6.7131 -10.0724 2.7616 -#> -1.0178 4.2935 -0.9750 9.4335 -6.8237 -1.9242 4.2828 6.9149 -#> 0.5209 4.0523 -7.8269 -9.1706 12.9358 -13.0383 0.8386 -5.0926 -#> -6.7296 -15.0976 -5.1138 2.4820 -1.8629 2.3847 2.7592 -3.1532 -#> -5.8144 1.5334 9.3806 -8.4229 7.0976 -1.4525 14.7540 -1.5626 -#> -7.7576 -7.1915 6.7963 -7.5794 13.1287 -11.2520 -5.9283 -4.3028 -#> -10.0635 -2.0883 8.4976 2.8765 9.4888 -7.2230 -2.2691 6.7774 -#> -6.2538 -2.5045 -5.7848 -0.1290 11.1655 0.4228 1.2409 -6.2375 -#> 8.9935 3.3222 -6.6574 -15.2159 -4.5156 3.4840 -1.4515 -11.9367 -#> 7.8913 -3.0932 12.0142 -10.1397 -6.4581 -3.4470 -10.4847 -0.8180 -#> -8.3068 2.8102 -5.8828 8.7412 -5.1551 -0.1998 -16.8988 3.1662 -#> -6.2529 7.3636 0.1179 -10.5478 11.0667 0.3758 2.9674 -6.9911 -#> -2.4338 0.0214 -1.6522 0.4852 -12.8848 -3.1333 -9.7729 -2.2721 -#> 0.7034 -8.9684 -5.5336 -6.7640 -2.4479 -1.6852 -10.6143 3.1703 -#> -0.4357 8.9272 -9.1191 0.3087 0.8216 -0.8100 0.8083 -13.8424 -#> -6.3769 -3.2665 4.3365 15.7927 6.6798 -6.6067 -13.9550 3.5308 +#> Columns 25 to 32 -8.4375 5.8681 -12.6815 -2.7522 18.9005 8.1561 -1.4711 0.6721 +#> 5.1552 -2.2365 -3.1831 -1.5080 2.2412 6.0099 -4.4138 -13.4387 +#> -1.3817 1.2058 -9.0776 -8.1363 2.5808 6.8256 -3.2355 -4.8606 +#> -6.4562 3.8584 2.9348 1.5592 -1.6639 -13.3910 -6.1613 4.9691 +#> 2.2316 -7.7670 -9.4927 6.6779 13.2614 12.4068 3.5718 4.5467 +#> -7.7871 -1.7283 11.7222 9.2901 -6.0210 1.9537 -6.2021 3.6399 +#> 17.4142 -3.5749 16.7383 -12.1120 -12.2599 8.6659 2.8077 -10.2484 +#> -4.9452 11.4132 2.6347 0.2709 -10.3623 -8.7913 -2.0598 1.5628 +#> 12.4483 -7.9866 -9.5169 5.7624 2.8241 -7.3315 6.1326 -4.1919 +#> -8.2068 2.6452 0.1561 2.2225 -6.3467 2.1432 -3.3846 -1.5751 +#> 6.4031 9.4122 -6.2390 5.3088 -3.2782 -17.0041 -2.8831 4.5766 +#> -4.6749 0.2783 1.0030 -2.0822 7.1267 8.4780 -0.8306 -8.8267 +#> 2.8908 4.6225 1.7612 -10.2183 -2.0849 1.9763 -3.1229 2.9150 +#> -3.4476 7.1192 -7.8110 -2.4038 -11.9407 7.3889 8.5001 1.2828 +#> -1.5388 -18.7380 -1.6629 -10.2506 9.6399 16.3542 11.1127 -5.5020 +#> 4.1398 -2.2345 -0.1368 -1.3296 3.0933 -8.4330 -2.1578 -1.0009 +#> -5.5802 -6.4512 5.4265 2.5174 15.3016 1.4209 5.7125 -7.9210 +#> -8.4848 -5.3513 -2.1688 7.8100 -15.1759 2.2116 7.0934 -2.2250 +#> -1.8009 1.7342 5.9228 8.2168 -6.5668 -6.1326 -16.3601 0.6873 +#> -1.8678 -4.7639 0.9619 0.4322 -0.1557 6.9369 6.6921 -6.9461 +#> 17.0084 1.9508 3.7898 -15.8282 0.4422 8.6399 -8.9750 -3.9676 +#> -9.4709 -5.4104 3.8096 -16.2421 12.5024 6.5037 -6.6907 8.3447 +#> 6.7814 1.9486 6.7006 9.7204 -0.5838 -4.7788 8.7128 -13.3932 +#> -0.3491 -12.4743 1.1914 2.4251 -2.5598 13.9371 -0.8433 -0.6254 +#> -1.5615 -8.1052 3.7613 15.6856 -3.0549 -5.0018 3.2002 -7.6677 +#> -0.2573 -1.8555 -5.5380 3.9263 2.2249 2.6079 -6.4966 -4.3779 +#> -5.1127 -2.9139 9.7634 3.4248 -1.8733 0.1754 -5.3005 -0.8686 +#> -11.2679 9.0640 5.1368 15.3644 2.9642 -5.0790 -10.0058 0.6656 +#> -5.1098 -2.9547 -10.8836 -6.8406 -5.1310 6.3634 11.1377 5.6923 +#> 8.4288 -4.1183 3.9715 2.0472 7.2207 -2.7269 -3.8387 -5.4165 +#> 2.6938 -2.6882 -6.6016 -4.5369 -1.6985 -1.4199 18.8089 7.8581 +#> 2.3302 4.4734 -0.5091 5.6254 -3.7959 -9.1441 -12.4383 3.9693 +#> -7.7333 -2.1194 -0.9992 -3.6487 0.7344 -5.6113 2.9804 11.8883 #> -#> Columns 33 to 40 -6.9297 -11.0017 -7.3290 -0.9733 -4.0245 0.7207 -0.8083 -11.6688 -#> 0.9887 6.7787 9.3630 -0.1089 9.4970 1.2405 16.4210 5.2920 -#> -1.9242 1.4191 -1.4146 0.6657 -1.8452 4.3975 6.8781 4.3683 -#> -6.2923 -5.6739 2.7374 -5.7019 4.6604 8.9459 1.3710 5.9450 -#> -11.2656 -1.8411 1.3547 6.1710 0.0621 -7.3169 -2.9158 6.4003 -#> -0.6419 -4.1902 
-7.3557 -7.8505 6.1350 9.9960 5.9794 -7.8682 -#> 0.3170 -0.8213 -2.7116 -1.0315 -3.5961 7.2136 -5.3460 -5.7550 -#> -9.0557 1.3421 7.3307 10.7694 0.1778 -19.3088 -4.8544 14.3715 -#> -10.4370 7.7783 -7.5868 -8.5351 2.8893 7.1334 14.0060 -15.1075 -#> 11.9832 -7.7026 -6.9726 1.5130 -6.5501 3.8620 -5.2957 -3.4377 -#> -2.8358 2.7340 -7.3989 -0.4648 -2.3398 5.1737 13.2490 11.8018 -#> 3.8140 6.2382 -2.4453 5.1233 -0.4008 -5.5641 -6.0084 -3.2443 -#> 7.0816 -1.5572 -3.5374 -2.6719 -1.3513 -0.7589 -18.0983 -1.5163 -#> -4.1255 -3.3912 7.7073 -9.9341 4.0253 -12.1809 12.0770 -8.5870 -#> -2.8708 6.5475 1.2193 -6.1879 -14.8457 -1.3914 6.6650 -12.9305 -#> -0.4384 -6.7738 0.2258 -4.2270 -1.8498 5.0849 -4.2075 0.1013 -#> -2.4174 6.0093 0.3795 5.1943 -10.8393 -5.2192 1.1280 -1.1510 -#> -0.2956 -3.8886 5.1311 -9.1582 -2.3592 2.3350 -3.5444 -2.0641 -#> 6.5872 -0.8988 -9.7604 11.0533 3.7212 -6.9216 2.8120 -2.2940 -#> 6.3180 0.1680 5.9209 -2.4832 -12.1692 -3.6259 -7.8507 -6.5393 -#> -3.1110 12.7384 9.9380 -5.2919 6.5667 -6.4021 5.7608 -1.6890 -#> 1.0232 0.6634 0.9311 4.7265 -3.2909 0.7162 -6.8898 15.5118 -#> 9.0084 7.1687 -3.9944 -12.5334 -15.4884 0.3428 -5.3218 -13.0022 -#> 14.7850 -3.2750 -18.7341 -5.3742 -9.2112 4.6256 -11.5047 -9.0995 -#> 6.2066 9.0095 4.8608 -8.1920 -3.6004 -5.2042 13.9607 -7.7939 -#> -7.7216 -3.3453 5.6436 -1.4099 -4.4756 -4.7878 6.4228 1.2972 -#> -1.9253 -5.2992 8.9140 2.5100 3.1209 3.7009 -0.3293 2.2241 -#> 4.0245 -5.1316 -12.5800 -14.4542 -8.1623 -18.2699 0.8162 -7.9554 -#> -4.4564 6.1181 2.9378 -5.8014 -4.5335 -0.8213 0.8772 1.1595 -#> 3.6789 1.1213 -8.5050 -13.9654 -1.0107 -13.9829 0.1980 -15.7043 -#> -1.7556 1.8067 -6.1722 -10.4958 -7.0932 -15.9986 -7.6157 -13.4282 -#> 10.2085 -5.0717 -4.6299 6.3475 8.3986 0.7032 2.2475 -8.1552 -#> 12.8033 -4.9932 -17.7588 -13.1457 2.3762 9.0886 12.7045 -8.7254 +#> Columns 33 to 40 1.2831 -3.0478 4.4763 -7.8062 5.2588 -0.5161 -14.6356 -2.2508 +#> 0.7360 5.7460 1.5698 5.3998 -3.5024 5.3727 2.5852 -7.4993 +#> -3.5804 3.4000 0.5893 -3.1498 -0.6533 3.0402 0.6515 2.6806 +#> 3.8781 -3.5728 2.4448 2.4720 2.6148 3.8832 -7.4060 -9.1886 +#> 0.3891 10.8568 0.6245 -7.3897 1.0589 -0.3865 2.1085 1.9306 +#> -8.2267 -9.1467 5.0025 11.2596 3.9119 -14.7153 0.6415 11.7746 +#> -5.8712 -4.4464 -12.1352 4.3904 0.8929 -3.5592 -4.1323 5.0740 +#> -5.5298 6.3088 -0.6864 -0.2968 10.1380 7.5405 -5.6006 8.3173 +#> 4.2371 15.4334 -11.2456 0.1605 -9.2520 12.9677 -0.1820 -22.2803 +#> 12.5241 -1.6203 4.9386 -7.2035 3.0404 5.0285 8.5747 -3.3096 +#> -3.9372 -8.7109 -8.4101 2.9968 -10.5678 11.2122 -0.2524 -3.8859 +#> -0.6715 9.4249 -2.3240 0.2733 -9.8237 8.4368 -3.1764 3.7562 +#> -2.8956 -8.5119 15.9026 9.9389 -0.1198 2.3390 4.7742 -5.8192 +#> 2.8115 5.4684 0.0520 -1.4681 15.0784 4.7902 -8.3521 -7.0152 +#> 0.5207 -6.3620 3.3055 3.2268 -8.6868 -9.3651 1.9347 11.8192 +#> -6.8734 7.2728 -1.5241 2.6622 0.1594 2.4368 -1.2347 6.6607 +#> -0.0684 -11.5418 -3.2310 1.8720 -9.9884 -0.6127 7.7466 -13.1663 +#> 8.2251 -6.1936 2.3715 2.1034 9.7459 -17.6537 9.1238 23.0090 +#> -9.2687 -13.9224 -0.1046 -8.9311 2.5911 -2.3880 4.6002 3.0171 +#> -2.9448 2.7912 7.4211 1.3856 4.2842 7.6593 7.8723 -11.2764 +#> -7.5781 8.1443 -8.1191 -5.5758 -4.2076 9.1548 1.4482 -10.2092 +#> -2.8872 -6.7994 -4.4007 4.3353 -3.5851 -4.8136 -1.8761 8.0400 +#> -0.4290 -3.1065 -14.5496 0.5649 2.6692 1.9439 -7.1964 6.0803 +#> -5.9097 10.5603 -8.1681 -10.1826 -1.2288 10.4813 -11.9262 8.6101 +#> -3.9755 -1.6607 -13.4679 -8.4485 4.3782 8.9802 -6.5016 -9.2057 +#> 7.8079 10.0995 4.3641 -2.4094 7.4531 -2.1450 3.8124 -10.7495 +#> -6.8749 
+#> ... [remaining tensor output omitted: slices (17,.,.) through (19,.,.), Columns 1 to 48, contain randomly generated example values that are regenerated on every documentation build and carry no stable content] ...
-1.7176 -1.1098 7.8816 -4.5404 #> -#> Columns 25 to 32 -7.7718 -0.9114 2.4102 -2.2426 -8.2447 -8.6054 4.0331 4.6473 -#> 1.8486 -5.2750 7.4340 15.8399 -5.6456 -5.6514 -10.1373 3.1087 -#> -9.4465 4.7849 1.6913 3.6872 -6.2903 8.9057 6.9255 -5.0254 -#> 14.8307 -4.0019 0.4373 -15.9645 12.4092 -11.6289 -2.6368 -14.3868 -#> -0.2235 3.8137 3.8534 -1.3829 2.1283 -1.9937 4.9293 -5.9287 -#> -2.5982 0.9202 11.7816 -7.2020 -6.4888 -4.1582 -5.2661 -7.1377 -#> 4.0172 1.0735 2.9697 -3.6743 1.0773 9.7277 3.3830 -1.5863 -#> 0.8849 1.0239 -5.2432 0.7883 5.8000 -3.4362 8.1262 7.7836 -#> 2.9349 -5.4463 0.5602 10.1004 1.6811 -1.1345 9.2585 5.7046 -#> -6.0164 -1.3181 -0.4065 1.5494 11.7829 -14.4608 3.9402 -5.7432 -#> -17.4480 5.4720 1.7400 5.2470 -9.6330 4.0045 4.5097 -11.8818 -#> -2.0877 2.5754 2.0526 -2.6236 -5.7712 3.4179 -0.6518 3.2757 -#> -6.4103 3.7287 4.6183 -17.0621 -15.5844 5.8730 -3.4688 -5.3081 -#> 10.6664 -2.7789 -12.8203 3.8798 -2.2326 5.6436 -5.2723 0.9585 -#> 3.0664 1.0020 -7.0297 -2.8026 4.1825 0.0879 -3.6404 -1.6307 -#> -1.5760 -0.0641 2.7851 8.6238 -5.8379 -13.0112 -4.4457 4.8467 -#> -3.5930 -0.6990 -7.8559 8.1605 -8.8669 3.5128 -8.1397 -6.4161 -#> 4.2051 -4.6264 -2.4763 -4.6992 -7.7277 -11.2350 -10.4281 -6.7836 -#> -2.7288 5.3781 -0.7707 3.8182 -4.2097 2.8139 -7.3372 10.2148 -#> -7.5773 -4.9700 -8.8037 1.7967 -2.2666 -4.4975 -7.7480 15.4940 -#> 4.9859 0.0118 1.7544 -7.8191 4.6211 -0.8371 2.3063 4.5158 -#> 2.3803 3.3274 -6.8530 -3.5117 3.6384 -7.1110 5.9185 -9.9121 -#> -12.0744 -5.7320 -8.6097 9.6187 6.9884 0.7670 4.1691 3.6504 -#> -9.9666 -1.6004 -6.3201 -10.2167 -10.4199 1.9198 -1.2735 -9.4717 -#> 1.4589 4.1667 -2.6370 -1.6384 4.4989 4.7677 -0.8968 0.3371 -#> 8.3363 0.9305 -3.7238 6.0712 4.8480 -1.6509 -0.6134 7.1978 -#> 10.3574 5.0577 1.0890 -12.0661 -0.4778 3.2472 -7.8195 -2.8836 -#> -9.1254 -9.6882 -3.2917 10.9773 0.0025 3.2516 -6.4559 -5.2064 -#> 6.9633 -0.8168 -10.9703 -0.6805 3.4517 -5.9585 -4.0768 1.5045 -#> -15.8190 -6.2055 11.6044 13.8867 -15.3518 -11.9771 19.2955 3.1466 -#> -0.1233 -9.5387 -9.2900 5.5027 1.5485 3.0759 10.2926 10.8285 -#> -9.8313 0.3688 -3.3588 7.8980 -14.6756 7.7726 -2.7930 3.4806 -#> -11.8913 -9.0171 5.8413 3.9101 1.3863 -8.5099 0.7576 -9.0370 +#> Columns 25 to 32 -3.3639 3.7089 -3.5293 -1.8418 -6.5532 4.9634 9.3293 -1.1334 +#> 1.0921 -1.6256 -0.4790 -10.5491 1.3899 5.8573 -9.6310 3.5270 +#> -3.6231 7.3822 -5.0488 -5.2696 -5.8890 6.7486 -1.3528 -10.1037 +#> 6.5933 -4.4395 -0.8298 0.7569 4.5329 -4.3295 -3.3510 3.3305 +#> -7.3630 -0.8983 -8.4753 2.5676 -11.0820 -2.3869 -6.1060 -2.5663 +#> 3.7198 5.8540 4.1679 -0.4445 -1.2934 -0.0859 19.0920 6.4379 +#> 5.1275 -1.0242 5.4139 -5.2090 -2.3867 -3.1714 7.5581 -10.5045 +#> 6.1018 -8.0742 1.3731 8.5287 -6.1802 3.1121 -4.1473 -0.1143 +#> -7.5814 -5.9677 -5.7110 -3.1808 9.9725 0.3796 -17.4838 -2.7362 +#> -2.9835 -6.5368 8.3865 -3.4417 -2.8261 -7.9488 13.6199 5.6744 +#> 5.8382 -1.7312 -11.4873 5.2917 9.1595 -6.1192 -1.3211 5.3039 +#> -5.6978 -1.7276 5.9020 3.5445 1.1818 -7.1539 4.9696 -4.9703 +#> -8.1501 -10.6983 -0.6169 -3.6448 -10.5499 -0.0997 1.1262 17.6429 +#> 4.5481 -9.6244 1.3043 5.5085 -11.3478 -10.8300 2.1328 8.0171 +#> -15.1748 -17.6798 13.9718 11.2357 -14.5809 2.7135 -6.1892 -11.7734 +#> 12.1469 3.9165 9.3138 12.5281 5.2217 2.0857 -6.2557 3.6950 +#> -5.3670 -1.0265 7.8035 -10.8065 -6.0649 0.5879 3.3191 -9.8317 +#> 6.6331 -15.9322 9.1446 17.5641 -8.7397 3.9684 5.7379 -14.6294 +#> 9.5837 3.9262 -1.9871 5.7472 -1.9088 1.6816 -0.1372 6.5131 +#> 1.8731 -2.9632 11.2780 -7.6983 -6.4604 -10.5026 5.7501 13.8107 +#> 2.5895 
-9.0015 1.3132 1.4014 -0.4176 -2.5248 2.3322 3.1885 +#> -4.3353 16.3129 3.2437 -7.7630 5.1971 1.7249 -1.4453 -12.7346 +#> -7.8411 -5.7639 -3.6366 -5.8391 -8.9470 -3.5877 7.7259 -6.6988 +#> 7.7574 -9.4756 -1.3226 6.6673 -6.5002 -4.0112 -2.6859 -4.9014 +#> 1.1800 7.3869 3.4971 0.9487 -6.2506 -8.8993 0.6066 1.0809 +#> 0.8496 -1.1507 3.9585 -0.4666 -0.9697 -7.0459 5.3007 14.1793 +#> -5.4650 1.0423 7.3941 0.2114 -8.2729 6.8540 14.6386 7.4352 +#> -2.3820 14.5245 -1.4404 -0.4711 -2.0351 -0.3895 1.5225 3.7831 +#> -7.9096 -3.5263 -2.8515 2.8540 -2.4600 -7.6752 -4.2027 3.7889 +#> -7.6223 -15.2149 1.1952 8.8253 -16.4614 -2.0181 5.3236 2.5961 +#> -2.2341 -4.9732 -3.0079 -0.7461 -2.6474 3.5996 -8.9855 2.7606 +#> -20.7502 4.7540 -0.1366 2.4125 -11.6241 -5.3454 -7.1686 12.4429 +#> 0.6590 -0.3549 -1.2210 1.7432 10.0225 3.8491 -5.5875 -10.6072 #> -#> Columns 33 to 40 2.9796 -5.8591 14.4235 4.4064 -2.7400 0.5498 7.9668 10.1102 -#> -9.9294 -5.8406 -4.4334 0.4285 3.8579 2.0006 -2.0403 2.3884 -#> -1.8515 -4.7375 -1.2654 7.3044 15.8168 -2.7423 -2.4067 -11.8371 -#> 4.4462 -14.5330 -4.3808 3.9995 2.6533 -7.2618 -12.5946 0.2572 -#> 3.2368 -4.2141 -13.9740 -8.8299 7.9667 7.1578 -1.1920 -3.9704 -#> -3.5651 -0.3440 -16.0170 6.9821 0.7315 -7.4706 -11.1905 2.1866 -#> -1.2230 1.8032 2.2447 13.5615 -13.7201 -7.6674 10.3916 10.3359 -#> 4.5096 -0.2489 -8.7388 -5.6336 6.9253 2.8208 -13.1318 2.1139 -#> -2.1393 9.3337 -4.5421 -4.7718 11.8707 -8.6940 6.3966 -8.0506 -#> -2.9345 -15.1383 10.7131 -7.4241 -6.2026 1.8937 6.6104 3.1484 -#> -4.1044 5.3933 -2.1359 5.8452 10.8731 -3.4783 -7.0924 -6.3855 -#> -9.3098 2.1850 -4.6465 -7.2282 -2.5180 0.1917 13.3671 9.2172 -#> -5.2428 -2.8354 -1.1434 12.6908 -9.3882 13.7698 -7.8651 -0.5879 -#> -7.4181 5.1397 -13.1685 -9.6985 15.4629 -14.1861 7.3946 10.6126 -#> -4.1513 -13.3208 4.5188 0.1917 -0.8268 11.2592 3.6528 -0.6625 -#> -0.6403 -6.9440 -4.0975 -3.8169 12.0874 9.4207 -10.7510 3.7073 -#> 10.7357 -5.8957 -1.9186 8.6922 -3.0942 8.8761 -14.8420 1.1825 -#> -11.9155 -2.5841 -12.9962 5.0492 4.6539 -0.6236 -4.0959 -4.7469 -#> -11.6471 4.8921 5.0823 -5.0213 9.7070 3.0254 -12.2953 3.9251 -#> 17.0879 -4.0036 -1.0710 0.4942 -2.4409 6.5400 -0.3668 1.1020 -#> -4.8131 4.7524 -9.4196 -5.5224 -1.1471 -14.8131 9.7049 4.3751 -#> 0.8249 -11.9130 -4.0199 1.9304 6.6916 6.5608 -3.2226 -4.6872 -#> 13.3971 8.0947 -1.4470 8.0229 -7.0477 -9.1122 13.7528 0.1964 -#> -2.6919 -9.1373 5.1295 -6.3844 0.0863 -2.1316 -0.3428 -0.3389 -#> 0.2724 3.4743 7.8588 -0.7303 -2.4073 1.6905 4.5028 -7.8179 -#> 10.6832 -0.2296 7.1542 9.0711 2.4966 -3.8835 -5.3003 2.0246 -#> -8.4005 -7.8312 -15.2336 6.9674 -1.8741 -5.7587 -17.6714 15.3893 -#> -4.9575 -0.1593 -1.1159 3.2139 7.9358 12.5113 -9.8510 -4.5967 -#> -10.1662 -6.7715 1.6739 -8.9887 3.1111 -5.8976 13.2658 -4.7742 -#> 0.2688 0.8505 10.0891 4.0398 -2.2841 15.1094 4.5957 -6.2957 -#> 0.2787 5.7893 4.1793 1.8786 -1.3759 -13.6348 14.0891 11.3234 -#> 3.5430 14.9865 0.7801 -0.4608 -9.5619 -6.3796 0.0200 -5.6714 -#> -4.2869 2.2431 11.6142 1.4366 2.0043 -7.9774 14.1313 -5.3636 +#> Columns 33 to 40 -11.4110 9.7205 1.5401 -2.7510 1.2792 15.0599 0.0496 -9.1622 +#> 5.4631 3.3218 2.2288 -6.9674 -9.6491 2.6538 11.1732 5.8183 +#> -3.0262 4.0388 -10.3257 -8.8676 -8.6690 -0.4287 1.6248 8.4191 +#> 8.0845 -2.8988 11.9952 0.4718 -2.9628 4.2875 13.1174 6.9494 +#> 2.8617 -2.7812 2.3745 -3.8281 7.9429 0.8949 1.9820 -9.1806 +#> -5.1250 15.9504 -4.2533 -1.3836 -4.5819 -1.7996 -5.7484 5.3758 +#> 6.3032 -10.7816 -9.6452 -1.1218 -9.1026 -4.4460 0.6274 -2.9784 +#> 5.1642 -13.3591 -1.0159 -4.7850 -5.4670 
-7.8327 2.5576 1.5834 +#> -3.9484 2.6348 32.8767 0.9171 21.6369 -3.1694 16.0368 10.6583 +#> -0.7267 14.1048 0.5024 7.7016 0.3269 -3.7291 -11.3429 -2.6302 +#> 3.6972 2.6802 2.6978 -4.3538 -5.1271 -3.2816 4.0178 12.4351 +#> -5.8301 0.6046 -3.5828 -8.4483 -0.5102 -0.9339 -0.5123 1.8580 +#> -0.8860 0.8671 9.9673 11.6676 -11.4563 14.6641 0.1757 -0.6393 +#> 18.4382 18.8040 2.4818 19.5073 -3.9704 7.3254 6.2920 -0.2283 +#> 4.1849 -6.0036 17.8827 8.4112 -8.0271 4.4468 1.9496 -5.6960 +#> -7.4727 -15.3136 -1.9712 -3.1628 3.9776 -5.6435 2.5168 -1.0798 +#> 3.0072 -10.3900 15.9210 -5.6552 -2.3916 2.4912 3.7345 -4.9033 +#> 2.6913 -7.3348 -10.0893 12.8524 -10.6272 -5.8566 -0.6821 -1.9945 +#> -5.5521 5.0511 1.8665 6.4624 -9.3969 -3.0982 -8.1768 5.5550 +#> -5.2313 7.7024 6.8639 -0.2529 1.2518 1.4687 1.6648 11.0534 +#> 3.8489 1.7666 5.6743 8.2437 -6.8864 0.9819 -1.7740 5.7260 +#> -15.6474 -1.8256 -1.8265 -5.3433 2.2131 -1.4583 -7.3534 -9.2084 +#> -1.0694 2.0757 -10.0864 -2.4707 -5.4852 -11.5234 2.0255 4.8720 +#> -12.8576 -6.8095 -3.1034 0.8008 5.0198 8.0740 -3.1447 -2.8974 +#> -8.0587 -0.2285 -10.8136 -1.8792 -7.9253 -4.7240 11.9411 0.0463 +#> -1.7676 5.3562 -7.7130 -2.6117 -0.8972 7.5100 -1.8642 6.6625 +#> 1.7393 5.9138 4.0167 -0.1128 2.6578 12.0800 5.0687 -5.0415 +#> 7.4276 15.2452 4.2611 -2.8698 -1.3799 -3.2962 -10.4635 -4.2773 +#> -14.2017 -7.7666 -0.1308 -2.3338 3.3958 0.2123 3.0525 1.3932 +#> 5.5766 2.5379 15.1168 17.7801 4.6992 2.2616 -2.0001 -4.6684 +#> -0.1415 -0.1108 7.1028 -4.6800 -1.1069 4.8969 5.6910 7.4224 +#> -0.6975 2.5799 13.4043 -5.6343 12.2655 -2.8038 0.8236 5.1554 +#> -7.0196 -2.1910 -5.0328 -3.9712 7.4484 3.2843 -2.1928 2.6612 #> -#> Columns 41 to 48 2.7537 -8.4081 0.8808 1.8540 4.4716 2.6500 -9.3430 9.6118 -#> -2.9996 6.7433 -0.1671 -6.4098 -9.4328 10.6736 -6.7612 10.3954 -#> -1.8288 3.1022 -3.4933 -9.7086 -6.0963 12.0057 -4.3052 4.2917 -#> 13.1396 -4.0272 0.2545 -14.0850 5.3521 -8.7964 -0.6994 -10.0611 -#> 5.0306 6.8330 -0.3310 -1.9005 -12.3920 1.2069 -0.4763 -4.8570 -#> 8.2146 13.0620 8.1760 -10.1780 -11.1487 -3.5516 8.0910 1.5439 -#> -2.7358 -5.7154 -1.5612 -1.4865 -8.1558 0.7511 -5.1962 -2.5678 -#> 7.7914 12.1014 7.8445 9.9661 -15.9547 1.7632 -3.3189 1.1190 -#> -0.3257 11.3838 13.9759 -1.6907 -12.5652 -9.6671 -2.0921 0.4294 -#> 5.1802 -7.9128 0.9615 -11.0031 3.9570 4.3288 -0.5359 -6.5904 -#> 2.7734 5.8330 -4.6551 2.0356 -21.9550 20.8268 -5.8366 9.2868 -#> -1.5492 -6.3833 3.1915 5.4757 -1.5982 -6.8798 11.5828 -2.1211 -#> -2.1020 -1.9674 -2.8494 -1.0265 3.5978 8.0475 4.5782 2.6527 -#> -9.5135 -1.2447 -12.3409 -11.5975 -0.7767 -3.5692 1.9104 6.9491 -#> -9.3450 -11.3317 7.6089 -13.9269 -11.1711 11.9842 -3.0715 7.0820 -#> -5.8388 -2.1508 -2.8305 -5.4169 9.6731 3.7725 0.5182 -6.3051 -#> 2.8293 2.7246 0.6847 -6.7214 -6.0762 5.8649 1.2158 -1.6557 -#> -0.9985 2.9063 -2.2778 -4.0852 -3.4789 3.1871 -3.4138 5.5535 -#> 4.2588 -2.1259 -6.4136 10.6392 -5.6241 0.1208 1.3676 -4.4714 -#> 9.0142 7.3940 8.1806 1.3387 3.6750 12.4905 -4.2329 11.7774 -#> 1.2015 2.8161 -5.4220 1.0295 -9.5838 -0.5959 3.4263 8.4852 -#> 3.2337 4.1017 9.9291 2.2344 0.6333 -3.3308 3.4500 4.4569 -#> -3.7095 -3.7454 -13.8820 8.2878 0.2788 11.7937 0.7563 6.4430 -#> 3.9434 0.0988 2.9141 -5.8733 0.7279 1.4817 9.6820 -0.6106 -#> -3.4528 -12.7778 -4.5525 4.0766 4.5152 -3.7994 -9.8088 0.2060 -#> -4.0679 -4.0544 4.9276 -0.5949 -0.2665 -0.3859 -5.7662 -0.2687 -#> 0.1229 1.3327 -2.0212 -10.0097 -17.2825 -2.8027 1.7124 -8.1562 -#> -14.8658 -6.8483 -3.5400 4.9178 -5.4110 2.2206 8.5238 1.9944 -#> -3.6567 -6.3353 -7.6350 1.1357 -8.3796 2.2761 
-6.0554 9.7495 -#> -6.2436 -2.9375 10.3187 7.0562 -1.3654 1.9643 2.6747 7.7124 -#> 0.9717 -5.3566 -0.0893 9.3926 -4.3974 -7.2100 -5.8098 -6.3129 -#> 3.7503 5.8549 2.2083 3.3030 3.9939 0.3026 0.6920 5.8119 -#> 3.0689 -2.5120 5.5766 -6.1170 1.9618 -0.7384 1.8105 4.9841 +#> Columns 41 to 48 -4.4035 -2.5830 4.7022 8.1924 -2.1343 -9.3300 8.1459 21.9151 +#> 1.8002 -2.3036 -2.1351 -3.0386 3.5527 7.5935 -3.0330 -3.0010 +#> -0.1520 1.3074 -2.6433 2.3873 6.5859 6.2012 5.0539 7.0840 +#> 8.5743 -3.4502 1.1069 0.7373 13.5990 8.9227 1.5544 -7.8347 +#> -6.4117 2.6053 5.9950 12.6009 -6.5350 4.6728 10.3349 8.4037 +#> 10.8904 2.5062 -4.5442 15.8562 -8.4158 -3.7200 1.3020 -18.3469 +#> -17.9926 12.9330 -14.3240 -9.4925 12.8679 -15.0604 1.3404 -7.4257 +#> -13.3334 -12.3136 2.1616 -3.9185 -2.1787 -1.2888 -2.4949 -2.7250 +#> 13.8176 -0.4837 -0.2999 -1.0246 3.0575 10.9575 -2.9807 0.8904 +#> 6.5266 0.2682 2.7695 -2.0101 -6.3439 -6.1463 -1.7554 0.7309 +#> -12.4386 -12.0755 -3.3243 17.1322 -7.2759 -5.2054 8.6104 -8.5648 +#> -4.6738 -0.6317 -3.0386 5.0054 7.9068 -1.5953 -7.0438 -0.2524 +#> -0.6661 1.2747 4.6270 12.3603 -3.4111 -5.3579 -22.1942 7.7327 +#> -13.1994 -0.5398 5.8172 -0.9190 -10.5103 -1.4204 -3.9475 3.8975 +#> 3.9828 -2.2820 -2.2537 4.6675 23.5178 2.4094 -1.9476 8.3381 +#> 7.0983 11.5469 8.0891 -10.8906 -4.7653 7.6345 -6.3178 -3.7133 +#> 11.6996 0.2107 9.4498 -2.5221 12.1700 -0.4884 8.7833 1.9933 +#> -5.9609 7.2075 -1.3952 -5.2639 -5.0297 12.9433 2.6567 -14.6284 +#> 1.4042 -5.2056 -1.2353 0.2738 -13.1495 -4.1853 3.0999 -1.9822 +#> 3.6216 4.0033 -6.8942 1.4035 14.3725 -11.0724 -7.7239 -7.1996 +#> -3.3201 1.5721 -1.8065 -4.9702 -1.1000 -5.9707 -6.7182 4.7676 +#> 14.8526 5.8902 0.9221 -4.8476 7.3163 -2.9450 13.8894 13.3704 +#> -9.7871 5.2464 -0.1787 2.1715 7.4210 4.9738 -6.4512 -8.4699 +#> -14.6873 8.6363 -8.5709 5.3960 5.3547 2.5137 2.2699 4.4890 +#> 1.9063 7.1748 9.6539 3.3108 5.5583 -0.4651 -12.1162 -3.3217 +#> -0.0195 5.8952 6.7473 3.3462 4.1393 -4.7460 -5.5523 -6.4609 +#> 0.9311 -6.0597 7.9514 5.2771 4.5957 -13.7509 -4.6339 0.5827 +#> 13.3875 2.7049 -3.1045 3.2848 -6.8063 -8.3993 2.3832 -5.3796 +#> -6.1878 -5.2134 -2.6677 -8.3963 8.3594 5.6325 -3.0220 0.7048 +#> -8.4356 -0.3373 8.7754 8.3716 -14.5055 -6.0169 0.8381 0.9769 +#> -6.4941 -5.5264 9.1333 -2.7699 -3.5451 3.5283 -8.2073 -0.4342 +#> 13.9317 2.0593 11.0732 6.9864 -0.4443 -7.3887 -10.1937 1.8091 +#> -3.3268 -7.3441 1.2955 -6.3151 10.3992 -7.0971 8.5107 7.5068 #> #> (20,.,.) 
= -#> Columns 1 to 8 2.6317 -3.2704 6.4893 4.0617 -3.9221 -14.5928 -0.2165 0.3166 -#> -1.7656 12.3500 -5.9235 10.2710 -5.4963 2.4739 7.8553 -11.0974 -#> 4.2000 -3.5453 -0.8064 -4.0630 6.7606 -12.7523 -2.3810 4.4675 -#> -2.4432 7.5452 5.9066 -4.3508 14.2043 -4.4407 -11.5158 0.9818 -#> 2.1510 -1.3800 5.0692 -2.6042 4.9786 -15.2360 5.3043 -2.0795 -#> 6.1902 -6.8906 -4.6501 0.0899 -6.4293 -5.1811 -13.5472 0.0140 -#> -8.3217 -5.8681 -2.1790 6.5348 -0.1611 -11.5489 -0.5299 8.3786 -#> 11.3522 8.5959 -1.9855 0.1147 -7.1166 -0.7803 18.1329 -13.3221 -#> -0.4951 1.8566 -10.2217 2.6549 1.3131 12.8182 -29.6905 9.1980 -#> -6.8784 -12.1935 10.4695 -6.8811 5.5331 3.3465 0.9023 12.8057 -#> 7.2101 4.4492 -3.0272 1.0221 2.6727 -11.9136 11.3500 -10.3183 -#> 0.3164 1.4806 -0.3643 4.1276 -0.4381 3.7842 5.1534 8.5298 -#> 1.8226 0.0824 13.5183 -1.3857 6.3781 -17.4850 4.9547 -2.4414 -#> -6.9603 13.8362 -16.1456 -0.7156 -13.0805 6.3018 -11.9658 -1.1504 -#> -3.5703 4.7417 6.4240 1.6624 -17.1390 -2.1930 4.6340 -4.7743 -#> -2.9960 10.0738 5.9415 2.8126 2.9826 1.8867 1.4020 6.8104 -#> 0.6208 -2.6765 3.8367 -0.8087 4.7581 -26.0039 -1.0620 -3.7928 -#> -3.1788 13.3034 7.1760 9.3869 -2.0361 3.8333 -6.9656 2.2902 -#> 1.5968 2.1896 1.7029 12.7194 -3.5679 7.4415 -0.9195 -7.3017 -#> 8.2864 3.4497 4.3539 4.6864 -14.1377 14.5084 -3.1976 -2.3807 -#> 5.5481 -7.7046 0.3324 -1.6636 5.1512 6.7901 -11.8125 -2.2419 -#> 9.1587 1.7614 -5.1246 1.2268 11.3454 -3.3256 10.4029 -4.0478 -#> 0.3763 -6.8253 7.2491 -6.7257 4.1883 -1.1559 2.6184 12.4293 -#> 2.1107 -7.9201 10.0034 -7.4355 -0.6598 -4.3904 -17.2219 7.2302 -#> -1.0541 -4.4945 4.3513 -0.1292 -3.6126 -1.0940 -5.8077 3.4546 -#> 5.3242 -8.1556 1.4671 -1.5818 -14.3665 11.7792 -6.1844 -5.0912 -#> -3.0772 -1.5444 10.0528 -2.3464 -1.0673 -25.1990 1.1664 8.0409 -#> 5.6377 4.6678 6.3061 -6.4699 6.3947 -1.2818 4.1777 -8.6689 -#> -2.0267 0.4467 0.3525 10.9873 -1.3003 0.2035 -5.4983 -4.9964 -#> 3.6723 -0.4729 0.4000 -5.0117 3.4031 -7.4031 10.6463 -10.4689 -#> -4.9988 -2.3497 5.1630 6.9212 -2.5057 -1.7708 1.4655 9.3307 -#> -1.1346 -1.0179 0.6928 -3.7315 -5.7301 5.4592 -11.9378 9.1582 -#> 1.1608 -1.4629 -0.4300 3.2595 -3.4088 12.4622 -8.1716 2.4827 +#> Columns 1 to 8 0.7204 3.9786 1.9640 -0.2059 -8.7830 9.1705 -1.8978 -0.6552 +#> 0.2677 4.5021 -7.5337 -8.7293 3.7135 -3.2791 -9.3102 7.7991 +#> 2.0238 10.4193 -3.7261 2.6004 -2.2357 -1.1399 0.6076 -1.2643 +#> 2.3528 1.5588 14.7682 -5.5033 4.5602 -7.5587 -4.0420 2.0467 +#> 11.5861 0.3590 -10.4944 9.0292 0.8143 -0.5998 3.8977 -6.2481 +#> 2.7058 5.9257 -4.5206 -6.0080 6.9172 1.0812 2.2904 -5.4772 +#> -7.9484 -6.3158 5.9529 -0.2769 3.3626 4.0939 -3.7389 6.0566 +#> -2.8703 -11.0456 3.3494 1.9221 9.4906 0.7238 -9.2836 -1.2321 +#> 18.6367 9.4555 1.5205 -11.1803 -1.6786 -2.4101 13.8343 -3.9238 +#> 6.4481 2.6074 -2.2367 -7.5460 0.5452 10.5616 -1.1428 2.4721 +#> -14.4207 0.5817 4.2402 10.3480 -1.6565 -8.9320 -11.1446 6.7180 +#> -5.4396 -4.9739 -1.9238 -0.3277 -2.4354 -6.3329 -2.5660 -4.4941 +#> -4.1368 0.3714 6.5016 2.5726 -6.0304 -1.2531 15.3232 -1.5994 +#> 5.0992 0.8464 -0.0608 -14.4345 1.3276 13.4636 0.9726 0.4641 +#> -7.3900 7.8303 2.1472 -8.6425 -6.0820 8.1696 -3.4664 -11.7957 +#> -3.5533 -9.1910 -1.8995 8.4852 -2.9152 -9.1580 4.6066 3.3084 +#> -6.5983 0.2991 8.8496 -4.1938 -4.0289 6.0695 -0.0695 -4.3260 +#> 7.7681 -8.5499 3.2238 -10.4777 -0.2759 -10.7467 -14.2476 14.7922 +#> -6.5367 4.8321 -5.5840 13.0064 -1.3181 4.7354 3.3749 4.3548 +#> -14.1943 -2.5453 11.3858 -6.9292 -18.6227 6.9544 11.2619 3.6564 +#> -12.7858 -4.0422 -8.5903 4.4309 -0.4839 -2.5949 
1.3119 3.8463 +#> -7.2035 7.5181 2.8576 10.5868 8.0803 3.8978 12.9722 -7.8365 +#> -3.1810 -5.2704 8.3277 -7.3357 4.1917 1.4346 -3.3970 -4.6666 +#> 7.6164 2.7580 4.3462 -4.1236 -10.7497 3.7168 5.9468 6.9176 +#> -10.9850 -9.3629 8.9923 -8.7043 -6.0492 4.1793 11.9841 8.7307 +#> -1.3313 -1.2323 -6.3410 -6.6105 -1.0481 -5.3265 -3.4231 7.8613 +#> 4.5163 -5.0713 -3.0665 -9.9020 3.3558 -0.8184 15.1954 -4.9414 +#> 2.4900 6.2327 -4.7585 7.4480 20.2261 1.7684 0.4925 -4.7177 +#> -8.6338 -1.8386 6.8091 -10.7122 -11.2707 -4.4621 4.0206 -3.1576 +#> 3.8452 8.4092 2.1628 -4.3565 -3.2269 -5.2929 7.5236 6.6677 +#> 8.9679 -4.4450 -4.6570 -4.0979 -7.2786 -0.2456 3.9277 -6.8926 +#> -8.2695 5.8926 -4.0568 2.8527 2.0790 -12.3269 7.8215 -6.1889 +#> 1.9271 2.7948 2.0253 5.2458 -2.3982 -5.5214 1.6662 0.0322 #> -#> Columns 9 to 16 12.3189 -7.4661 -4.2392 11.7499 1.0018 -4.1385 4.2201 -5.1919 -#> -2.3632 3.5253 -4.6502 -1.3243 -3.5071 -4.1476 -13.1182 -9.8020 -#> -4.7953 1.5255 -0.6499 4.0517 2.5277 -1.7646 -5.2528 5.2599 -#> -2.0369 -3.6353 -6.1835 8.7470 9.1838 -5.3791 8.8067 20.0277 -#> -1.2960 5.6209 -3.3121 -4.8769 -3.9266 -5.8039 -12.2958 4.2274 -#> 10.5936 -6.9445 -0.8961 -12.4010 -6.4007 -5.3509 12.7792 -2.2874 -#> 2.5691 -13.2197 0.6038 8.3391 -5.8939 14.4678 -0.5205 -4.0887 -#> 4.1848 11.3388 -1.9578 -9.1297 -9.4681 -5.6152 -6.0008 0.7851 -#> 4.4057 -1.0972 9.0309 1.3224 -3.7618 0.6563 -8.6499 4.4606 -#> -5.0686 -4.4629 7.5355 -1.2612 -10.0848 2.7247 7.0528 7.2881 -#> -4.9709 15.5319 -4.2474 -8.5831 6.5794 12.3218 -7.9114 11.4092 -#> -2.8564 -7.3116 -5.2396 -1.0795 -0.0653 9.1868 -0.2262 -5.8491 -#> -0.8342 -1.9837 3.4885 -13.1955 5.6293 5.9005 -0.3317 3.5845 -#> -6.5592 9.3104 -11.2125 8.8877 8.0341 -5.8824 -1.1890 6.4209 -#> -0.6094 -4.6323 1.4846 -8.3095 3.2218 11.1752 -8.5526 -3.3383 -#> -9.9859 0.8720 -2.8285 -1.3160 9.5755 1.7600 -6.3442 -4.7246 -#> -1.9063 13.7199 1.2022 -13.4989 -8.4142 -0.5966 2.7934 -10.4546 -#> -11.8290 3.8885 2.5128 -10.7260 -14.9850 10.9759 0.1504 1.9472 -#> 6.3647 -0.5885 8.9794 4.3158 -16.9057 -9.6695 -2.0466 6.2499 -#> 12.7378 9.0779 4.4035 -12.7644 -8.6863 11.0101 2.7448 2.6279 -#> 8.2543 -6.9941 4.6757 13.2053 -15.9934 -2.1725 -3.7339 2.6762 -#> -15.5257 3.3541 -1.4097 -7.8562 7.5251 -3.9706 10.9223 -5.9378 -#> 4.2843 4.0547 3.5963 -3.8833 3.8719 17.3283 17.0613 1.3608 -#> 1.8062 2.1106 3.0785 -10.7370 -4.8876 5.7470 17.1128 20.2215 -#> -1.0501 1.8343 10.6267 5.2201 -3.1459 -7.6928 2.0141 -13.0849 -#> 9.1640 -0.2273 2.5444 12.4343 -5.9568 -9.7919 0.5788 -16.2262 -#> -6.0636 -8.2957 0.5474 -7.2691 -3.9940 -7.2372 3.5615 -0.1772 -#> -0.0116 18.5618 -0.0501 1.4772 -11.5969 -6.3888 10.5140 6.0256 -#> -6.4140 -6.3266 8.7698 4.9552 -8.6961 2.5264 -6.4764 1.3719 -#> -2.1428 13.2581 0.0243 -6.2169 2.2053 5.1598 -8.1174 -8.1981 -#> 6.9769 -3.2637 -1.7238 15.3795 -5.4236 9.5380 11.7204 3.4857 -#> 4.2541 -5.3250 8.2638 -8.9295 5.8716 -3.8351 14.4074 -9.4258 -#> 6.9027 -1.9696 -0.0950 6.5029 3.8962 6.0836 7.7747 8.1169 +#> Columns 9 to 16 -6.5101 0.4217 -5.7984 14.4422 -2.5475 4.9547 -9.7174 -2.6202 +#> 5.0385 -14.2081 -0.6895 -3.3577 -6.5326 8.4067 -14.1657 -2.9425 +#> 4.9895 -3.0387 2.3725 1.2926 2.1103 -5.2542 1.7631 -3.8400 +#> -1.0840 -8.8766 -9.9298 -7.4363 -5.1668 2.6538 0.6354 3.4885 +#> -0.9582 -7.3382 -15.7859 8.6350 5.3817 1.9753 1.7504 -7.1652 +#> -8.2010 6.7050 6.0290 5.1126 7.6760 -8.2627 -3.3050 -0.8235 +#> -0.0380 -11.6303 9.6557 1.0808 5.9470 0.7350 1.9527 -4.8094 +#> 16.1488 6.5973 -0.7724 -1.1442 -3.1127 4.3683 0.9927 -5.6264 +#> -5.6930 -10.9362 -5.7351 -18.9769 
-10.5021 -4.8395 9.7254 -9.5025 +#> -0.9750 -0.1790 7.8659 -15.3504 0.8097 4.2399 -7.4569 -1.9779 +#> 5.3159 12.2311 -1.5734 -18.6719 -0.9234 3.3073 -5.4588 12.6989 +#> 2.1220 0.1937 -1.3568 -11.0159 5.9784 -8.6370 2.9642 0.0961 +#> -6.0101 -10.9997 1.7153 -10.8551 3.5893 -1.6006 -3.9109 -11.8404 +#> 0.6564 -17.8008 -13.0588 -1.9639 2.6150 21.6864 5.9739 -10.7310 +#> -5.3410 9.3782 -3.5936 0.7654 -18.8627 8.1400 -1.7319 -3.9905 +#> 11.7295 16.9428 2.2334 2.8814 1.9576 -2.6187 11.7411 7.9283 +#> -3.9663 -1.4653 0.3331 1.9336 -0.7061 7.8008 -5.0434 -2.1069 +#> 1.0235 -4.1951 13.1783 0.8484 -5.8056 -3.0008 -5.0903 3.3962 +#> 11.1815 10.4741 -2.8823 0.8246 -7.2101 1.0224 -8.8046 12.3669 +#> -1.6690 -0.2361 1.3809 -2.4921 -2.8937 5.5683 6.7189 -16.7126 +#> -0.0576 -0.5944 2.2741 -7.0879 -8.5062 -4.1132 -6.8532 -0.1614 +#> -8.9460 4.8719 -7.7315 3.1060 3.4187 -2.7045 3.2624 9.9242 +#> 0.1847 -6.6299 2.1498 -1.2818 9.1559 -10.6412 8.4213 -10.3737 +#> -5.8952 -6.2402 18.6318 8.4840 -3.2384 -3.6527 -0.7749 -2.9456 +#> 6.7507 -2.8179 -19.7545 5.7751 -6.0175 6.7579 11.4046 -2.5534 +#> 7.7275 -3.8353 3.4662 -8.6468 -0.4626 1.9800 -2.7629 -6.8681 +#> -2.6012 0.7715 -1.3032 8.3143 -3.8720 -3.3476 -4.2864 -14.2712 +#> -6.9771 9.1048 -1.1948 -1.5486 4.1960 5.2358 3.3625 6.5611 +#> 11.1025 -3.6381 -1.2623 2.0914 -7.1281 0.5444 1.6081 -7.9267 +#> -1.7125 -2.6828 7.7532 -9.1398 -1.3714 0.1140 -2.8279 -11.5659 +#> 4.4574 -5.4493 -6.1928 4.5434 1.0821 0.0782 5.2489 -7.7652 +#> -3.3410 7.2854 -16.1066 -10.1962 -3.5601 0.2287 4.1809 1.1185 +#> -6.3470 4.3083 -10.7393 -1.5773 -7.4302 -5.6992 1.0806 13.0260 #> -#> Columns 17 to 24 3.1896 7.1136 16.5850 21.3689 14.1253 10.9252 10.6069 -12.9360 -#> 3.1916 -2.3407 10.7862 1.1791 -2.3508 -0.8567 -2.7486 -0.3680 -#> -8.0882 0.5215 -6.1814 -15.5077 -8.7990 6.2791 3.8403 9.6876 -#> -17.2918 -3.3978 4.2483 -4.9023 2.4044 0.9558 9.9897 0.8995 -#> -4.9701 6.9746 3.1969 -1.7714 -6.7167 -6.3982 10.8513 7.5809 -#> 12.1795 22.3433 16.0185 7.4013 8.3001 8.4499 -1.4369 -6.1516 -#> -8.4451 1.9695 -4.3517 12.2279 -4.1713 1.0889 -4.5477 -12.6213 -#> -7.2546 1.2788 0.9460 6.0031 3.7693 -3.9883 -3.7087 -3.4668 -#> -6.9514 10.6885 7.4600 0.1471 4.4466 -0.3061 -0.6925 8.2213 -#> 11.2392 3.6268 8.8963 -4.9476 -0.7547 -13.0547 -8.4332 2.4718 -#> 6.9078 0.5906 1.6236 0.2955 -10.1980 3.3020 3.8726 -2.5545 -#> 5.3922 0.0548 -2.6088 1.4309 -0.8519 -11.7812 -5.1712 -7.9835 -#> 16.3538 -0.2866 -3.9257 4.2461 6.4649 6.9675 0.3583 -0.1692 -#> -8.3542 3.7870 -5.1359 0.5153 2.7189 7.9143 9.8094 2.3193 -#> 14.3007 -4.8221 10.5206 -3.7007 1.1044 12.1687 2.2237 -6.5298 -#> 6.7927 -0.3696 10.7242 6.0659 2.8485 18.9118 10.8026 -3.7626 -#> 10.4871 3.5258 1.9896 10.0899 2.9323 1.0943 11.1587 -5.3213 -#> 4.1095 11.5633 -3.8133 0.0403 -2.0528 20.9037 1.9324 -0.6095 -#> 0.3955 10.9574 14.2628 6.2530 -1.9365 -3.1222 1.6854 -4.4150 -#> 13.8606 4.9778 6.4612 10.9626 3.8364 14.6639 -11.1712 -4.2862 -#> -17.4411 15.5994 -10.8096 -7.8068 -2.7533 -5.6480 -10.9167 3.3260 -#> 10.4801 0.7587 1.2777 -0.4488 3.1472 9.4578 1.6560 -1.7689 -#> 9.2851 3.1067 -2.2496 5.5730 -5.7733 7.4467 1.4873 -16.7869 -#> 20.4946 1.6005 10.7375 -8.5792 11.5899 -0.3161 -6.1408 5.1686 -#> 2.6878 7.3988 4.1201 -9.0180 2.5914 1.3089 -4.1484 0.6474 -#> -1.9797 -2.2724 6.8919 5.8167 0.9384 17.1842 -1.2179 -0.5146 -#> 1.6663 7.3121 14.5475 2.8160 2.4474 -5.1266 8.7430 -11.3458 -#> 5.1560 5.9187 -0.3446 8.4645 -2.7297 -9.8186 14.9239 3.5063 -#> -1.9313 12.5361 7.1592 1.6826 1.1412 16.7364 -5.3221 0.5639 -#> 15.9264 12.0776 -3.7066 18.7699 9.2437 
-3.5412 -1.8549 1.2673 -#> -12.8781 7.2273 0.9743 14.6515 2.3926 0.9963 -7.2469 -6.9089 -#> 10.2803 -10.6271 -10.5320 -2.2252 6.0630 -1.4732 -3.5993 7.3931 -#> 14.1778 5.7967 7.9909 6.9608 8.6100 -7.6136 -8.2632 -0.3995 +#> Columns 17 to 24 -4.2065 9.2939 3.5892 7.8959 -4.8402 12.8296 -9.3807 11.1112 +#> -11.2114 -3.0562 5.2946 -2.1114 -2.7639 -12.0290 5.9917 -1.6720 +#> 0.3616 2.4872 -3.7936 13.4046 0.8272 -2.2178 -2.6375 6.0133 +#> 5.5673 -1.4790 4.5947 -7.7102 4.4746 5.0750 2.9876 -7.6257 +#> -4.3234 10.5083 -3.2607 4.8954 13.0091 -9.5953 -6.3853 3.4346 +#> 1.1920 0.4560 -9.2309 -2.2875 11.4585 0.6218 -10.3348 6.7686 +#> 0.2741 -2.5543 2.9223 3.5489 -6.3500 -9.1605 7.2221 -5.1253 +#> 10.9131 -2.5585 -0.8123 13.5260 9.2868 -5.6773 -5.9908 -9.1926 +#> 3.1580 0.9885 -2.5142 -11.2389 -3.0914 5.3502 12.6157 -6.9490 +#> -6.9435 0.3712 -16.0947 -2.5742 1.4860 -0.7407 -3.4506 -0.6165 +#> -5.0914 -5.8298 2.8393 -15.8313 12.4452 18.8915 2.8618 -15.2937 +#> 9.0411 -0.7984 -10.5758 -1.1805 9.5549 -4.1774 4.9280 -6.5197 +#> -4.3194 -0.6405 -8.7203 0.4725 -9.4539 -5.0604 -4.3800 3.2696 +#> -11.9754 -4.5121 -1.7260 -6.6479 7.9674 -6.2285 -6.3391 -3.0738 +#> 15.3515 4.3867 -11.9714 -1.5730 7.3768 14.4843 -14.3893 -8.4822 +#> 2.1833 8.3083 4.9578 2.8695 -8.2362 2.0172 4.1385 -0.6822 +#> 7.1028 -2.0772 -0.5397 -4.9920 0.3588 -0.5026 3.9092 2.7471 +#> 8.2071 1.6106 -10.6276 4.8144 1.1925 -2.6815 -13.0539 -3.8571 +#> -5.9070 4.4068 -5.1588 -3.1788 4.0078 3.1869 2.6544 -7.2323 +#> -16.9996 3.6218 -0.9238 -5.0546 -6.4982 8.1155 -0.6223 10.5490 +#> -12.0021 -6.8792 1.1253 -4.1876 -4.9893 -12.1840 13.9612 -3.3745 +#> 13.8119 2.7478 3.3362 -5.4128 -4.2197 3.0150 -4.6457 10.8798 +#> 4.0486 -0.7900 -7.0948 1.7948 -1.7368 8.7998 -0.3975 -11.3148 +#> 2.0332 10.2583 -0.6361 8.6626 5.6039 -1.3762 8.0721 -3.0917 +#> -2.7974 2.1440 5.0882 -14.1731 -5.8871 6.0872 -6.9159 0.6201 +#> -9.6967 -2.0377 3.2732 4.7402 -4.2628 -2.1743 -5.7621 1.0514 +#> 3.9071 3.6333 0.4996 6.7331 5.9931 -1.8624 -15.1602 11.3564 +#> 0.9549 -2.2777 -9.9685 -1.9309 8.2334 10.5081 -13.4993 10.0008 +#> -0.8553 -5.8342 9.6803 -8.2090 3.7659 3.4646 2.6482 -1.8601 +#> 5.2113 -4.0875 -2.1884 8.5691 -0.6622 3.0998 -1.8729 -4.7714 +#> -6.1489 -0.7365 -2.4212 7.1418 3.8574 -0.3420 -2.1447 -0.3833 +#> 6.6469 -18.9635 -3.2369 -9.7534 1.0872 1.5693 -6.2348 7.0436 +#> 11.0446 -5.3982 5.3690 1.9652 2.7334 7.3781 -4.9787 -2.4262 #> -#> Columns 25 to 32 -4.9114 -6.5843 7.0307 -4.9146 -3.4639 2.0096 8.6154 -10.3412 -#> -4.9881 -16.5373 0.8815 -14.8672 -3.2208 0.1541 8.4281 -1.7680 -#> 1.6603 -3.2193 0.9856 0.7298 3.6267 -19.4214 7.9226 -6.4101 -#> 0.8370 -3.7783 -5.5143 7.5889 -0.3336 -3.7922 0.4118 7.4096 -#> 9.0980 0.0702 -6.6039 0.2466 3.8681 -4.9208 5.6830 3.2396 -#> -9.4309 0.0826 -21.4884 -13.7381 -2.1915 -3.1371 6.9177 1.3061 -#> 3.6304 3.2765 4.7168 4.3914 3.3643 -3.9210 -5.0385 9.1815 -#> 4.5755 -4.3560 -1.8081 -0.4241 7.7188 -4.3440 3.7476 -0.3437 -#> 2.5515 4.0793 0.5688 -14.8879 1.5542 -3.5263 -1.1997 -0.4273 -#> 1.3687 -4.1839 -7.1615 -0.2557 6.2164 2.9262 11.1157 1.1417 -#> 3.3176 -6.9254 -1.2322 -7.1140 4.5201 -15.2834 -0.9299 -4.2925 -#> -4.0570 3.7860 -1.8286 -5.3160 5.1573 9.9160 -7.1879 1.8805 -#> -10.5045 -1.4338 -2.9479 6.3257 11.2079 -2.9802 1.2451 -5.4419 -#> 8.1824 5.0542 4.5832 2.3177 -7.6769 18.4706 3.9747 1.6845 -#> 4.4435 -1.0758 11.1831 -0.3116 4.0288 -13.3231 11.6478 -3.9394 -#> 0.9642 -0.9493 -6.7294 4.8991 -14.8633 -0.9488 9.4906 4.4281 -#> -6.7962 0.2925 1.0403 8.6866 0.5212 -5.6361 0.6018 9.0195 -#> 6.7747 -2.7812 -1.8297 2.4707 
-7.2375 5.6537 1.1593 5.4003 -#> -3.9716 3.7339 -5.5811 0.7016 14.3519 -5.9226 2.1348 1.1818 -#> -3.7538 -4.7981 2.3779 2.0818 4.0368 1.3383 10.4708 -10.9149 -#> -2.3645 2.2764 7.5331 3.0271 4.3130 -6.0624 3.7986 -1.1034 -#> 6.1696 -3.0635 -5.1319 4.8253 0.4628 2.3354 -4.9309 -5.5517 -#> -3.8062 0.3722 3.1567 12.7407 -13.9317 -2.3135 -2.0125 -4.3503 -#> -11.3840 5.4055 -3.2454 -4.5566 6.4311 1.2773 12.4571 -1.6851 -#> -3.2685 7.4117 20.0026 1.3566 -8.3665 -6.4926 -3.3746 0.9071 -#> 4.8057 2.7966 7.6597 14.1099 -1.2854 -0.8436 0.8242 -3.6776 -#> -11.2720 1.0225 -14.1237 4.8509 1.6023 -5.1638 4.0037 15.5045 -#> 3.3805 14.0331 -2.4699 1.1550 9.8972 6.2443 -3.5412 3.7678 -#> 11.9026 2.1099 17.6332 -3.5072 -6.8630 -0.3439 9.0685 -6.9890 -#> -4.7753 -5.2764 4.8345 -8.9518 8.0896 8.2099 -5.8078 -4.4894 -#> 4.6541 9.9653 13.8764 5.6798 0.0742 6.0066 -5.7651 4.0624 -#> -12.9363 -4.7503 -6.8934 -10.8562 -9.2596 6.5418 2.2233 -3.3519 -#> -1.8764 -4.3724 -5.7903 -28.4369 -0.7876 9.9454 -1.7579 -5.5863 +#> Columns 25 to 32 -0.4132 -3.0438 -2.5570 10.7310 10.3742 2.7517 -9.0624 5.3794 +#> -5.0913 -3.7807 1.4220 4.2462 1.2555 -8.1774 1.0138 -4.4225 +#> -9.0722 -7.4013 0.4344 3.6369 3.0903 0.8511 5.4934 -3.8948 +#> -0.7730 -2.0138 1.7253 1.1636 -6.0087 -5.1359 0.8172 -3.6832 +#> 1.0456 0.9586 6.2671 12.8951 8.2181 15.7230 -1.0166 -2.2864 +#> 11.6797 1.9767 -7.5161 -16.2880 -6.1565 -2.3005 -6.0404 9.5547 +#> 2.7918 -8.5076 1.0395 -0.0702 6.0480 9.1555 2.8280 7.2094 +#> -2.3676 1.2288 7.8325 -4.2620 -6.0670 -5.8802 0.1739 -9.9245 +#> -11.1365 3.1781 -3.0699 2.6420 -5.3172 -11.4722 12.2875 -7.3055 +#> 9.0573 2.6568 -0.5376 -3.5754 -12.3658 -4.9787 -9.3663 -10.2216 +#> -10.4300 13.0564 -11.5281 -9.5129 -14.5238 -8.5758 5.3888 -8.5080 +#> -1.9414 8.4734 -1.9602 3.2158 1.4234 -3.2866 5.3142 0.9027 +#> 7.7587 0.4377 0.2226 -12.1352 -7.4375 -10.9733 -10.0341 -10.5191 +#> 7.9724 7.3023 2.8092 -4.9314 -2.2222 -10.6973 -10.8776 3.6404 +#> 15.0376 -0.4391 2.2997 11.7165 7.5794 -2.4340 11.8190 3.9973 +#> -4.4037 0.2518 -4.4444 -7.6718 -6.4464 -8.3505 10.6614 -2.7018 +#> 12.6746 -9.1315 -5.6649 5.6829 16.9576 -1.7525 -0.2505 9.0063 +#> 13.4992 1.1240 5.1090 -0.6208 -8.2535 8.1563 -7.0806 10.3851 +#> 3.9611 -2.2554 -1.6692 -15.2887 -9.6588 -2.6689 -5.5181 -6.8727 +#> -7.1649 -7.8517 0.4424 0.5538 -8.3975 -1.6191 -4.9514 -4.5172 +#> 3.1934 -4.5653 -0.7603 -8.0631 1.5094 -16.9029 -8.7604 -5.2588 +#> -0.0315 3.7042 -3.8648 -1.9195 15.5775 7.2412 9.6295 -0.0544 +#> -5.8044 6.8197 -0.5106 7.3986 -3.3084 1.3203 2.6686 14.9541 +#> -1.4753 -4.2442 7.0028 10.8232 13.0157 11.0281 -2.1446 -5.7240 +#> -6.2500 5.5746 -2.9423 -6.3565 -0.9297 14.4385 2.4830 -0.9576 +#> 1.9308 -2.7205 3.7208 0.0194 -7.1378 -2.8584 -5.6605 -0.5520 +#> 6.1293 -10.4371 8.7476 -11.5443 -2.4726 0.8235 -16.1640 -6.9606 +#> -2.9057 0.7304 -8.7958 0.9712 -12.5550 -4.7932 -1.8880 -1.8263 +#> 0.9130 -1.5773 7.2767 1.3934 12.3891 8.7260 6.2282 -1.4019 +#> -5.1485 6.5462 3.3388 -7.5063 -4.3020 -14.0229 -4.9825 3.7668 +#> 2.2414 -3.7179 -3.4286 -1.9659 8.6955 -1.3795 5.7881 1.5206 +#> -5.4745 4.0108 -0.4451 -7.8754 -15.1264 -9.4654 11.8460 -15.9966 +#> -10.7536 5.9594 5.6992 -0.8804 7.0413 20.7508 1.7718 -7.0533 #> -#> Columns 33 to 40 -7.9782 -0.1174 9.2081 -0.5138 6.2838 3.0893 -4.8058 -18.1583 -#> -4.4992 0.1592 -4.9528 -6.1422 -2.1497 11.9840 -6.4663 -16.9782 -#> 14.0196 -7.9512 4.7963 -6.6332 -5.8331 3.4767 2.0578 12.3614 -#> -3.7671 -4.8483 3.5894 9.3979 -6.9846 -5.1328 16.0085 -1.0437 -#> 8.4310 -3.9356 -4.7848 -3.9968 -1.5736 -8.0616 3.9807 -3.8722 -#> 
7.6377 6.1733 -11.0966 -2.6771 -0.5499 -6.9654 1.3978 -3.7241 -#> 1.2850 6.9083 3.2346 -6.3490 -5.6001 3.6724 -7.4045 -5.5348 -#> -2.1955 -0.1386 -3.9806 -3.2931 -5.8467 -5.0261 -4.8924 -4.6912 -#> 9.8305 12.6788 -7.4050 -5.2257 -9.9318 -1.4535 0.6971 3.5317 -#> 5.0632 -4.2288 2.2580 -1.8263 11.1705 8.9084 -5.8374 -6.0012 -#> 8.1201 -5.6505 0.6060 -6.7071 -0.3699 -6.9170 -7.7779 6.6524 -#> -4.3955 8.7702 -3.0082 6.5812 -2.8098 0.0620 -10.2726 0.1328 -#> -0.2795 -6.3769 -0.1139 14.9688 -2.7549 -1.6537 -5.3231 3.2797 -#> -5.0211 -0.9620 3.7316 -4.8731 -1.6097 -6.1001 9.7407 0.7674 -#> -2.1829 4.2108 -0.9183 4.6779 -8.4763 -4.7964 -2.1435 1.3281 -#> -5.8912 -4.4834 -3.0533 2.5049 -3.2299 -9.6439 -1.0027 -6.7984 -#> -9.0744 -9.8956 9.6744 -9.2164 -2.7103 1.2476 7.4178 -0.1035 -#> -5.2211 -5.5057 -3.2928 11.1094 -12.1035 4.6906 3.1275 4.5405 -#> 3.7232 -2.3739 -8.5902 0.9798 -7.2881 -9.8486 -16.0251 5.2795 -#> -9.9477 8.4432 -1.7972 5.5190 -6.1098 -1.9647 -7.1623 -4.2358 -#> 8.4954 12.9459 -10.8776 -2.0965 -1.3022 6.1228 -4.4629 14.5403 -#> -0.2208 -9.9050 4.5437 -6.8559 -9.4238 -9.3158 -0.3664 -1.7320 -#> 7.6426 -1.2814 7.7386 11.7444 1.8633 2.9149 6.0874 0.1475 -#> 8.4882 -16.9385 8.4649 11.7110 9.7894 4.5368 -6.7920 7.8080 -#> 2.8875 1.8071 2.7394 -8.7541 5.8108 0.4024 7.5638 15.7222 -#> -6.2697 8.1250 4.2751 1.8647 -5.8885 1.8181 5.5904 -1.6848 -#> -4.3160 3.0767 -5.3289 -8.2283 -2.4016 -2.6025 4.7989 -1.3792 -#> 0.9026 -8.5282 7.1421 8.1328 1.1274 0.1584 5.5324 -8.1130 -#> 2.6120 -2.0762 -6.0001 -9.1572 -8.5995 -4.7389 -6.9053 10.6624 -#> -0.1218 1.0712 0.8658 -5.4335 12.2737 6.7308 -10.1385 -27.2439 -#> 2.2173 4.4802 9.8104 -1.4112 -12.2861 10.1242 -5.8051 -2.2389 -#> 2.0607 -7.1254 3.2760 10.5545 4.6498 -2.4794 7.7428 0.6788 -#> 0.9462 1.8375 0.8980 3.4936 8.3570 7.2085 -0.4131 -14.5003 +#> Columns 33 to 40 12.4281 -16.4712 2.3275 7.1651 -14.1225 -6.8937 -0.7977 -7.3150 +#> -8.1894 -4.2377 -7.8425 13.4008 8.1175 -2.2782 -1.2210 -5.8291 +#> -4.0410 -6.1736 0.4422 -8.9344 1.6366 -5.9319 -0.1961 6.4251 +#> 2.8202 5.7388 -1.8082 13.6126 -2.7894 -1.2446 1.0118 -3.5126 +#> 9.7174 -13.4668 5.2051 -13.4196 -1.7748 -3.1553 2.4221 -2.7922 +#> -8.5377 -10.5196 19.9129 -0.6344 5.1032 -2.3426 -6.7487 19.8810 +#> -1.6694 9.7614 3.0036 2.7760 14.5643 14.3558 -4.5513 7.7336 +#> -0.0779 -4.0573 -14.0203 3.1155 3.2607 2.8354 1.1164 -0.4351 +#> -0.3682 -0.1078 -5.3947 1.0590 -11.2919 -4.5850 1.4685 -9.0223 +#> 5.3669 -8.4064 2.9503 -3.3809 -9.6942 1.1302 -3.7532 -0.8993 +#> 15.7925 3.8990 0.5087 6.7966 -16.0462 -4.7659 -3.3954 -18.1975 +#> 4.1802 -5.2493 -5.8310 7.9577 0.9913 -1.0684 0.5915 0.6320 +#> -15.9878 12.7780 -15.0252 2.7532 -4.3315 -14.0943 6.7232 8.2058 +#> 0.5793 -4.6638 -11.7661 -3.6801 -3.2603 -13.3315 1.2355 -11.0164 +#> -7.1668 5.4784 -17.4055 -0.4866 -11.8200 1.4205 0.4033 4.5948 +#> -4.7855 -1.4842 -0.0606 4.0005 -3.1935 2.3276 5.3923 -1.4242 +#> 5.3504 6.0079 3.3508 2.4204 -6.4490 5.5638 0.5387 -2.7749 +#> -8.6563 -7.1240 6.7491 2.8477 -2.7816 2.5721 -3.5453 -0.1386 +#> 3.1950 -6.7896 2.6337 12.2708 1.5116 -3.2596 -3.9244 4.4117 +#> -11.3447 -3.6547 5.0978 -8.3254 -5.2496 -1.4632 -11.6462 4.9283 +#> 2.6258 -4.9558 -9.9812 5.4392 6.3579 -11.2515 -3.3764 -7.2731 +#> 1.8118 -0.6791 2.3409 6.1242 0.0062 2.1666 6.3161 18.7852 +#> -4.1806 5.5259 0.5278 -4.9273 12.1832 7.7378 2.1827 2.4047 +#> 15.1877 -9.5137 -4.8248 4.3539 -4.2380 -4.8231 -5.6607 2.9108 +#> 8.5820 8.7402 4.9946 -1.3527 16.4840 8.5733 7.3263 1.2356 +#> -5.0214 -14.1126 3.0400 4.4395 -1.4161 -0.6688 -3.3167 -3.9808 +#> -0.1068 
-4.4285 8.3274 -3.3712 -0.3419 -5.2243 -5.1254 10.9061 +#> 3.7933 -5.5490 8.4841 5.1688 -4.2200 10.7959 1.9520 3.0896 +#> 6.0787 0.2255 -3.6709 -4.6714 -3.0751 -7.5436 -0.8438 -11.5510 +#> -9.3023 -4.0976 -8.9739 9.4679 -6.1308 -15.6199 -4.1747 1.6300 +#> -11.3287 13.4706 -13.4466 -12.6591 -9.9158 -3.1640 1.2798 2.6026 +#> 1.3376 2.5337 5.7312 -5.4022 -4.4783 -1.6983 0.2834 -2.4832 +#> 22.3789 7.8134 -11.6687 2.9608 -1.2908 10.1047 1.5613 4.2445 #> -#> Columns 41 to 48 2.9347 -6.5413 -2.7144 7.3507 10.2499 19.5012 11.9704 4.0226 -#> 7.6969 -0.3759 -0.9515 14.8465 -0.4770 11.8782 5.7558 -0.1092 -#> 0.5673 10.7646 -7.6328 -8.9557 -3.4129 4.7266 -8.4608 -7.6704 -#> 12.3682 7.7268 -4.8314 -16.3501 -8.0761 13.1647 14.0554 -3.6347 -#> 2.5149 8.1191 7.5053 -0.6208 -6.2784 5.1073 10.8315 -0.1904 -#> -7.1793 8.9993 -1.1687 5.0640 10.9705 17.1949 10.9811 15.4482 -#> 11.3650 -10.3334 -1.9698 -1.5696 8.0683 0.7480 -4.9720 -0.0693 -#> 12.6532 5.2129 1.0802 4.8707 -3.9001 -10.7755 -1.1094 5.5256 -#> -14.9249 4.7129 12.2226 -3.0380 -5.4822 3.5384 4.7636 -6.5954 -#> -2.6631 -4.3724 -6.3074 2.9076 8.8519 -2.7298 14.7076 0.3063 -#> 8.0633 -0.6514 7.6628 2.0957 8.4992 -3.0437 -8.3949 3.6666 -#> -4.4896 3.1980 7.0277 4.3710 -2.4215 -6.1094 -0.3414 0.4719 -#> 0.2284 4.9868 -11.0636 3.0958 9.8737 9.6359 -6.2769 -3.0681 -#> 6.8440 -10.4315 -2.9086 -9.5762 -5.9412 16.2668 4.3936 -12.5563 -#> -2.7370 14.5742 -10.9801 4.9551 7.1912 6.3545 7.9185 -2.8689 -#> -0.6319 5.5352 -1.6038 -6.4819 16.2483 5.5396 7.8797 5.5145 -#> 0.2896 1.5684 4.5287 -11.5754 8.5899 3.2760 10.2598 -2.6193 -#> 3.8999 3.8732 -5.7280 -6.9490 2.7778 4.4373 2.9833 5.8405 -#> -3.0089 1.5390 0.4579 12.6652 14.0262 4.2430 -16.2028 6.4233 -#> -9.0877 3.0460 0.8589 5.1432 6.2836 -4.5454 6.0026 -2.2429 -#> 3.9197 -0.0397 0.7961 -1.7077 -6.5249 -14.8881 -7.8815 0.5807 -#> -0.6347 14.0922 3.1064 -5.3681 0.8794 -1.9129 -5.5581 0.0186 -#> -17.1356 -7.0935 -0.7364 4.1804 11.8833 -3.0501 -9.5860 -0.3069 -#> -15.5952 -0.5495 -18.1164 5.3012 11.3696 -3.1717 2.4292 13.4601 -#> -9.2839 -1.0520 5.2714 -3.2606 -0.7096 -7.9491 -6.9085 2.7991 -#> 6.4440 2.0277 -9.0858 -7.5276 14.1867 -2.4964 5.7862 -4.2194 -#> 6.3172 0.8966 -1.3876 -12.1631 14.6530 12.8441 6.9321 6.2326 -#> 0.7124 -12.4130 -8.9603 -6.4906 19.0114 13.5405 -3.5782 -0.8632 -#> -1.8861 1.0826 -0.0976 1.6771 -4.5812 5.2838 6.5466 4.5431 -#> 7.3507 -2.6981 5.3992 2.5461 17.2578 5.4251 -1.5992 -7.3955 -#> 7.2382 -4.5121 -0.9355 -7.8539 0.3607 1.1957 -6.9880 0.4112 -#> -10.8835 -1.8415 -7.2738 5.6169 -6.4167 2.0098 4.2866 -6.7110 -#> -9.0879 -6.7933 -0.8936 10.1913 3.3567 10.5202 13.4314 6.9530 +#> Columns 41 to 48 -3.6568 -3.3038 4.6883 -13.1722 -3.2869 9.3041 1.2363 -3.9984 +#> 0.8554 -4.4779 7.5493 8.4915 2.5667 -7.9665 3.0118 1.1276 +#> -0.3631 5.5444 -7.5559 -0.1196 5.7321 -1.9655 1.3066 -4.9991 +#> 7.4106 -0.3171 12.6276 -2.4802 -2.6901 5.2787 -2.9683 6.6626 +#> 8.7507 -10.2061 -8.7086 2.3781 -3.5093 -13.1991 -1.3439 -9.0805 +#> -2.2748 4.5691 7.7179 0.3833 -2.1670 -4.9527 2.4995 8.1217 +#> 4.9719 0.7007 0.7485 -1.4723 -3.9865 1.8007 4.1444 -3.0241 +#> 2.2218 -12.7739 6.5682 4.2967 6.6679 -5.1592 -0.0165 -1.7690 +#> 5.7675 1.2612 -5.4197 4.4680 5.0405 2.5639 -3.1297 8.7898 +#> 2.4283 -6.6306 0.8696 -1.8889 0.4649 0.0847 -3.0097 5.6809 +#> -2.3397 13.2972 7.0691 -5.0293 -3.4331 8.3509 -1.2005 14.6979 +#> 2.9298 7.0707 0.1938 5.9990 -3.8105 1.4523 0.1846 -8.9589 +#> -7.6527 -5.7518 -6.7757 -0.4943 0.1633 10.4889 10.6487 -1.2751 +#> -6.0931 -13.0421 -10.5523 0.7996 -0.7176 1.9356 3.6764 -1.4679 +#> 
8.8261 -4.0749 -3.3512 2.5338 9.6804 5.6512 -3.5942 -12.3289 +#> -3.1321 7.1272 -3.3435 5.3821 10.7984 1.5269 0.4901 -5.6708 +#> -0.4924 -1.5028 7.2955 -4.7937 -4.7963 5.2281 -6.2452 2.7743 +#> 16.5464 -8.4592 4.9584 -4.8191 7.7133 -0.3130 -6.9707 5.1337 +#> 6.2497 -2.3872 6.9805 1.3411 6.0068 -4.6236 9.8051 3.4003 +#> -0.3205 2.0104 -3.0305 -8.0887 -0.4852 2.3644 2.2273 8.0652 +#> -11.6916 -3.3476 3.0293 0.6824 -1.8505 -0.5615 16.7081 -2.7346 +#> 1.6570 13.6416 0.2709 -2.0993 -8.9250 1.5405 -1.2260 -12.0698 +#> 1.4101 11.9977 -0.9030 1.8829 1.4618 0.2833 -6.6250 -1.9540 +#> 13.8602 -7.0519 -1.3545 -10.9911 -3.3629 -5.0637 8.7662 -10.0588 +#> 14.4918 12.5008 -3.0275 5.4626 1.6309 -11.4781 -1.3635 2.9841 +#> -2.2965 2.6517 11.1042 -2.1207 -2.0783 -4.8780 -3.0670 4.7831 +#> 0.2110 -6.2690 6.2956 -2.6013 6.8506 -4.1224 10.2412 7.5049 +#> 6.8159 3.1378 -3.7380 -6.0232 -5.6573 1.8460 -8.3177 15.4529 +#> 1.1127 1.1435 1.4419 -12.4564 -1.5157 -5.8976 -0.4586 -5.0206 +#> -4.2031 1.7970 -0.0877 3.3972 2.1657 9.7306 2.1279 2.2065 +#> -11.3591 -10.0992 -4.2825 -2.7245 -1.9645 -1.1735 1.4630 -4.6376 +#> -3.1599 -4.4585 -4.8284 -1.4836 8.4648 -2.7482 -0.4739 3.9991 +#> 6.9959 13.9255 1.2966 -5.6168 0.6488 -8.9664 -0.7662 0.7772 #> [ CPUFloatType{20,33,48} ]
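Only the closing shape line of this regenerated output carries information; the values come from torch_randn inputs and change on every build. As a side note (mine, not from the generated page), the length-48 time axis matches the usual convolution arithmetic for a length-50 input and a width-3 kernel at unit stride with no padding, which is consistent with the printed {20,33,48} shape:

# Output length of a 1-d convolution:
# L_out = floor((L_in + 2*padding - dilation*(kernel_size - 1) - 1) / stride) + 1
conv_out_len <- function(l_in, kernel_size, stride = 1, padding = 0, dilation = 1) {
  (l_in + 2 * padding - dilation * (kernel_size - 1) - 1) %/% stride + 1
}
conv_out_len(50, 3)
#> [1] 48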

    Conv2d

torch_conv2d(
  input,
  weight,
  bias = list(),
  stride = 1L,
  padding = 0L,
  dilation = 1L,
  groups = 1L
)

    Arguments

    @@ -248,69 +280,69 @@ planes.

    See nn_conv2d() for details and output shape.

    Examples

if (torch_is_installed()) {
# With square kernels and equal stride
filters = torch_randn(c(8,4,3,3))
inputs = torch_randn(c(1,4,5,5))
nnf_conv2d(inputs, filters, padding=1)
}
#> torch_tensor
#> ... (slices (1,1,.,.) through (1,8,.,.): regenerated random example values, elided) ...
#> [ CPUFloatType{1,8,5,5} ]
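The shipped example covers only square kernels and equal stride; stride, padding, and dilation also accept a length-2 value applied per spatial dimension. A minimal sketch of that (my addition, with an arbitrary 3x5 kernel; not part of the generated page):

if (torch_is_installed()) {
# With a non-square kernel and unequal stride
filters <- torch_randn(c(8, 4, 3, 5))
inputs <- torch_randn(c(1, 4, 7, 7))
out <- nnf_conv2d(inputs, filters, stride = c(2, 1), padding = c(1, 2))
out$shape  # 1 8 4 7: H = (7 + 2*1 - 3) %/% 2 + 1 = 4, W = (7 + 2*2 - 5) %/% 1 + 1 = 7
}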

    Conv3d

torch_conv3d(
  input,
  weight,
  bias = list(),
  stride = 1L,
  padding = 0L,
  dilation = 1L,
  groups = 1L
)

    Arguments

    @@ -248,12 +280,12 @@ planes.

    See nn_conv3d() for details and output shape.

    Examples

if (torch_is_installed()) {
# filters = torch_randn(c(33, 16, 3, 3, 3))
# inputs = torch_randn(c(20, 16, 50, 10, 20))
# nnf_conv3d(inputs, filters)
}
    #> NULL
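The shipped example is entirely commented out (hence the NULL above), presumably because convolving a (20, 16, 50, 10, 20) volume is too slow for documentation builds. A smaller sketch along the same lines (my addition) that runs quickly:

if (torch_is_installed()) {
# 2 input planes, 4 output planes, 3x3x3 kernels over an 8x8x8 volume
filters <- torch_randn(c(4, 2, 3, 3, 3))
inputs <- torch_randn(c(1, 2, 8, 8, 8))
out <- nnf_conv3d(inputs, filters)
out$shape  # 1 4 6 6 6: each spatial dim shrinks by kernel_size - 1 = 2
}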

    Conv_tbc

torch_conv_tbc(self, weight, bias, pad = 0L)
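Unlike the other convolution functions on these pages, conv_tbc works on a (time, batch, channel) layout: self is a (time, batch, in_channels) tensor, weight is (kernel_width, in_channels, out_channels), and bias has length out_channels (a layout taken from the upstream conv_tbc docs, not from this excerpt). A minimal sketch of a call, with arbitrary sizes:

if (torch_is_installed()) {
x <- torch_randn(c(10, 2, 4))  # (time, batch, in_channels)
w <- torch_randn(c(3, 4, 6))   # (kernel_width, in_channels, out_channels)
b <- torch_zeros(6)
out <- torch_conv_tbc(x, w, b, pad = 1L)
out$shape  # 10 2 6: time_out = 10 + 2*1 - 3 + 1 = 10
}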

    Arguments

diff --git a/reference/torch_conv_transpose1d.html b/reference/torch_conv_transpose1d.html
index 5c7b195bc99384f0291add3b04ce07b7b14a8980..4228c8a57dc9295877fd5ae71d783708bc164c91 100644
--- a/reference/torch_conv_transpose1d.html
+++ b/reference/torch_conv_transpose1d.html
@@ -72,7 +84,7 @@
 torch
-0.0.3
+0.1.0

    Conv_transpose1d

torch_conv_transpose1d(
  input,
  weight,
  bias = list(),
  stride = 1L,
  padding = 0L,
  output_padding = 0L,
  groups = 1L,
  dilation = 1L
)

    Arguments

    @@ -253,5064 +285,4860 @@ composed of several input planes, sometimes also called "deconvolution".

    See nn_conv_transpose1d() for details and output shape.

    Examples

if (torch_is_installed()) {
inputs = torch_randn(c(20, 16, 50))
weights = torch_randn(c(16, 33, 5))
nnf_conv_transpose1d(inputs, weights)
}
#> torch_tensor
#> ... (regenerated random example values, elided) ...
17.1118 -3.7436 1.9554 0.4862 4.6698 -#> -4.8602 -19.4728 -0.3412 12.0404 -2.2184 12.8902 1.5057 1.0611 -#> 4.5539 -0.5962 -8.7740 5.9415 -14.5654 -1.0543 1.0969 -4.6110 -#> -13.4540 -1.8438 -27.4494 10.4325 12.9727 -5.3659 4.3214 -9.1497 -#> 8.4932 10.4420 -0.1234 -2.1550 -5.8957 1.4477 -0.5030 -4.5601 -#> -2.8887 10.2157 4.6852 -2.6711 5.0831 -4.4824 -4.4490 12.0549 -#> 10.8636 3.0547 -14.1468 -11.5786 -4.1872 14.1289 3.3529 3.5294 -#> -5.0578 19.5757 -10.7164 14.3858 -15.5695 -23.5430 0.6562 0.4816 -#> -17.2403 10.0789 8.6334 -16.7596 5.0725 1.1319 -1.7669 2.2916 -#> -1.0254 0.8765 1.0145 7.5937 8.4460 -11.7051 -8.6109 -3.9665 -#> 4.5205 6.8878 -2.7549 -11.9323 -22.9036 11.9790 -22.1282 10.1852 -#> -14.1557 -5.4662 9.6428 -7.9654 0.4415 16.7461 5.6598 14.2657 -#> 11.7360 15.4267 7.0044 -0.2906 3.8354 -16.8175 11.6116 4.2682 -#> 7.5793 -3.2285 -5.5165 -6.5840 2.1186 12.6062 -6.2278 -9.1656 -#> 2.0305 10.7111 2.9078 11.1756 6.8596 -5.1708 12.8105 1.4350 -#> -19.5372 -9.6168 -6.2032 -6.6678 -0.1795 -2.0621 0.1891 -0.2887 -#> -6.1471 -4.1238 5.0940 -0.5406 3.4143 4.3145 17.2420 14.2220 -#> 15.2717 -11.6314 0.6753 4.6649 -8.3632 -4.0830 -2.4835 2.9289 -#> -7.8195 -8.8384 11.2586 -6.0931 10.6657 -1.6245 5.1102 -8.9102 -#> 17.8543 5.0104 -12.4566 0.1958 -16.1957 -0.1449 12.1718 8.7589 -#> -1.8529 -4.3217 -6.6898 3.4766 3.4081 14.7981 -5.4841 -4.5221 -#> 1.1381 1.2830 11.2350 -3.1520 -9.8428 4.2556 2.6595 8.2908 -#> 1.7770 -4.4640 4.3794 23.1427 -4.7616 -7.9387 2.9123 -2.6110 -#> -8.3071 4.1271 -4.8753 1.2700 12.8085 -4.8894 -3.6147 3.9395 -#> -0.0611 6.6493 -13.0310 -1.8303 1.7900 -0.6020 -5.4802 -13.4728 -#> 26.6121 8.2568 -6.9124 -11.3372 -3.8023 4.2649 -4.5585 14.7970 -#> -#> Columns 33 to 40 -13.1377 -0.0706 5.6916 6.5058 -3.0752 4.5630 -7.6258 20.9413 -#> -0.8843 1.0609 3.0657 -0.0612 -3.1638 -3.3998 3.0816 3.0866 -#> 7.3812 -1.7633 7.9183 -13.8968 14.0850 -20.5245 4.3020 6.5696 -#> 4.5379 5.9704 3.7029 3.3116 -22.2722 2.8277 2.2261 -4.4455 -#> 1.5286 7.1339 3.0548 3.2838 2.5137 -0.4061 8.9326 17.9624 -#> 2.3315 -1.2700 3.9492 2.8422 18.2107 -7.2438 21.5324 3.9252 -#> 9.6432 0.4445 -2.2403 4.9207 -0.7474 -7.9729 -0.5129 -15.4101 -#> -20.4920 -17.6694 -5.8191 13.5121 3.1379 -5.5635 -1.6764 -9.0776 -#> 3.2790 -1.0290 15.0159 6.6719 5.8982 -3.4839 -0.0748 10.8644 -#> 10.9863 9.2873 3.2989 2.4354 9.8572 -2.2714 7.1368 4.1491 -#> -7.7011 3.7981 -5.6473 -9.6907 14.5500 -3.3643 -1.8682 0.4615 -#> 4.9033 -13.4153 5.4821 1.2573 5.0443 -2.3632 3.0864 5.1709 -#> -6.3609 11.7518 9.7436 6.3601 7.2588 -2.8778 2.2560 2.2091 -#> 0.6333 9.5380 -16.4339 -7.4011 -12.0568 0.4711 9.7366 -8.5114 -#> 16.3144 8.5388 0.5593 -8.6230 -5.3358 -1.3064 -3.4502 -4.9662 -#> 5.6114 -3.9022 8.0300 3.4313 -1.6205 -13.8910 -4.1147 -7.6525 -#> 15.3002 6.0428 -9.9738 -10.1090 10.0225 -1.4212 0.5714 -10.6084 -#> 6.0626 -5.3162 7.5097 0.3177 0.6618 -12.0088 5.9090 -13.9977 -#> -2.9831 -4.7394 1.3440 4.8945 17.2124 4.3742 -1.8140 -3.4268 -#> 3.9576 0.6434 -12.3714 -10.8635 9.2416 -4.4522 5.8705 -3.2069 -#> 3.4398 -2.3467 14.6558 -7.6711 -6.7356 -5.1956 5.4958 -12.6438 -#> -3.3017 -4.6428 -0.7086 -10.4358 6.3154 0.3267 -4.0852 -0.8664 -#> 10.5626 2.0228 -14.4048 8.2696 -0.0745 -10.8902 -6.6894 5.1885 -#> -12.5296 -12.9663 -4.4452 -2.9155 5.2775 18.9949 2.4824 13.1430 -#> 0.6722 -7.0047 6.4576 -3.7775 -9.1132 6.3353 -3.5569 4.1473 -#> 6.7089 -7.4115 10.6349 5.1406 2.1408 6.6096 -5.9116 9.5312 -#> 7.9793 9.1878 -15.3085 -6.6413 1.8546 15.5129 1.8919 7.0326 -#> -10.7967 -1.9923 -12.8950 -10.9955 15.2740 7.0095 8.7453 1.6567 -#> 
-3.5113 -4.6141 5.5052 -2.7090 -0.7984 4.3020 0.6053 -0.6996 -#> -2.2838 2.9193 -10.9042 9.5657 -5.6694 -0.0304 8.0606 17.9457 -#> -3.7685 -2.6690 0.9492 -7.3359 4.6296 -4.3763 -5.5983 -1.0029 -#> 16.1059 -0.4918 -5.9363 4.5314 -2.9774 9.2013 2.6621 -8.6090 -#> -1.3673 11.2294 0.8330 0.1796 -1.9448 13.2667 -1.8795 -11.9470 -#> -#> Columns 41 to 48 5.4623 6.0557 -2.3547 -10.5777 -8.9394 -17.5632 17.0012 -7.7506 -#> -3.4502 5.7822 -22.9018 -16.1433 3.3162 -7.2222 4.9175 5.3236 -#> -6.8991 -4.6364 -2.5290 -20.8305 -5.5299 15.0443 -3.6268 12.7072 -#> 9.9013 -1.1473 -9.4256 -2.5440 0.7606 13.8442 -2.8150 8.2952 -#> 0.2988 -2.1047 12.3417 4.8989 -4.7860 6.8126 0.6023 6.9697 -#> 6.4256 -18.5609 12.9657 -13.4234 3.5923 -3.0290 -19.5210 7.9649 -#> 8.5411 7.2910 -1.3473 12.0052 -4.6327 -5.4855 -9.0772 21.2862 -#> -8.1589 -3.7239 15.3635 -0.2900 -8.4298 -5.8980 8.3588 -31.6000 -#> 9.1120 3.6091 0.5912 13.4243 -0.9888 -7.5009 12.9340 2.7174 -#> 13.6130 -5.7139 2.6131 -4.1449 -13.8080 12.9982 1.1384 -5.4977 -#> 8.9638 6.3303 -9.3563 11.8058 8.1987 8.6516 -2.2029 -17.6940 -#> -4.9596 7.8049 -4.9706 -1.9639 -1.2503 4.1113 3.3422 -3.7928 -#> -6.4181 6.0623 -8.2092 0.7996 -6.1743 0.6363 -7.4182 -8.2864 -#> 1.0932 -2.6376 -9.6817 -8.3699 9.8409 12.7849 4.8182 11.9833 -#> -14.7259 -18.3420 11.0260 6.2939 -17.9582 10.5616 6.2648 -1.0947 -#> -10.5056 2.1502 -4.2209 -0.2779 12.3265 4.9331 -2.0322 -2.0515 -#> 8.0115 -9.9201 -15.0605 4.7110 2.5659 -7.8833 -16.4231 15.7626 -#> 7.2102 9.5568 -11.0781 -2.8903 -0.3292 7.6369 1.6897 1.3017 -#> -4.6736 16.3344 13.0339 2.9969 4.7427 -0.4004 -0.6330 -3.7610 -#> -12.2635 -13.3897 -0.6776 12.6669 -17.6320 -6.1654 0.0280 -10.9851 -#> -7.5166 -0.4481 -4.1911 -11.3640 3.6991 14.0500 -13.1051 7.0555 -#> 3.5955 -1.8552 1.2731 0.7391 -11.2164 -4.5338 -7.1506 -1.2045 -#> -7.6455 -12.3654 -11.3886 -10.4583 -8.9162 8.7736 17.7514 16.4935 -#> 7.3194 24.3887 9.7357 -3.6405 0.1455 -5.7188 6.6516 -4.2392 -#> 15.0456 13.1130 12.7170 -3.8100 -19.4943 -16.5896 17.4840 3.2342 -#> -7.2503 7.9477 19.6389 4.8204 7.8325 -9.6368 7.2113 -12.7309 -#> 4.2267 -4.0082 -7.8041 -1.5592 9.4208 10.1543 16.0493 4.4434 -#> -4.3093 -18.7923 -1.8592 0.0332 5.8462 -1.1693 6.3583 -8.1359 -#> -3.6780 -9.1685 -5.1180 -12.9000 9.9408 -2.7031 4.7989 23.9640 -#> 3.3705 -4.7995 2.9122 10.1147 -6.5252 -11.1011 16.6597 -12.7891 -#> 5.2562 6.5468 -9.5708 1.3940 -0.2515 -9.5245 -12.9337 19.8222 -#> -13.3648 3.8419 3.6211 -19.4950 -16.4319 5.5172 0.6711 7.0542 -#> 10.5676 3.0193 -13.6869 -17.7664 9.5023 12.4828 -11.6906 13.0204 -#> -#> Columns 49 to 54 10.4049 -4.3937 -1.2158 1.0199 -1.4881 -1.6759 -#> -0.5652 -7.3973 0.0691 -5.2152 -3.7536 1.2808 -#> 7.0183 5.8691 -4.2220 4.2102 11.6324 4.5266 -#> 6.6726 -7.8257 -8.2892 5.9208 -1.7354 7.0313 -#> -0.5905 4.8962 -0.2178 0.9379 3.7152 1.9126 -#> -6.6017 8.1995 -0.5955 -5.2730 -2.5379 2.6127 -#> -5.8200 -4.8252 5.8403 -2.0860 -1.0756 1.2638 -#> 11.4338 11.5370 -5.4214 6.2038 -5.4447 4.8322 -#> -18.9999 2.5189 10.5246 -0.0110 5.5061 -1.5023 -#> 4.4337 -3.9522 -12.7223 7.7476 1.5671 0.0794 -#> -18.2992 -1.8082 26.3236 1.9072 4.0479 1.4653 -#> -0.3543 -1.8854 9.7096 -1.0631 -4.9233 3.6277 -#> 4.8155 0.9072 -1.4398 -4.3319 3.4289 -4.0279 -#> 11.1888 -19.1820 -9.4257 0.9476 -2.8059 2.2915 -#> 0.8938 -8.0130 -3.0255 -4.9254 -6.8895 3.7856 -#> 0.6107 -3.4682 -3.0902 8.6203 5.0539 -7.3057 -#> -6.0682 12.0741 9.0698 2.2445 -7.1664 -12.1935 -#> -1.4495 0.8049 -3.1648 9.4595 -1.4624 0.4464 -#> 0.0692 -8.6347 -14.3722 -1.6164 5.0661 0.5902 -#> 9.6713 0.1688 -6.0017 1.9842 -6.2445 
-7.3938 -#> 21.8469 -12.3380 -8.1051 -1.8002 0.4829 3.4799 -#> -12.2342 16.0267 3.8473 -11.1799 0.1126 -5.3088 -#> 0.2123 -21.1952 2.5425 9.4203 -1.4694 0.0316 -#> -3.6046 -4.2468 5.5372 -5.5574 3.6863 3.7841 -#> 1.9404 5.0886 8.4009 -16.4564 0.9253 11.0061 -#> -6.1988 -1.1603 8.1998 -3.0862 -3.1927 -1.6622 -#> -14.0336 -7.8097 7.9282 2.5255 2.7384 2.9216 -#> -10.8568 -11.8154 6.2506 3.0923 1.0809 1.9786 -#> -28.8676 -2.6244 11.4707 1.6968 0.9274 6.8802 -#> -18.4692 4.5212 -2.0207 4.3433 -9.6943 -3.0231 -#> 1.0947 -4.4575 -4.9881 -1.8650 -1.7975 -0.3476 -#> 5.9016 -4.6479 -9.4835 8.6000 -5.9623 -2.5017 -#> -3.7405 -2.3338 1.5422 -2.0316 -3.3168 5.1828 +#> Columns 1 to 8 -1.6353 4.4321 6.1203 3.7709 -3.4023 -6.1183 -5.4390 -8.2007 +#> 5.5697 -9.7264 6.0708 -5.7289 0.3792 -7.9255 -14.6813 -3.2065 +#> 2.7488 -3.8040 -3.6862 2.3940 -2.4815 16.9404 5.4111 4.9423 +#> 5.1705 -3.1706 8.2556 8.6014 -17.0782 -2.1720 5.2951 -18.1015 +#> -3.2398 4.1560 11.1199 5.7068 -16.2701 17.4736 -5.6358 0.3901 +#> -2.0510 11.1923 -1.5562 -0.7487 10.6704 -3.0011 -7.6444 14.3682 +#> -1.6671 -1.8266 6.6932 9.1583 11.6255 -5.8125 -2.4459 -8.9862 +#> 1.8152 0.6154 15.3009 -12.0627 8.2672 10.9483 -15.1192 -19.2267 +#> 6.0888 2.3311 -4.2057 1.2558 2.4469 1.8447 -3.5200 -3.4625 +#> -5.1195 -10.0449 -13.9448 6.3498 -6.7641 -8.2834 8.6267 -4.2428 +#> -3.6945 -2.8390 11.6784 -17.0383 3.5297 -6.0096 -9.7653 13.4390 +#> -5.7205 5.9699 14.0259 -4.8045 -23.3689 5.2313 7.3005 6.0076 +#> -1.1548 -8.7054 -4.9090 11.5328 -7.7649 13.0461 1.4930 -8.2168 +#> 1.6853 2.5822 0.2825 8.1012 -1.2717 -3.7791 -4.3516 -3.0138 +#> 7.4851 1.5490 -0.9358 6.4131 -3.6425 4.7491 -2.7134 -15.5698 +#> -5.7900 -7.0067 4.0435 5.2198 -10.6764 5.7583 13.1000 1.1120 +#> 1.8349 -3.1394 9.7660 3.0416 -5.8365 5.3293 -3.6430 -3.6948 +#> -2.8039 -15.9427 -2.1047 32.2348 4.5484 -6.0838 -5.6095 0.2993 +#> 13.2444 -2.2817 -3.4686 -13.9957 0.7390 10.9607 -3.2167 1.8028 +#> 3.4685 1.3891 -4.9012 -1.6123 6.4095 7.3353 -0.7901 3.8872 +#> 0.9645 -3.8791 3.0848 -3.6218 -13.0589 12.2856 0.8339 -5.4423 +#> 2.2557 -7.1083 3.4661 4.3524 -1.8454 -7.2964 0.8217 -10.6039 +#> 1.3620 -2.8747 0.9562 9.8035 8.7008 -8.6258 10.4466 14.0749 +#> -1.6509 -0.6748 4.2229 -11.8480 0.4392 -2.9401 0.9418 -10.3291 +#> -7.1141 -0.0717 -6.6829 6.0824 5.2061 -13.2283 1.0957 -6.9558 +#> -0.9038 -2.2526 1.5140 15.1949 -8.8688 -11.7858 17.6347 -13.8020 +#> -0.0936 8.9236 7.7583 -3.4908 -7.6502 8.8869 -7.2650 -17.5812 +#> -4.1580 -1.3969 17.6921 12.3450 -6.7449 -0.4531 -6.4342 9.3969 +#> 1.3312 -1.1501 2.0879 3.7027 -10.5830 -13.1696 4.9839 -1.2594 +#> 0.7320 5.6934 -7.6473 -2.8277 4.6002 -17.2764 12.0295 19.5532 +#> -7.4079 -3.1421 -3.1011 9.5189 4.4956 8.4683 -4.5437 9.2424 +#> 0.1470 15.2248 2.4783 1.4916 -0.4940 4.4596 18.6246 -11.7759 +#> 8.6609 -0.5493 -6.0460 -3.6809 3.8050 16.3690 -1.5648 -1.7205 +#> +#> Columns 9 to 16 -6.2851 3.8360 4.0812 16.2348 8.9454 6.8916 -5.3057 7.0084 +#> -9.7963 -6.0513 2.1007 -4.4073 -1.2336 -12.0758 4.9370 -7.8475 +#> 10.7937 1.2300 4.0328 5.5895 -1.3036 -9.0790 5.7282 -7.9205 +#> -9.7344 -2.7516 1.8011 5.4262 -12.7751 5.2865 0.1208 10.6316 +#> -7.8695 2.1968 3.7347 -3.2290 9.4674 1.4886 5.9791 -9.1102 +#> -3.9260 11.8193 -0.2911 1.9903 11.0502 4.7821 -13.5610 -2.8117 +#> 7.1338 -15.8626 -6.2505 -5.4815 6.8066 2.4072 -1.2749 10.9718 +#> -7.0970 16.9016 -6.4371 3.3848 13.3238 13.6089 -7.1793 -5.0847 +#> -6.3969 10.4879 -1.5486 -4.9103 -11.7403 13.0164 -5.4640 -9.1828 +#> 16.3038 -6.8361 1.4881 7.0337 10.5306 6.0375 -2.5512 12.6395 +#> -17.6439 -7.1204 -8.0048 
7.8022 -5.6107 -12.6919 -1.5475 -11.0052 +#> -5.6755 -3.5202 7.6174 -8.6198 -10.6437 -9.0619 -14.1580 -2.8026 +#> 11.2652 -5.7832 1.7949 10.2232 1.9751 3.0577 -6.3639 6.9798 +#> -12.5294 0.2421 -5.5722 5.6743 9.4821 -1.6801 7.3115 -3.6876 +#> 8.9469 -7.7445 -19.2238 -8.9655 -5.9435 5.1982 9.8527 -1.9508 +#> -10.5100 2.7257 -4.9903 -1.8673 1.9766 -10.2879 -4.9044 2.2399 +#> 15.8220 -3.4858 -7.7946 6.5953 9.5105 3.9760 -9.4776 4.8341 +#> 1.2145 -14.3449 -16.1444 1.9052 -5.3336 -7.6917 9.1494 -4.7977 +#> -0.1992 6.7144 2.7534 -3.3795 -9.1793 0.7903 -13.2489 -4.9081 +#> -0.7303 0.8345 8.0622 4.0037 1.3024 14.6448 -0.4291 2.4696 +#> 2.6964 0.6241 2.8331 1.5836 4.1614 -12.3349 -10.7515 3.3005 +#> 5.7378 11.6491 -9.7662 -6.0303 4.0343 13.3529 -7.5251 5.8326 +#> -10.1247 2.9946 2.0462 7.6774 5.8304 9.0729 -0.5200 -8.0880 +#> 10.2583 6.3646 -17.3697 1.7589 -1.8800 -10.3373 2.6369 1.1038 +#> -5.8475 -20.4639 10.1173 3.4212 4.2065 6.1156 8.5716 9.1394 +#> 3.5173 -3.9945 3.4049 6.1398 1.9885 4.7099 -6.5305 10.9822 +#> -5.8909 9.3238 -7.9771 13.8210 4.0316 -1.1631 -2.6270 -12.8470 +#> -7.5884 0.7262 -11.8103 -18.4281 2.6524 2.5922 -9.9075 -1.9948 +#> -6.6363 8.0375 9.1075 -3.3916 -5.4109 19.7692 -9.1882 13.5041 +#> -2.2628 -6.4133 9.2126 6.6218 -3.5964 2.9373 -10.7443 2.5160 +#> 1.7958 0.9604 9.7169 10.9803 7.1440 4.7793 0.5224 3.2285 +#> 6.9006 -7.8793 5.4129 7.4761 10.3766 8.9483 10.8707 -3.1484 +#> 13.0729 0.6346 -4.5002 5.9900 -4.4645 -14.4995 -0.9068 7.1925 +#> +#> Columns 17 to 24 6.2779 4.0789 4.6342 -3.4759 19.3502 16.4900 -3.1063 12.2321 +#> -7.5077 -13.3862 -10.4926 12.8095 3.3936 -4.3838 -5.2340 0.8618 +#> 9.9686 -10.6574 1.3555 -1.4059 -6.0067 4.8209 -2.1228 7.1200 +#> -4.6190 -1.5177 -4.3027 6.5516 -4.3046 8.7342 6.8208 12.1705 +#> 1.4544 -1.1432 -3.0856 -4.5655 -8.7982 11.7490 18.8031 -7.0710 +#> 0.6605 10.6931 -6.2308 -9.2823 8.9047 3.2196 0.9699 1.5700 +#> -7.0478 -6.4703 -6.8466 0.2895 -3.2327 5.5190 -3.9232 -4.3466 +#> -6.9490 -9.7761 2.1006 -11.2086 7.0444 -1.6094 -10.4766 3.2639 +#> 10.9471 -12.1016 1.6891 -5.2735 0.0592 -6.1266 2.8895 2.3791 +#> 0.4054 -1.5346 6.7898 -5.8844 -18.3470 -2.5544 -7.9734 -3.2282 +#> -8.9070 -5.7965 0.5405 -9.0516 1.9035 -5.9659 -2.0333 -2.8367 +#> 7.4125 -5.1368 -5.3024 4.3004 -0.7027 -18.6231 -0.6111 -9.4889 +#> 9.5441 1.7020 -4.6136 -2.6403 4.6751 -2.7865 6.2623 -0.7319 +#> 15.1835 -13.3803 2.5517 9.3862 4.0670 -0.8356 -1.4139 20.8876 +#> 6.0132 -4.8067 -8.6996 2.5315 0.0879 3.9812 -2.0344 1.3711 +#> 19.0366 -9.8806 3.3997 6.6510 4.8274 -3.8462 -0.8375 -9.1399 +#> -2.6380 -3.0965 11.8619 -7.6993 -14.2918 15.5233 -8.5303 11.3023 +#> 11.2962 -11.7741 -7.8650 -2.0833 -2.5472 -1.5447 -12.6203 4.6628 +#> -0.3020 -3.8343 14.7717 -1.7855 8.9758 2.0816 -10.2800 16.3089 +#> 4.2869 0.7756 -6.4838 -1.8731 -2.3366 -2.2847 -1.4424 -0.6003 +#> 0.9071 -8.7299 5.8014 -2.2126 -1.8472 -3.9024 0.4044 1.3947 +#> 3.2037 3.6134 -0.6392 7.8643 -19.8176 11.0584 6.1518 -17.1921 +#> 14.8077 6.8448 -12.1126 -5.0377 2.2326 1.0797 -1.9270 7.2347 +#> -9.9347 -4.2147 11.3667 10.6318 -12.4513 9.2924 -9.0560 -0.6255 +#> 3.2851 9.6646 -3.1222 5.9368 6.8206 -1.0937 -1.7236 -9.7517 +#> 2.2264 1.0995 -5.7135 11.8533 9.7378 -2.2368 5.8345 -5.5508 +#> 24.7367 -2.7284 5.0194 -3.3506 -0.8626 5.2726 0.8546 4.7908 +#> -10.1723 -11.6227 -10.1945 -3.2437 6.0436 3.0644 -12.5749 -9.4807 +#> -11.9441 -7.7931 7.4653 -0.7346 1.8427 -6.7075 -2.2042 -2.2354 +#> -20.7180 10.4069 -5.9971 -6.1509 8.7020 -11.5701 0.8657 -4.3643 +#> -1.0755 10.4426 10.0029 -10.2691 11.3524 -0.3177 -2.5653 8.9292 +#> 3.2250 
-15.8103 8.8180 1.9467 5.0783 9.4573 -6.3361 22.4272 +#> 3.2350 -5.9312 0.4309 -1.0309 -10.0581 5.4209 0.4992 22.4847 +#> +#> Columns 25 to 32 -4.5053 -2.1174 14.2360 4.7665 2.3424 -1.2624 -3.2785 -9.7989 +#> -1.9127 -11.7792 3.3175 1.9249 -16.6269 12.8381 -11.8423 0.8771 +#> -4.9612 -0.0928 8.8751 -1.1873 -3.0750 -3.5674 6.7220 9.7173 +#> 5.8238 -3.9061 5.9620 -0.0963 -1.9811 -5.0030 -3.3837 13.3573 +#> 2.6680 -8.8689 15.9369 -2.6194 -9.7763 9.6478 9.1596 2.8069 +#> -4.1380 -1.9067 4.3337 -11.5888 7.1389 0.0871 4.5074 8.0377 +#> 0.9044 -1.4701 -4.1012 3.8395 -3.9617 8.0290 -2.7151 -4.4134 +#> -14.8943 -2.8089 3.7858 8.4465 6.1233 1.4652 1.1410 -9.6152 +#> -4.7238 0.5790 -7.5091 -0.3505 -3.3624 -22.5187 20.4917 19.7872 +#> 4.0737 10.7911 8.4844 -9.8201 1.6337 -2.5235 -7.3762 -10.0326 +#> -3.9822 14.6734 10.1709 1.8469 -12.6385 11.2676 5.2270 -5.5252 +#> 10.8633 9.7329 1.7909 2.2507 -2.5958 -20.1454 19.3010 8.3581 +#> -6.7146 2.6502 -1.1800 -5.0328 1.1697 5.0351 23.4061 -15.1636 +#> -1.3179 1.1593 14.0804 7.1428 0.2216 -24.1185 -7.2368 -7.1155 +#> -5.5530 -3.7137 -6.2124 6.7344 1.6386 -5.8182 -5.4761 -1.0784 +#> 5.6480 -1.7854 -12.1789 -9.8640 -4.1237 18.6410 -1.5094 10.8923 +#> -1.6847 4.3116 3.1892 -1.9839 -7.9005 -6.1859 -1.0035 -2.2484 +#> -8.8033 8.2411 17.0673 0.5185 10.0113 10.6061 -1.7925 0.6375 +#> 2.7209 -3.5135 -6.6464 -27.2545 -6.5440 3.0897 -8.5191 12.1664 +#> 2.6476 -10.4624 -14.4767 16.1376 14.7092 -1.8240 14.6205 0.2579 +#> 16.4958 0.6475 -7.4110 -5.1637 2.5012 1.3436 -8.6728 -17.2104 +#> -8.1495 -13.4390 -3.5876 4.4540 0.0614 -8.1080 14.6534 -11.2639 +#> -3.6892 10.6439 -1.3047 16.4691 -7.9526 -10.2501 -3.6996 -1.7498 +#> -2.8092 0.1179 11.1337 11.5904 -2.4420 3.4980 2.2890 8.6400 +#> 0.1419 6.0515 1.7946 -5.1386 -1.1879 5.6720 2.1412 -14.1735 +#> 12.4793 1.2980 -2.9805 2.9048 4.7302 -9.6842 -1.9983 -10.3513 +#> -12.7380 -9.5773 3.2642 -7.8737 3.7460 -5.2990 -5.6387 -2.9537 +#> 8.8755 -14.0756 -2.9564 17.2987 2.1981 13.1206 6.9287 0.3181 +#> -5.0310 -0.6406 -1.5851 -2.7789 2.1290 5.1021 11.6930 -4.4672 +#> -7.7398 21.4764 3.9897 -0.7631 1.0018 3.7905 -6.6454 8.3266 +#> -7.4614 6.5273 8.2987 2.0473 15.6732 10.0316 -19.5524 -1.8505 +#> -5.6506 2.1858 1.0345 5.1929 -11.5952 -3.7969 -11.7629 -18.4464 +#> -0.1787 -12.8495 5.2724 -11.0030 -3.5747 9.2239 -10.0173 4.9648 +#> +#> Columns 33 to 40 -11.9268 7.6184 9.8609 13.3409 -9.7127 3.6975 13.8528 -4.9289 +#> 9.1265 4.4472 -4.3586 -20.8979 -5.4081 -2.3428 7.4659 -2.4016 +#> -14.5635 -8.6556 7.5654 -5.3610 13.9681 8.1991 -8.0856 2.2180 +#> -1.0739 -11.3050 -6.3948 -1.3663 6.4392 1.1543 -1.7980 4.4444 +#> -15.3729 -4.6911 15.3161 -2.1265 26.1166 -15.0314 -1.1702 0.0984 +#> -17.5054 5.5257 -4.4897 7.9732 -7.9022 -3.5838 8.2722 11.5031 +#> 15.9846 -2.7107 -6.5184 2.3077 -14.9634 9.2020 4.4676 -6.0666 +#> -26.6501 6.6041 10.1317 16.5637 -4.8222 -16.5782 -10.9113 -0.5785 +#> -9.0396 4.9282 -5.5893 -5.7280 8.2698 -1.7011 -0.8699 2.8684 +#> 4.3633 0.9977 -4.8213 -20.8736 -1.1724 13.0067 18.3303 -1.6187 +#> 2.2989 10.5885 -6.9858 -0.2267 4.2112 -4.1962 13.2158 -17.2928 +#> 11.0699 2.9381 -4.8933 -38.2932 0.1603 16.0582 -2.9725 7.8612 +#> -2.0308 0.0037 3.0131 -0.3492 12.0400 6.0014 2.0772 -8.6665 +#> -3.5162 10.9643 -13.8959 -1.6906 18.3387 4.0186 1.0276 4.2466 +#> 3.7584 -11.4404 10.2994 -5.4826 -12.6490 -4.0644 2.0176 -6.9175 +#> -3.2310 11.1564 9.4832 11.1806 4.6966 2.6487 -13.8709 -12.8078 +#> 3.7125 4.7114 0.1897 -0.4760 5.1806 2.0457 11.5016 -23.4014 +#> -21.3517 8.0657 9.8888 7.5029 -16.9792 -8.0239 4.0550 -7.0832 +#> -4.5678 
-2.6708 11.5115 9.7412 14.8756 -7.0594 -8.0661 -14.4081 +#> 2.4442 -2.9840 3.5688 4.4359 10.2149 -5.4918 13.6324 0.8469 +#> 9.0768 4.8015 10.0607 -9.5290 -5.1481 -0.4383 -2.8543 -1.7835 +#> -8.3108 11.3465 11.2742 0.0542 -7.1745 -2.2104 4.1766 -10.4813 +#> -2.3820 4.2490 0.7564 2.1989 3.6649 10.0225 2.0802 -2.5912 +#> 1.3367 10.0371 8.8040 -5.2670 -14.0395 -6.0852 20.9049 -0.7500 +#> -7.8930 -6.1188 0.4769 -22.1620 -7.2019 17.3513 -1.6344 10.9354 +#> -9.7916 0.0431 20.0900 -7.8611 -10.3414 19.8199 0.6647 2.4200 +#> -9.6766 18.0063 2.6052 13.7560 -1.9542 -11.7086 -17.1135 -4.3562 +#> -11.2062 5.4855 -0.6854 -12.7413 3.4211 2.3197 -2.5488 -1.9933 +#> 1.3085 -1.1305 -9.4153 1.9846 -4.8051 8.0312 7.6671 4.6333 +#> 4.5424 -22.8243 13.8807 12.7908 3.7836 5.4811 8.1751 -14.9043 +#> -15.0608 -2.7074 18.1918 2.7607 4.9620 -11.5461 14.4178 2.9780 +#> -7.9901 -0.6274 -8.6149 15.8710 19.4579 0.6645 -21.5073 0.2367 +#> 6.2383 -2.7190 0.1438 12.9022 -1.5891 -8.3071 11.5200 -9.9123 +#> +#> Columns 41 to 48 20.4647 -10.5523 0.2340 7.0850 -11.2098 -2.3350 -5.0162 -10.3207 +#> -2.6782 -2.6592 -0.2630 2.4612 -10.6666 -3.4490 5.4198 -0.1389 +#> 9.8990 -14.8340 -6.4583 13.6772 -7.9724 7.2383 -0.3764 5.7707 +#> 10.7997 -1.8674 1.0840 -3.3264 0.4576 4.3198 -8.7924 -10.5595 +#> 3.4498 -1.2010 8.0379 -6.5539 -8.2836 2.1312 -26.0666 4.6215 +#> 0.7633 6.0816 2.7245 2.1741 12.7044 -3.9310 -3.9756 3.0319 +#> -8.7633 4.9305 15.4024 0.2374 -1.0643 -0.1309 17.7423 0.9355 +#> 7.7370 -5.1074 19.1092 -10.4697 8.9580 -4.5772 -2.8787 14.6211 +#> 10.9599 2.6534 14.1849 7.7106 7.4843 0.0934 10.1777 6.5403 +#> 11.4195 1.3583 -5.1359 -2.0273 -13.9047 -0.7697 5.7659 -3.4000 +#> -4.5752 0.8380 10.4752 -3.9706 13.7800 4.0281 -4.4736 -4.9348 +#> -2.1779 -15.2266 1.7633 -9.1774 -2.9366 17.0980 5.9149 -3.0456 +#> 14.2548 -7.3088 13.8884 0.0288 -11.7036 1.9510 -0.0916 -6.3025 +#> -3.9179 -10.9053 13.9168 -8.3460 8.7899 -0.7906 6.8612 1.4982 +#> -8.4043 -19.5186 0.4067 21.8964 8.2378 -8.3949 -12.4678 8.2527 +#> -4.7134 10.3361 7.1094 12.7306 -9.2579 19.6261 -0.4495 7.9905 +#> -2.7534 -6.5759 2.5518 -1.7753 6.1320 -5.3221 0.3079 10.4943 +#> 0.0245 -2.2497 -3.2277 9.1750 -13.2683 -13.2944 -0.1946 2.3356 +#> -4.1616 3.4521 1.2067 0.5371 10.9614 2.2384 21.4076 15.6762 +#> 7.0099 -0.8731 6.3360 -3.1024 6.2920 -16.2566 12.2112 -4.0807 +#> -1.1357 -8.1284 -9.2778 -2.5176 2.5829 12.9534 -2.0807 -3.2123 +#> -2.0304 9.9117 -5.6539 3.0012 -11.1934 -3.8649 -30.2153 -2.6543 +#> -2.7060 2.2340 -10.4610 4.1720 -19.0114 8.4332 12.9279 -9.7706 +#> -5.4916 -8.8655 -6.1061 3.4889 3.6215 -22.6714 -11.8122 -3.6672 +#> 4.6843 -4.2341 9.4974 9.0313 -18.9345 0.5230 12.7945 -9.2693 +#> 1.7356 -8.2352 -6.7436 5.8068 -26.6280 -9.7926 7.3267 -21.7316 +#> 12.2611 -8.2844 12.8783 2.4960 -2.6067 18.6078 7.7587 2.8397 +#> -7.0828 -10.0814 2.0135 5.3172 -15.1470 10.5911 0.8432 6.6044 +#> 6.0089 -2.5268 2.5346 13.8533 4.5407 -1.5666 -1.0369 -11.2153 +#> 8.7093 -10.0270 -21.6963 -2.8492 10.1861 -19.6865 -6.9108 5.8541 +#> 6.1980 -6.0629 -4.7703 -5.5004 -3.2540 -2.9782 -3.6518 1.2137 +#> -5.0318 0.4662 3.4348 1.4638 -6.4028 14.7323 -2.1698 -14.5576 +#> 1.8353 -3.1170 -14.5561 13.7529 10.9541 1.7576 1.0423 -10.6220 +#> +#> Columns 49 to 54 -17.0780 -6.5815 4.9577 0.7125 1.7213 10.7328 +#> 6.7054 6.2971 0.0432 -9.5257 -4.6027 0.2612 +#> -30.0003 -2.1935 0.5826 -3.1455 10.4763 2.6974 +#> -8.7301 7.5076 5.4824 16.5618 13.5231 2.7738 +#> -0.2364 -3.3846 7.3295 4.4764 -1.7245 -7.5216 +#> 1.3830 -10.6503 -6.3592 -0.1129 -10.5233 -3.6009 +#> 10.6488 -4.8841 8.9133 9.2143 4.2506 
-3.3445 +#> -10.8800 10.1740 -11.0188 -1.2580 -2.2174 -3.9595 +#> -9.1663 0.6537 6.9500 11.0254 9.0903 3.5276 +#> 9.4244 -3.7664 5.9635 -19.0430 -4.0715 -0.0513 +#> -2.2399 -6.4013 -15.2913 9.7325 -2.1793 2.1762 +#> -4.4015 5.8905 2.1533 2.2135 6.1479 4.7397 +#> -5.8223 -1.9129 0.8422 -4.6915 4.3558 -2.4428 +#> 4.8444 9.1223 -9.5788 2.0387 2.3563 0.2254 +#> 8.4955 9.1437 12.6130 9.4622 11.8852 1.3344 +#> -12.5300 -10.8933 5.9414 -2.6376 0.0451 1.3944 +#> -2.5310 -24.5449 -8.2732 -3.2775 3.8950 -2.7564 +#> -3.7708 1.8577 12.7436 11.9870 0.1528 0.2858 +#> 14.0968 6.7989 4.6220 -0.1335 -7.6953 0.3397 +#> -3.7870 -5.0250 -2.7751 -1.8903 -7.0356 3.5137 +#> 13.1324 -3.5106 6.6707 -5.1084 -10.2100 -5.8738 +#> 12.0708 -12.8613 -2.4515 -9.8594 -4.1255 -0.2364 +#> -11.8370 8.1900 -7.6104 -6.6829 1.0282 0.4576 +#> -3.3568 -8.2177 5.6953 3.6257 -2.4383 -3.6482 +#> -10.5435 9.2601 15.5485 -1.8051 4.2065 0.5571 +#> -18.5573 -0.4799 16.9107 0.5709 -6.2331 -6.8055 +#> 6.9494 16.6523 -14.2954 -7.2552 12.9859 2.5998 +#> 3.4777 12.1104 12.9764 2.3842 4.0864 4.6834 +#> 6.1148 6.8673 11.0114 1.9545 -0.6595 5.6429 +#> -5.4072 -23.6101 9.7078 5.6821 -10.9979 3.5250 +#> -2.0657 8.6995 -1.6321 -6.7550 -1.4473 0.6146 +#> 4.1655 5.9886 4.5757 -7.0181 6.0293 -3.8261 +#> 7.4021 -10.2871 1.3421 -6.8808 4.4474 2.2112 #> #> (2,.,.) = -#> Columns 1 to 6 -7.7358e+00 2.7064e+00 4.1159e+00 2.3887e+00 9.3367e+00 -1.4353e+01 -#> 8.5316e-01 3.7687e+00 -3.4629e+00 6.8568e+00 -7.2856e-01 -3.5939e-01 -#> 4.5799e+00 -6.9411e+00 2.7158e+00 -5.9324e-01 6.3918e+00 3.3153e+00 -#> 4.7981e+00 8.9044e+00 5.5984e+00 5.0699e+00 -1.4592e+01 -2.0785e+00 -#> -4.6695e+00 -2.5667e+00 1.3037e+00 -1.0801e+00 -5.1928e+00 -2.5828e+00 -#> -3.8563e-02 -2.9945e+00 -2.9541e+00 4.8150e+00 7.5176e+00 -5.7202e+00 -#> 1.0594e+01 -1.3108e+01 6.0147e+00 3.7226e+00 1.9505e+00 1.4969e+00 -#> -7.6931e+00 -4.9361e+00 -2.7434e+00 4.6197e+00 1.1686e+01 9.4690e+00 -#> -9.7675e-01 4.7890e+00 -6.4019e-01 1.1604e+01 -5.3119e+00 1.1268e+01 -#> 3.5569e+00 -1.6191e+00 1.7762e+00 -3.0177e+00 -1.7011e+01 9.1518e+00 -#> 1.0447e+00 -6.3763e+00 -4.8049e+00 1.2495e+01 7.7488e+00 1.6410e+01 -#> -7.6849e-01 6.6844e+00 -3.0198e+00 1.0458e+01 -1.2576e+00 1.5203e+00 -#> 4.3539e+00 8.1895e+00 -8.7703e+00 9.5344e+00 -3.5441e+00 -1.4626e+00 -#> 3.7191e-01 4.0707e+00 -3.7622e+00 1.2110e+00 -1.2392e+01 -3.5298e+00 -#> 6.9584e+00 9.4487e+00 2.3289e+00 1.1768e+01 -2.5903e+01 -1.8819e+01 -#> -2.9398e-02 1.9986e+00 -1.7783e+00 -1.2250e-01 4.4910e+00 2.3489e+00 -#> 4.1574e+00 -1.2356e+01 -1.4853e+01 1.4956e+01 1.0511e+01 -4.9196e-01 -#> 2.0067e+00 -3.8693e+00 -8.2226e+00 1.2391e+01 -1.7505e+00 -5.3938e+00 -#> 2.2505e+00 7.4454e+00 -3.4723e+00 -1.5694e+01 1.0488e+01 7.3954e+00 -#> 7.9690e-02 4.2664e+00 9.6114e+00 -2.6502e+00 -8.2685e+00 4.5040e+00 -#> 2.7958e+00 -7.8079e+00 9.3878e+00 -1.3784e+01 4.6990e+00 -6.3731e+00 -#> 3.5866e+00 -2.8282e+00 -4.5961e+00 1.0287e+00 2.9950e+00 -7.6990e+00 -#> -3.3657e-01 1.6978e+00 -9.6235e+00 8.0490e-01 4.4091e-01 1.0902e+01 -#> 2.5087e+00 5.0536e+00 -3.8724e+00 1.0526e+01 1.6037e+01 -2.1520e-01 -#> -6.3437e-01 -1.1003e+00 8.1602e+00 -4.9335e+00 -1.2635e+01 9.8150e+00 -#> -2.1249e+00 3.7480e+00 7.7365e+00 -6.1236e+00 6.1852e+00 -1.3069e+01 -#> 7.3109e+00 1.2901e+01 -8.8473e+00 -3.7098e+00 -8.9201e+00 -6.5198e+00 -#> -5.4844e+00 -2.5238e+00 -4.1557e+00 -1.8064e-01 5.5304e-01 6.7819e+00 -#> -1.9159e+00 1.0311e+01 3.6737e+00 -1.1483e+01 -7.9527e+00 -6.1219e+00 -#> -1.0295e+01 3.7689e+00 -1.4383e+01 1.2151e+01 -4.4508e+00 -8.2422e+00 -#> 4.2292e+00 3.6646e+00 
-1.0031e+01 2.9930e+00 2.1887e+00 -7.5918e+00 -#> 9.7002e-01 -2.7367e+00 7.7447e+00 -1.0757e+01 6.6942e+00 -8.9944e+00 -#> 7.1615e+00 3.2208e+00 -8.1345e+00 -7.2343e+00 1.8508e+00 7.8587e-01 -#> -#> Columns 7 to 12 6.1971e-01 -3.5566e+00 -6.0313e+00 -9.2937e+00 -2.9808e+00 6.3143e-01 -#> 6.1591e+00 3.1784e+00 -1.4260e+01 -4.0602e+00 1.9287e+01 4.2227e+00 -#> 4.9860e+00 1.1289e+01 1.4267e+00 -1.4943e+00 -9.9253e+00 2.7712e+00 -#> -5.2414e+00 1.2068e+01 -8.3227e+00 -5.1654e+00 4.5057e-02 -6.2535e+00 -#> 4.2992e+00 -5.7681e+00 8.0550e+00 7.8340e+00 -7.1266e+00 1.9123e+00 -#> 1.3649e+01 -8.0137e+00 -4.5874e+00 1.6664e+01 -6.1277e+00 -3.3137e+00 -#> -7.7574e+00 6.1543e+00 7.9361e+00 -3.3967e+00 4.5374e+00 -1.9442e+00 -#> 1.8258e+00 2.5381e+00 4.9820e+00 6.2882e-01 -2.1422e+00 -1.5931e+01 -#> 2.7219e+00 -8.0431e+00 3.3396e+00 -4.2228e+00 -1.0965e+00 3.3755e+00 -#> -2.8221e+00 -2.8199e+00 -7.3222e+00 2.3526e+00 -1.6953e+00 1.0169e+01 -#> 1.9237e+01 -7.6633e+00 -1.0494e+01 -5.6553e-01 -1.5653e+01 -1.5750e+01 -#> 1.7894e+00 -5.2464e+00 -5.6364e+00 7.6857e-01 -1.0334e+01 -1.7855e+01 -#> 1.8345e+00 -7.4619e+00 6.2523e+00 1.5827e+01 -7.7351e-01 -1.5328e-01 -#> -5.8410e+00 -7.1343e+00 -4.4032e+00 4.0195e-01 3.6980e+00 6.1992e+00 -#> -4.7707e-02 1.5093e+01 -1.3516e+01 -4.6621e-01 -1.1089e+01 3.5860e-01 -#> -1.9751e+01 1.6217e+01 1.2023e+01 2.6548e+00 2.0095e+01 -1.4241e+01 -#> 6.4834e-01 1.0354e+01 -9.9857e+00 -4.7512e+00 -3.9769e-01 -1.2231e+01 -#> -1.1477e+01 6.8674e+00 -3.0546e+00 -5.0975e+00 2.7640e+00 -7.8965e+00 -#> -3.2730e+00 7.8723e-01 -4.4877e-02 7.8095e+00 1.6561e+01 4.8170e-01 -#> 1.2527e+00 -8.8599e+00 -1.6237e+00 3.5457e+00 -5.1429e+00 -4.6922e+00 -#> 1.3649e+00 4.6369e+00 -1.6378e+00 -4.3386e+00 7.2234e+00 -7.3588e+00 -#> 9.8566e+00 8.9358e+00 -1.2413e+01 -5.5862e+00 -1.2195e+01 3.5331e-01 -#> -6.1922e+00 1.8767e+00 -2.6397e+00 -7.5956e+00 4.6306e+00 5.0578e+00 -#> -1.8053e+00 -9.5046e-01 2.7005e+00 3.5801e+00 4.6201e+00 -1.5657e+00 -#> -8.2460e+00 6.8687e+00 -1.0745e+01 -1.4851e+01 -6.3045e+00 1.3622e+00 -#> 9.1886e+00 -1.7410e+00 -4.4931e-01 -2.5491e+00 -4.5174e+00 1.1401e+00 -#> -2.4219e+00 1.1363e+01 -5.0151e+00 -7.2247e+00 3.0972e+00 1.2977e+01 -#> 8.8036e+00 7.9221e+00 -5.6821e+00 5.5599e+00 6.7689e+00 8.8789e+00 -#> 1.2687e+00 1.1066e+01 8.3913e+00 -5.0425e-01 1.0345e+01 1.3019e+01 -#> -1.8805e+01 4.4200e+00 -3.2719e+00 -3.9947e+00 -8.4469e+00 -6.8574e+00 -#> 1.2520e+00 -1.8530e+00 6.9782e-01 -6.9486e+00 -1.1507e+01 1.4238e+01 -#> 4.3166e+00 -1.6821e+00 -1.5755e+00 -1.6891e+01 -2.0169e+00 -8.6049e+00 -#> 1.1485e+01 -9.6323e-01 -3.6946e+00 -4.7002e+00 -6.9156e+00 -3.2433e+00 -#> -#> Columns 13 to 18 7.1991e+00 -3.1020e+00 -1.0215e+01 7.3400e+00 -3.8720e+00 -3.3105e+00 -#> 7.5273e+00 -1.8029e+00 1.1510e+01 -6.4125e+00 -5.0547e+00 -9.4835e+00 -#> 1.0358e+00 9.7088e+00 -2.3843e+00 -1.5570e+01 2.7400e+00 -2.6007e+00 -#> 1.7425e+01 -1.6332e+01 6.0853e+00 6.0137e+00 -3.3011e+00 -1.4179e+00 -#> 9.4562e+00 -1.8261e+01 -7.5022e+00 -7.5939e+00 -7.0870e+00 -7.4055e-01 -#> -3.9737e+00 1.8817e+00 -8.4407e+00 8.2835e+00 -4.1121e+00 7.4358e+00 -#> 6.4169e+00 1.1986e+00 -1.8904e+01 1.0954e+01 -6.5748e-01 -7.1496e+00 -#> -1.8261e+00 5.0867e-01 -1.4623e+01 -3.4692e+00 -4.8791e+00 -1.0374e+00 -#> -5.0341e+00 -1.4850e+01 2.4858e+00 1.1707e+01 5.5279e+00 5.3110e+00 -#> -2.7140e+00 9.7383e-01 4.0812e+00 -3.3657e+00 6.4222e+00 1.9005e+00 -#> -1.6522e+01 -1.1063e+01 6.8178e+00 -2.1295e+01 7.5161e+00 3.4128e+00 -#> -8.0671e+00 -1.9479e+01 -4.5783e-01 1.4737e+00 1.6592e-01 6.2285e+00 -#> 1.0884e+00 
-1.0263e+01 1.7437e+01 2.9594e+00 -2.4843e+00 6.6805e+00 -#> 1.6568e+01 8.4857e-01 1.1532e+00 -3.9707e+00 7.1131e+00 -6.0201e+00 -#> 6.9973e+00 -3.9699e+00 2.1605e+00 7.8246e+00 8.5465e+00 -1.2609e+01 -#> -1.0380e+01 3.2347e+00 -5.6959e+00 -8.4189e-01 6.5300e+00 -2.4554e+01 -#> -1.1610e+01 4.5740e+00 -3.2509e+00 -1.4320e+01 -2.0172e+00 3.4778e-01 -#> -1.8972e+00 7.9535e+00 -1.1391e+01 8.0844e-01 9.2623e-01 -7.4139e-01 -#> 3.2082e+00 1.7688e+00 6.5023e+00 6.8089e+00 5.2405e+00 -4.9714e+00 -#> 5.2694e+00 -1.3602e+00 1.5576e+00 1.1241e+01 4.4270e+00 -1.4899e+00 -#> 6.5272e+00 -6.2469e-01 7.4468e-01 -8.0070e+00 -5.7550e+00 -3.5396e-01 -#> -4.3746e+00 1.2775e+01 7.9490e+00 3.3861e+00 1.5139e+00 -2.3440e+00 -#> 2.9495e+00 -7.8901e+00 4.4748e+00 -5.6787e+00 2.1329e+00 -1.4135e+01 -#> -1.9857e+00 4.7930e+00 1.0535e+00 1.0709e+00 -3.0928e+00 6.7112e+00 -#> -6.3406e+00 -3.5523e+00 1.2149e+01 -6.1518e-01 1.1473e+01 -6.0265e+00 -#> -8.7913e+00 5.0781e+00 -9.3632e-01 1.7481e+01 6.8463e-01 6.5693e+00 -#> -1.1081e+01 -3.8875e+00 -1.0769e+01 -2.0818e+01 1.7809e+00 -6.9261e-01 -#> 6.7785e+00 9.7152e-01 -7.7956e+00 -1.0056e+01 6.0275e+00 5.2517e+00 -#> 1.1351e-01 -9.0045e+00 9.5697e+00 2.3714e-01 -9.0294e+00 -2.9469e+00 -#> 1.3735e+01 3.8854e+00 -2.9617e+00 1.0569e+00 1.4450e+01 1.2248e+01 -#> -2.5803e+00 5.4637e+00 1.1467e+01 -4.1106e+00 5.5802e+00 -3.3711e+00 -#> -5.2019e+00 4.9950e+00 -1.9193e+01 1.9517e+01 -4.0764e+00 1.0695e+01 -#> -9.5082e+00 1.0037e+01 -1.1583e+01 -8.1655e+00 -1.2235e+01 1.6803e+01 -#> -#> Columns 19 to 24 2.2936e-01 -1.6888e+01 1.3698e+01 -6.3099e+00 9.9215e-02 -2.9322e+00 -#> 1.9769e+00 3.4036e+00 -7.5839e+00 -7.6613e+00 1.7599e-02 1.2105e+00 -#> -1.8907e+01 -2.0247e+00 3.6977e+00 -5.8704e+00 -6.2005e+00 1.5775e+01 -#> 6.5952e+00 7.7901e+00 -1.2322e+01 7.3395e-01 -9.5554e+00 -4.4870e+00 -#> -1.4227e+01 -2.1848e-01 2.5902e+00 -5.1566e+00 -2.3201e+01 -2.9847e+00 -#> 8.3254e+00 -2.6919e+00 -7.2369e+00 -2.7206e-01 -3.8869e-01 -1.1716e+00 -#> 4.6815e+00 -1.1390e-01 -3.7367e+00 3.8517e+00 -1.4061e+00 -6.6726e+00 -#> -7.7048e+00 2.2018e+00 5.9940e+00 6.2055e+00 8.6355e+00 4.7104e+00 -#> -4.9984e+00 1.8139e+00 8.1921e+00 -6.0335e+00 -5.7494e+00 -1.3691e+00 -#> 3.3706e+00 -1.1477e+01 1.1708e-01 -1.2528e+01 -4.0107e+00 6.8041e+00 -#> 2.3757e+01 -7.6485e+00 7.0026e+00 -1.4405e+01 1.2900e-01 6.8069e+00 -#> 1.7010e+00 1.7767e+01 -9.5315e+00 -3.5809e+00 -8.8127e+00 -5.7658e+00 -#> 6.8095e+00 4.8048e+00 7.5958e+00 -1.7835e+00 -5.2495e+00 1.0999e+01 -#> -1.2062e+00 -1.4216e+01 1.6161e+00 -4.8818e+00 9.8680e+00 -4.7297e+00 -#> 4.6286e+00 -2.3997e+00 9.1565e+00 -1.8091e+01 -1.8798e+01 -1.7613e+00 -#> -3.3277e+00 -9.2900e+00 6.0133e+00 3.0082e+00 -2.0833e+00 -7.1763e+00 -#> 4.2549e+00 -8.8263e+00 -7.8801e+00 3.9957e-01 -3.5176e+00 -1.8507e+00 -#> -4.9498e+00 1.5408e+01 -8.1279e+00 -7.4771e+00 2.3541e+00 5.4949e+00 -#> 4.1214e+00 -1.2128e+00 1.2296e+01 7.6001e+00 1.4415e+01 -1.6055e+00 -#> -4.9501e+00 1.3041e+01 -4.9621e-01 1.8703e+01 -1.8289e+01 1.0630e+01 -#> 1.0798e+00 1.5102e+00 -2.2588e+01 -1.9352e+00 1.3287e+01 -4.5660e+00 -#> 1.4408e+01 -7.8796e+00 -1.0958e+01 3.5238e+00 1.0787e+00 1.1946e+01 -#> -2.9766e+00 -3.4401e+00 2.6354e+00 -1.8404e+01 -8.8099e+00 8.9852e+00 -#> 1.6289e+01 -6.3441e+00 1.6091e+01 1.5075e+00 1.1968e+01 -2.3290e+00 -#> 5.2946e+00 6.6794e+00 -3.5291e+00 2.2196e-01 -2.1849e+00 -7.8252e+00 -#> -3.6348e+00 1.4114e+01 -2.7772e+00 8.4282e+00 -1.0159e+01 1.4503e+01 -#> -7.9663e+00 -1.0334e+00 -3.8956e+00 -9.7743e+00 -7.9716e+00 2.0617e+00 -#> -2.1056e+00 -1.1841e+01 4.4612e+00 
1.1785e+01 9.6248e+00 -2.4593e+00 -#> -7.4521e+00 -2.6863e+00 6.6458e+00 8.8731e+00 -2.6243e+00 -7.9622e+00 -#> -1.4993e+01 -7.1811e+00 -1.8982e+00 1.7245e+01 -1.9409e+01 -7.2242e-01 -#> 7.3318e+00 -1.8824e+00 4.5499e+00 -7.1541e+00 1.9782e+00 -3.5518e+00 -#> -4.1648e+00 1.9168e+01 -1.1092e+01 -1.5164e+01 -3.6274e+00 1.6950e+00 -#> 2.6833e+01 -1.0341e+01 -1.7529e+01 -1.1678e+01 -3.8251e+00 -1.9653e+01 -#> -#> Columns 25 to 30 8.9089e+00 -2.2109e+01 1.4243e+01 -8.6281e+00 4.8957e+00 6.5300e+00 -#> -1.6790e+01 2.2822e+00 1.2295e+01 -1.3033e+01 -3.7050e+00 1.8875e+01 -#> -1.0381e+01 -3.2040e-01 -7.3908e+00 1.1772e+01 4.1685e+00 -1.2452e+01 -#> -1.0310e+01 4.5585e+00 -1.0004e+01 1.9990e+00 5.6939e+00 2.0298e+00 -#> -1.0004e+01 1.0409e+00 -1.3124e+01 -2.0799e+00 -7.1760e+00 7.3562e+00 -#> -6.0570e+00 -5.3023e+00 -2.1541e+01 1.9820e+00 7.8665e+00 6.3329e+00 -#> 6.2140e+00 3.1022e+01 -1.2335e+01 -1.1288e+01 1.1501e+01 -4.5731e-01 -#> -1.8300e+00 -1.7596e+01 4.1389e-01 -1.1390e+01 2.6726e+00 2.1959e+00 -#> -1.4649e+00 -8.5470e-01 5.2110e+00 1.0870e+01 -5.0161e+00 -5.2889e+00 -#> -8.2089e+00 -8.4885e+00 -7.0544e+00 1.2611e+00 3.7091e+00 6.7700e+00 -#> 7.2513e+00 -5.8998e+00 -8.3026e+00 -3.4304e+00 1.7832e+01 -1.1176e+01 -#> -9.1592e+00 -8.7092e-01 -1.0351e+01 -1.0681e+01 -1.2339e+01 6.6825e+00 -#> -6.7008e-01 -4.8440e+00 2.3226e-01 -2.9946e-02 5.6478e+00 1.3199e+01 -#> -1.2542e+01 -5.9341e+00 1.2123e+01 1.3029e+01 -1.2662e+01 -4.4325e+00 -#> -6.6169e+00 1.6597e+00 -3.5043e+00 -1.1193e+01 8.6377e+00 -5.4953e+00 -#> -5.0659e+00 2.7551e-01 1.3744e+01 -3.0966e+00 1.8892e+00 -4.4114e+00 -#> -8.6836e+00 -2.4412e+00 -1.4585e+01 4.2378e+00 -6.7566e+00 -8.1642e+00 -#> -1.2766e+01 -6.4137e+00 -8.1060e+00 4.6149e+00 -9.7195e+00 -7.7202e+00 -#> -1.4313e+01 -6.9195e+00 1.1693e+01 -1.4572e+00 -3.4242e+00 -1.8018e+00 -#> -1.0537e+01 1.6098e-01 -1.2272e+01 2.7677e+00 -8.9725e+00 -6.2632e+00 -#> -6.2350e+00 6.9955e+00 4.7859e+00 -2.2685e+00 6.8022e+00 2.9070e+00 -#> 4.5809e-01 -8.3880e+00 -1.9776e+01 5.2140e+00 3.7813e+00 -6.2155e+00 -#> 3.0042e+00 1.4669e+01 6.6397e+00 -1.2993e+01 6.9356e+00 2.2650e+00 -#> 1.1576e+01 -1.2529e+01 6.9593e+00 1.8562e+01 2.0904e+01 9.1805e+00 -#> 3.0764e+00 1.5342e+00 1.4406e+00 2.7671e-01 8.5106e+00 1.1056e+00 -#> 1.0280e+00 4.0112e+00 3.4976e+00 -3.5274e-02 -5.2699e+00 -3.1552e+00 -#> 1.1858e+01 -4.7703e+00 5.7537e+00 2.1920e+01 5.4029e+00 5.0997e+00 -#> 1.2290e+01 6.4362e+00 4.0488e+00 1.4499e+01 -6.0501e+00 -3.2366e+00 -#> 6.2689e+00 7.3161e-01 -1.2452e+00 1.4861e+01 -8.7672e+00 -1.1689e+01 -#> 1.1985e+01 -1.7174e+01 -1.0914e+01 7.4671e+00 -7.0140e+00 -3.4464e+00 -#> -7.8820e+00 3.4978e+00 3.0521e+00 -1.5181e+01 1.3071e+01 -1.4274e+01 -#> 1.9783e-01 -1.6555e-01 -2.6493e+00 1.3774e+01 4.8866e-01 -1.0697e+01 -#> 7.7743e+00 -1.2427e+01 -5.4104e+00 -1.0587e+01 1.4412e+01 5.0859e+00 -#> -#> Columns 31 to 36 -1.1357e+01 1.1981e+01 9.9716e+00 1.1857e+01 -1.8115e+00 -1.4695e+01 -#> 7.3422e+00 1.5548e+01 3.2082e+00 1.0147e+01 1.4100e+01 1.4162e+01 -#> 3.0844e+00 -7.3096e+00 6.9814e+00 2.2799e+00 5.8196e-01 -4.6313e+00 -#> 7.6082e-02 -9.9290e+00 -1.0213e+01 -8.2199e+00 -3.0799e+00 -8.5401e-01 -#> -3.4770e+00 1.3473e+00 1.4197e+01 -1.0314e-01 9.3214e+00 -4.5633e-01 -#> 1.0109e+01 -1.0152e+01 5.8044e+00 1.2882e+01 -4.3226e+00 1.0007e+01 -#> 7.5746e-01 2.2464e+00 1.1969e+01 -8.5846e-01 1.3138e+01 9.1428e+00 -#> -4.1221e+00 -2.3758e+00 -6.9433e+00 -7.5041e+00 -6.2797e+00 -4.0670e+00 -#> -5.4838e+00 5.2215e+00 1.3257e+01 1.2602e+01 8.5995e+00 1.5516e+01 -#> 1.5377e+01 7.5229e-01 2.2312e+00 
3.4551e-01 1.2492e+00 5.7592e+00 -#> 9.3748e+00 -4.4612e+00 -1.1006e+01 -2.0006e+00 1.4334e-01 -1.1820e+01 -#> 1.6497e+01 -8.6081e+00 -6.0907e+00 5.8540e-01 -2.5686e+00 2.6386e+00 -#> -9.0482e+00 1.9906e+01 2.4323e+00 2.7765e+00 2.0396e+00 8.2006e+00 -#> -3.8722e+00 -8.3080e+00 -2.0545e+01 3.4558e+00 -2.0635e-01 -3.7895e+00 -#> 9.1895e+00 -2.5371e+00 8.2614e+00 4.9813e-01 -5.2952e+00 3.0628e+00 -#> 1.1221e+01 8.7408e+00 1.2788e+01 -4.7302e+00 2.2632e+00 7.0985e+00 -#> -7.5462e-03 2.7481e+00 -8.8928e-01 1.5583e+00 1.2498e+00 1.1898e+01 -#> -2.0618e+00 -1.0152e+00 7.9617e-01 -8.6114e+00 -6.5780e+00 2.5092e+00 -#> -5.6837e+00 3.7266e-01 1.2264e+01 -5.9856e+00 2.4086e+00 1.5265e+00 -#> 9.8641e-01 1.1195e+01 -9.3773e+00 -6.0877e+00 -2.7005e+00 -9.2722e+00 -#> -5.5643e+00 2.6269e+00 5.6519e+00 2.0768e+01 1.3134e+01 -4.0507e+00 -#> 6.6763e+00 -1.2067e+01 8.3925e+00 4.6361e+00 4.4979e+00 4.3787e-01 -#> -4.4566e+00 4.6770e+00 7.7471e+00 5.9833e+00 -3.9309e+00 -1.0741e+01 -#> -1.5834e+01 3.2567e+00 2.3690e-01 9.5044e+00 -1.1210e+00 -1.2841e-02 -#> -2.4358e+00 -5.8075e-01 -5.3243e+00 1.2629e+01 8.5652e+00 1.7653e+01 -#> 9.1133e+00 -1.1740e+01 -3.6817e-01 -5.1786e+00 -1.1297e+01 4.8665e+00 -#> 8.5231e-01 -1.4404e+01 2.4547e+00 1.9263e+01 3.3342e+00 9.9445e-01 -#> -6.6792e-01 3.1188e+00 -9.2717e+00 1.3892e+00 -1.7736e+00 1.7867e+00 -#> -3.4408e+00 7.4662e+00 1.0113e+01 -1.6295e+00 -3.7887e+00 -6.8620e+00 -#> -5.6302e+00 8.0703e+00 6.8752e-01 1.4518e+01 -9.6824e-01 1.1248e+01 -#> -1.8732e+01 -4.1066e+00 1.1552e+01 1.5887e+00 -3.4372e+00 -3.8972e+00 -#> 6.2236e+00 -1.1276e+01 -2.0439e+00 -3.1262e+00 1.2531e+00 2.0769e+00 -#> 1.0418e+01 5.4322e+00 1.2515e+00 9.2656e+00 2.2542e+01 1.0728e+01 -#> -#> Columns 37 to 42 6.0988e+00 -8.0439e-01 1.8655e+00 -5.1543e+00 -1.9880e+01 -1.4004e+01 -#> -1.9940e-01 -6.6575e+00 1.6445e+01 -1.5268e+01 -1.3676e+01 2.8681e+00 -#> -6.7479e+00 -9.1959e+00 -3.7102e+00 1.6594e+01 5.5031e-01 5.4496e+00 -#> -6.0228e+00 2.1354e-01 8.0222e+00 -6.6496e-02 -6.0220e+00 1.3493e+01 -#> 2.6637e+01 3.2170e+00 7.4214e+00 2.3912e+01 -1.2703e+01 4.4444e+00 -#> 9.1571e+00 1.4790e+00 9.4493e-01 2.1464e+01 -9.7413e+00 6.8209e+00 -#> -6.4138e+00 -1.7047e+01 -2.1116e+00 -5.2546e+00 -7.3529e+00 -9.4311e+00 -#> -7.6436e+00 1.5305e-01 -3.3620e+00 1.1230e+01 3.0282e-01 -9.2001e+00 -#> 6.4844e+00 9.1075e+00 5.0587e+00 -8.0096e+00 -4.7839e+00 9.3991e-01 -#> -1.8610e+00 1.5192e+01 -8.9527e+00 2.0508e+00 6.9849e-01 1.4954e+01 -#> -1.0395e+00 -3.6823e+00 -7.6426e-01 -6.1870e+00 1.3009e+01 1.7793e+01 -#> -1.3253e+01 -1.1113e+01 3.0313e+00 7.1198e+00 -2.9371e+00 8.8453e+00 -#> 8.1860e+00 -1.3867e+01 6.3761e+00 -1.4327e+01 -6.9700e-01 1.3793e+01 -#> -1.2574e+00 -1.6937e+00 -3.5716e+00 -1.3432e+01 2.9394e+00 4.8733e+00 -#> 1.1381e+01 2.4025e+01 -4.6198e+00 5.5551e+00 -9.6417e+00 -1.0214e+01 -#> -1.2939e+00 4.9727e+00 7.6788e+00 -3.4079e+00 5.5547e+00 -7.1482e+00 -#> 1.2859e+01 6.2153e+00 4.7319e-01 4.6990e+00 -5.7847e+00 -1.0949e+01 -#> -2.2701e+01 1.3047e+01 5.7216e+00 -7.4328e+00 -1.2135e+01 3.5048e-01 -#> 1.4917e+01 -8.3515e+00 -1.0262e+01 8.2380e-01 6.2886e+00 -9.5574e-02 -#> -6.3037e+00 -4.2987e-04 -2.0615e+01 2.1270e+01 1.2580e+00 1.0511e+01 -#> -6.2005e+00 -3.6623e+00 4.5152e+00 -9.2875e+00 -3.2735e+00 6.2376e+00 -#> -8.5251e+00 5.6726e+00 -1.0867e+01 3.3117e+00 1.0386e+01 -4.3640e+00 -#> 2.3705e+00 -8.4754e+00 7.1369e-01 1.0543e+01 3.1853e+00 8.7148e+00 -#> -1.0430e+01 -2.7178e+01 5.6021e+00 6.4076e-01 8.2638e+00 1.2219e-01 -#> 1.0713e+00 7.1091e+00 -6.5412e+00 -1.4832e+00 1.4617e+01 8.5215e+00 -#> 
-5.7575e+00 5.8882e+00 6.2337e+00 7.9177e+00 -2.3209e+00 -1.0861e+01 -#> -7.8439e+00 -3.4696e+00 -4.0875e+00 -9.2443e+00 -4.3091e+00 -7.2878e+00 -#> -1.6487e+00 6.6453e+00 -6.2274e+00 1.2108e+01 7.4702e+00 3.9402e+00 -#> 1.1521e+01 5.7844e+00 1.4997e+01 -6.6601e+00 -7.1250e+00 -5.4158e-02 -#> 1.6169e+01 1.8308e+01 -9.9830e+00 1.2630e+01 -5.8414e+00 -1.6383e+01 -#> 3.4601e+00 4.3866e+00 3.0461e+00 5.2795e+00 -1.1307e-01 -7.0420e-02 -#> -6.7810e+00 -9.2056e+00 -6.5165e+00 1.7744e+01 6.2753e+00 1.7646e+00 -#> 2.3100e+00 -7.1944e+00 -9.5489e+00 -7.0821e+00 -4.1903e+00 9.0515e+00 -#> -#> Columns 43 to 48 1.8651e+01 5.8580e+00 -2.5540e+00 5.9798e+00 1.3616e+01 5.9374e+00 -#> 1.4042e+00 -7.1437e+00 5.3403e+00 4.8087e+00 -4.3136e+00 -4.4749e+00 -#> 4.2473e+00 -3.3378e+00 -5.9252e+00 8.7385e+00 -2.1737e+00 5.5311e+00 -#> -7.1610e+00 1.5589e+01 -8.4832e+00 -1.0002e+01 8.0409e+00 -1.4458e+01 -#> 9.0307e+00 7.9130e+00 -1.3612e+01 3.4962e-01 8.6784e+00 -4.7658e+00 -#> -3.8549e+00 2.0299e-01 -7.7250e+00 -4.7844e+00 -1.3173e+01 2.1174e+01 -#> -9.5103e+00 6.2254e+00 1.5409e+01 -2.4048e+00 8.4910e+00 -7.8213e+00 -#> 1.4605e+01 -1.4826e+01 4.2888e+00 -1.0561e+01 -5.2321e+00 -1.0529e+00 -#> -7.9933e-01 -3.1178e-02 5.4608e-01 6.5385e+00 9.7176e+00 2.0845e+01 -#> -5.3888e+00 2.0419e+01 -2.5235e+01 4.9453e-01 4.0366e-01 1.4562e+01 -#> -1.5129e+00 -1.3035e+01 5.7489e+00 3.9341e+00 -7.8148e+00 2.0222e+01 -#> 1.7552e+01 3.7504e+00 9.3178e+00 -3.3478e+00 -1.0304e+01 1.3753e+00 -#> 1.0735e+01 -9.2614e+00 -4.0927e+00 1.0137e+01 6.5703e-01 6.3034e+00 -#> -1.8100e+01 -4.4140e-01 -3.2765e+00 1.1385e+01 8.2657e-01 -9.7269e+00 -#> 7.4825e+00 3.7591e+00 -9.3485e+00 -2.8601e+00 -5.7009e+00 -1.0053e+01 -#> 2.8396e-01 -1.3266e+01 -1.0565e+01 4.4111e+00 1.4520e+01 -1.1182e+01 -#> -1.6583e+01 1.5507e+00 -1.2863e+01 9.1985e+00 1.0691e+01 2.8953e+00 -#> -1.2593e+01 -1.3589e-01 -9.3040e+00 1.3394e+01 -1.0354e+01 1.3999e+01 -#> 8.2749e+00 7.5436e+00 1.7821e-01 1.1863e+01 6.5062e+00 -5.8205e+00 -#> -7.1312e+00 1.1181e+01 3.9970e+00 -1.1016e+01 8.4926e+00 -1.1297e+01 -#> -1.1315e+01 -2.8348e+00 9.9462e+00 -1.4857e+00 -2.0638e+00 6.0903e+00 -#> 1.4766e+01 1.5735e+01 6.4633e+00 -1.0304e+01 1.8137e+01 1.3001e+01 -#> 2.0535e+00 8.0652e+00 -6.2877e+00 1.0530e+01 -3.1022e+00 -1.1258e+01 -#> 1.7652e+01 1.0152e+01 -2.7717e+00 9.9863e+00 1.9430e+01 1.6897e+00 -#> -3.1389e+00 1.5090e+01 5.4868e+00 -1.5961e+00 -1.1954e+01 2.6257e+00 -#> 2.6598e+00 4.5923e-02 1.9518e+00 3.6742e+00 4.0554e+00 -4.1224e+00 -#> 5.6864e+00 7.4536e-01 1.2337e+00 -9.2960e+00 6.0701e+00 6.4188e+00 -#> -3.7751e+00 8.2547e+00 -1.3889e+01 8.6632e+00 -1.6368e+01 -8.1808e+00 -#> -1.3147e+01 1.0182e+01 -9.7701e+00 -6.5206e+00 -1.8821e+00 -3.4522e+00 -#> 3.7844e+00 1.0395e+01 -9.9069e-01 -8.6665e+00 9.4882e+00 -1.9181e-02 -#> -2.2905e+00 -4.3181e+00 6.0473e+00 7.4193e+00 -1.3721e+01 -1.1281e+00 -#> 5.2884e+00 1.1093e+00 9.2624e+00 -6.7758e-01 1.8620e+00 -6.4113e+00 -#> 8.1688e+00 9.3747e+00 4.6817e+00 -1.0918e+01 1.2850e+01 2.5548e+01 -#> -#> Columns 49 to 54 -7.9121e-02 -9.1412e+00 -1.0781e+00 8.9441e-02 -2.5786e+00 -2.5158e-01 -#> 1.3240e+01 -2.2763e+00 4.5066e+00 3.8285e+00 6.0378e+00 4.7827e+00 -#> -3.1463e+00 2.6070e+00 -8.4117e+00 1.5027e+00 8.7442e+00 4.2795e+00 -#> 1.3520e+00 -5.2432e+00 -2.7057e+00 -9.7851e-01 -8.7705e-01 -3.2616e-01 -#> 5.5362e+00 1.0861e+01 7.3578e+00 1.9651e+00 -1.9482e+00 3.4265e+00 -#> -1.0524e+01 5.4771e+00 8.4625e+00 -8.9160e-01 3.9857e+00 3.4231e+00 -#> 1.0566e+01 9.0016e+00 -1.3289e+01 -4.3572e+00 1.4488e+00 2.0633e-01 -#> -1.1994e+00 
-1.0704e+01 3.1709e+00 -1.4812e+00 1.5413e+00 5.3184e-01 -#> 8.8366e+00 9.7180e+00 1.0314e+01 1.0961e+01 3.0913e+00 1.0606e-01 -#> -1.3399e+01 2.8587e+00 -1.0157e+01 -5.0444e-01 -4.7002e+00 -6.9904e-01 -#> 5.0243e+00 2.1031e+00 2.2536e+00 3.3043e+00 3.4988e+00 -1.4744e+00 -#> -4.8099e+00 -1.8123e+00 5.6880e+00 -5.0763e-01 5.4416e+00 -2.5526e+00 -#> -4.8819e+00 -1.3431e+00 6.0133e-01 2.1635e+00 -3.5245e+00 2.3077e+00 -#> -3.7879e+00 -5.3912e+00 -4.9927e+00 7.2770e-01 6.8276e+00 -2.4188e+00 -#> 1.3457e+01 1.4833e+01 2.3735e+00 7.1478e-01 -1.3741e+00 -1.6697e+00 -#> 3.7287e+00 1.2958e+01 -2.6546e-02 9.0811e+00 1.4764e+00 -5.4536e+00 -#> -7.3302e+00 1.3150e+00 -7.3113e-01 1.1774e+01 4.5585e+00 2.0654e-01 -#> -7.4635e+00 -3.2945e+00 -1.6004e+01 1.0148e+00 4.5388e+00 -2.1846e+00 -#> 4.0827e+00 4.6811e+00 1.1506e+00 8.8552e-01 -2.1057e+00 2.4387e+00 -#> -6.3624e+00 -9.1315e+00 -1.6520e-02 4.3562e-01 -4.4460e+00 -1.1886e+00 -#> 5.0401e+00 5.8519e+00 9.1493e+00 -4.4009e+00 6.4286e+00 -1.5992e+00 -#> 2.8538e-01 -9.3507e-02 -3.7667e+00 4.3281e+00 2.3204e+00 -3.5145e-01 -#> 1.6218e+00 5.5460e+00 -7.3334e+00 -5.5465e+00 -5.7455e+00 3.0386e+00 -#> -2.5979e+00 4.6217e-01 -3.6948e+00 -9.4825e-01 4.1991e+00 2.1228e+00 -#> 7.4229e+00 3.5157e+00 2.8468e+00 -3.2869e+00 3.3928e+00 1.8340e+00 -#> 8.2374e+00 2.4794e+00 -7.6380e-01 -7.9588e+00 -4.9124e+00 -1.4156e+00 -#> -1.3288e+01 -2.0089e+00 -6.0241e+00 1.8306e+00 3.4282e+00 -1.2858e+00 -#> -1.3545e+01 1.6046e+00 -1.0394e+00 2.9090e-01 1.8811e+00 5.5214e-01 -#> 2.4362e+00 -1.8237e+00 1.6450e+00 3.1722e+00 4.5288e+00 5.4734e+00 -#> -1.0636e+00 -3.5529e+00 3.3539e+00 2.2426e+00 -4.1244e-01 1.8183e+00 -#> 1.4988e+00 2.4386e+00 2.3093e+00 1.1566e+00 4.6186e-01 1.3810e+00 -#> 1.6040e+01 -9.8615e+00 -1.0179e+01 -8.8022e+00 -3.5965e+00 -2.8926e+00 -#> 4.9511e-01 4.6599e+00 -1.6281e+00 3.8184e+00 5.6074e+00 -5.0963e-01 +#> Columns 1 to 8 4.6859 4.1131 -10.9593 -5.3069 -1.0974 21.6343 13.6091 -17.2558 +#> 2.0795 -3.6493 3.2076 -1.6711 8.9436 -18.0291 -0.7971 -3.2565 +#> -3.9027 -4.2938 -0.0134 -1.1303 -19.3438 13.8252 -7.1970 -11.2471 +#> -0.0801 2.3289 -0.7113 0.0903 -5.4598 12.1508 -9.1313 -1.3847 +#> -6.1731 10.5736 -5.7994 -3.1141 13.6859 1.4036 -2.1168 -5.9932 +#> 2.4950 2.2010 -12.0723 2.8580 -5.8241 1.4615 -4.1857 -12.2922 +#> 4.7572 -3.6669 6.6668 -6.1736 9.5856 -8.1682 1.5696 -4.3483 +#> 1.5150 0.7789 -7.0650 -1.0792 2.6825 2.1119 4.6470 6.2645 +#> -10.7507 -2.7751 4.3205 -4.4414 -2.8109 -3.4361 -8.7092 -6.7775 +#> -10.8596 6.5341 7.6802 -2.7770 -4.4282 -11.5187 -4.9560 -3.8449 +#> 1.2454 3.7008 -6.8420 6.1895 4.6964 -1.0605 8.2338 -5.2163 +#> 0.5068 10.6750 2.1842 9.8193 0.8160 -10.6605 -13.9829 -1.9758 +#> -0.8746 1.6290 2.2230 5.5392 -8.8911 6.6383 -3.6992 -9.8343 +#> -4.4780 -3.2753 5.7312 -8.8011 20.8802 -3.1306 -7.6031 11.4210 +#> 4.7628 -9.5496 -7.8169 -1.8763 10.0707 3.0489 20.1976 3.3912 +#> 3.5079 5.0603 -4.0514 5.9147 -1.5210 -2.5629 1.6045 -13.0763 +#> 1.0827 -2.3540 10.7913 -0.2960 2.9449 5.5754 17.2647 -7.8718 +#> 2.4678 -0.6623 7.7071 -3.1429 -9.5457 3.3259 2.3223 -5.6405 +#> -4.9396 12.0043 2.3191 9.3860 -4.9175 -4.0139 -12.1309 2.9604 +#> 1.6151 -11.2512 3.5507 -6.0524 -8.2152 6.1504 6.0037 5.3351 +#> 6.5633 5.6831 0.5622 -6.4501 11.7547 -0.0808 -8.9524 -4.5911 +#> 6.5665 -9.3553 -7.7590 11.1860 -11.7417 -3.3426 5.8870 18.7182 +#> -5.9680 -4.4924 2.6471 -2.4900 14.9461 -5.4856 -0.3581 -5.6541 +#> 3.8303 -0.8612 -8.0441 11.2660 4.9639 -8.5106 13.2178 15.1816 +#> 0.0305 1.7321 6.8071 -9.4864 -4.2081 18.5272 -4.8900 -6.4309 +#> 2.7676 2.7745 
-10.0156 1.3594 1.5707 16.4095 10.2932 -4.5799 +#> 2.4023 1.9727 -1.7814 11.8704 4.0050 7.4812 1.7863 1.4518 +#> 2.2568 8.5514 -10.5295 -8.2845 -12.4669 11.6911 7.9347 0.5729 +#> -6.5498 7.0906 2.2620 -1.0590 -16.8576 4.4109 1.8163 -4.5055 +#> -5.3145 2.9363 -9.1143 6.1756 -3.7082 -0.9154 10.7274 -6.5900 +#> 0.4356 11.3939 -9.1932 -5.4917 9.0205 8.5323 6.5337 -3.2082 +#> 1.1042 -15.2302 -6.9172 -2.0502 20.6166 12.2919 9.3639 -11.7269 +#> 1.1434 -8.3028 6.2365 5.7696 -5.1344 -5.3414 -1.9308 -12.6167 +#> +#> Columns 9 to 16 9.1021 11.3963 0.0517 2.5797 -1.8836 -6.3062 -1.5125 -0.2164 +#> 6.4254 13.2882 0.8365 -5.4874 -11.7463 -12.9738 1.1775 14.1190 +#> 6.2328 -7.0272 14.2463 18.3032 12.4710 7.4895 -3.9521 0.2015 +#> -10.1326 0.7080 5.9389 0.8232 -0.9454 -4.4106 9.0112 2.7485 +#> 6.3682 16.9232 13.8757 -0.4842 1.6086 0.6329 -14.1301 -2.6297 +#> -16.3091 6.9441 -1.7005 -4.7283 7.6129 1.3314 -6.9246 -0.7769 +#> -6.1779 4.2079 1.6450 2.6515 -6.6223 -7.5085 -12.9131 3.9233 +#> -6.7064 18.1316 -2.0671 7.9739 -7.0790 8.4678 -7.2208 -1.9009 +#> -15.1592 1.5098 -2.7493 -5.9910 -5.2947 8.8131 1.3424 -7.1393 +#> 0.8283 -9.2183 -13.2099 5.2295 -9.7209 17.5325 12.1273 18.9702 +#> -30.8453 3.4478 16.0666 5.7648 12.8819 -6.1747 -12.7869 0.1842 +#> 2.7594 -21.8979 -7.5436 -6.4978 -6.6699 0.0417 -0.7392 10.5569 +#> 9.2321 2.8777 -0.7252 3.2144 4.9839 7.0128 -9.4469 16.0423 +#> -1.1630 12.0727 4.9134 -3.5467 -6.8740 2.6210 4.4240 -0.5614 +#> 3.7634 -7.6701 -3.2578 -3.3879 -4.0988 -4.7285 -8.8746 -11.3122 +#> 4.8912 -11.2458 4.3606 3.7666 8.8215 -11.0583 1.2629 -3.3356 +#> -9.4644 15.2032 -2.0640 -12.3908 -4.9165 1.6231 -3.2193 -1.6620 +#> -15.3968 6.5954 21.0896 4.5996 0.3185 -20.2904 -20.5485 -4.9448 +#> 2.9882 -8.1953 -8.8737 -11.3710 -6.4003 -1.8616 7.0272 8.5134 +#> -5.4746 -9.2035 0.6705 2.6224 10.0923 14.7217 2.9133 -1.3263 +#> -14.7742 1.6396 -2.4585 -1.0454 14.1353 6.5585 10.8306 -0.2323 +#> -12.4131 18.7650 -3.6926 -1.0782 -3.0099 -5.9184 4.8794 4.2946 +#> -10.3162 4.5767 -4.2125 2.6130 17.0691 -6.6392 5.7069 0.4860 +#> -7.0846 10.6999 -3.1922 -4.8327 -5.6982 14.4594 -7.5793 1.7193 +#> 6.7061 -5.8098 -1.4005 0.7337 -3.8568 -9.5381 4.9706 5.3874 +#> 22.4558 11.6769 0.2725 0.6936 -2.8309 1.4876 -0.3943 1.7408 +#> 4.5192 -4.5664 -6.3206 2.9128 -5.3906 -11.6255 9.4269 3.7359 +#> 4.0353 -2.0850 2.2309 -8.3801 7.4280 -9.8267 -1.8911 -7.4902 +#> 10.6320 -16.0351 -14.9034 -7.4753 -3.0029 6.5873 12.4090 21.2092 +#> 10.5193 5.5001 -10.0292 4.5498 17.1837 4.0564 -3.3950 2.2623 +#> -1.7210 -8.2200 5.5402 -5.7478 12.9917 -1.6978 4.4455 2.1951 +#> 20.2861 -0.2725 2.6812 -3.3156 17.0410 -9.6918 3.3910 -6.5238 +#> -4.9195 -2.3295 -4.6651 7.6490 18.9711 8.2143 1.2420 -5.5669 +#> +#> Columns 17 to 24 0.3214 -3.8681 13.0062 0.8861 0.3557 2.9104 18.6002 -0.3745 +#> 7.3534 -1.8112 -0.9140 -1.8187 8.8806 6.4379 -13.0432 -25.5306 +#> -4.7344 2.6456 -16.2878 9.7185 -6.0796 9.1387 4.4665 1.8055 +#> -1.3921 -6.8190 -14.9453 -9.6039 1.2278 7.6483 -1.9714 -7.4935 +#> 8.6703 -5.2990 -5.1428 -13.4760 14.8302 14.2600 9.1663 -4.6199 +#> 4.8356 5.6312 9.6040 -18.6807 -3.4531 -3.8706 1.4035 6.0210 +#> 9.2248 13.0940 -6.4734 -7.7884 -10.0523 -13.6050 -9.0217 3.0239 +#> 5.2663 12.9650 7.7468 -3.9608 12.6007 -11.5812 8.4588 4.7509 +#> -0.5834 -6.3832 -1.7120 -2.8817 18.3699 -0.0287 -2.1830 -6.5680 +#> -0.6729 -10.1634 12.1604 3.0681 4.9471 -13.4872 -10.0827 -1.1156 +#> -2.0517 10.4413 -4.9095 -27.6038 -22.6202 10.1480 17.4386 -0.6858 +#> -14.5994 8.0607 -1.0870 -0.2354 -11.3591 -3.2987 -22.1263 -13.8195 +#> -5.3956 11.5409 0.4443 
-2.8709 -2.4923 -2.0867 -2.0201 -11.6207 +#> 12.5635 8.2138 -7.8983 -9.0094 -5.3698 14.9678 8.8916 4.3412 +#> -9.5728 16.9806 -0.7743 -2.2287 -4.0398 -0.8629 12.0214 11.4347 +#> 10.7500 0.4572 -11.9303 -13.4179 -5.3441 -3.6433 5.1802 6.1838 +#> 10.5714 9.9106 7.7255 -10.7364 -5.7714 -10.6245 1.6796 17.7730 +#> 14.9344 1.3521 -3.6929 -16.7153 -11.1813 14.2468 1.0509 -0.3189 +#> 10.3767 -11.7552 -14.1134 -11.9555 -13.5678 -9.0238 -23.1570 6.9184 +#> -12.9087 2.4511 -3.0182 8.3404 -5.3626 1.6748 2.6170 10.9101 +#> -12.2371 4.2311 -8.0612 -3.1182 -21.5716 0.3783 -14.7003 11.2755 +#> 2.3543 9.2398 10.8943 3.5437 6.2693 4.3286 -1.0154 16.2127 +#> 8.1319 -7.2351 2.9895 -5.8137 -5.2356 0.5123 15.1932 -3.8965 +#> -3.7209 6.2373 2.1414 7.3283 3.2835 -9.1617 -2.2624 12.6606 +#> 7.7459 10.1475 0.6106 17.8884 6.4427 -7.6953 -13.9603 -12.8029 +#> 3.1120 4.0695 -2.2866 11.3803 9.6135 -7.3300 3.4999 -2.5447 +#> -0.4269 -0.1814 0.0400 -1.7797 -3.0033 -2.5191 10.9453 22.9289 +#> -6.3394 1.6993 7.3857 -0.3888 -13.2625 -15.3248 -0.4494 0.9807 +#> -1.1851 0.0683 -7.0141 0.1083 1.4989 -2.6782 -26.2903 -10.8566 +#> -12.9193 -4.8826 -5.2862 -23.8343 11.8810 8.4221 -9.7082 -4.5909 +#> 4.8649 -7.2036 10.6279 -11.2238 -7.1414 -19.1860 -4.2480 -6.4341 +#> 12.4443 -4.5601 2.6290 1.9283 0.3679 14.7593 15.5449 -10.6803 +#> -2.5364 -9.8297 -15.4493 -8.2267 -15.6129 13.9662 2.6087 8.1404 +#> +#> Columns 25 to 32 2.7611 2.3272 2.2703 11.8324 20.7194 -0.1435 7.0673 -0.5158 +#> -8.6295 14.3645 5.4607 -7.1855 -9.7144 0.5462 -3.2703 -3.8569 +#> -4.7064 6.6156 2.9861 2.6888 2.8411 -0.8180 -11.0941 -0.7763 +#> -6.2306 0.5112 13.5425 6.5322 23.7617 0.9257 -5.7523 -8.4829 +#> 9.9884 -6.5912 -7.5777 -0.2900 9.9750 -3.8655 -7.3031 0.6106 +#> 9.5733 -2.3083 5.7018 7.8933 7.5142 -0.3586 13.9308 -2.1888 +#> -0.4551 1.9882 7.8318 -11.4236 0.4450 0.3662 0.6143 -7.2142 +#> 9.8099 6.7530 -5.9614 9.5030 -10.6046 3.6326 -1.6353 -2.6347 +#> -10.3842 -6.0572 4.4219 12.6185 -6.9640 -3.9483 -16.3170 -3.4683 +#> -5.8878 -1.6757 0.3368 5.9224 7.3478 3.4951 -4.0444 12.6460 +#> -2.7145 -0.3078 -1.0998 3.5983 0.3923 10.4494 -0.4025 6.9491 +#> -5.9985 -2.5409 -0.7230 4.9602 -8.2388 7.0272 -17.3047 4.3018 +#> 4.4925 -2.9386 3.3164 3.4919 -0.1992 9.4277 -6.5478 -0.6053 +#> -0.5302 10.7326 -3.5301 6.9691 2.1860 -9.1405 2.5006 1.6445 +#> 8.4418 -6.5366 7.9454 14.0289 -3.3132 -2.8501 5.6528 10.7641 +#> 14.3074 1.1101 -6.9249 -4.4225 -2.4290 13.5465 -2.9559 -3.8659 +#> 4.1826 -3.8453 -4.9055 -2.0063 7.8674 11.3759 6.8721 6.1481 +#> -7.1646 6.4080 2.2303 -9.3383 2.4199 14.0747 4.2509 -11.3046 +#> 4.3393 15.2890 -12.7117 -8.0882 -17.4123 4.5570 -3.2498 13.8391 +#> 1.9676 -5.5145 11.7926 11.1104 7.5781 10.3390 1.0225 -1.9399 +#> -3.4708 10.5718 4.4411 -3.2468 -6.5553 -5.4458 -4.3555 10.5811 +#> 18.6028 -3.6893 -12.1645 -11.4127 4.7428 -6.2613 7.8210 -13.3803 +#> -10.1486 7.3556 12.4539 -1.3806 4.7041 6.7583 14.5490 -6.8778 +#> -0.5740 -1.1135 4.9173 -2.1016 -5.2378 -4.6511 -4.9122 2.0312 +#> -19.0756 2.8867 14.9448 -3.2497 16.0637 -1.4370 -6.9711 -5.2158 +#> -6.8037 9.5856 12.5708 -3.5277 3.0072 2.5428 -13.4286 -6.2804 +#> 18.8183 -1.7829 -17.2493 -5.1817 7.1251 9.3990 -1.1581 -2.3858 +#> 0.7691 -5.4008 -1.5371 -7.8966 5.8265 -12.0490 -2.9311 -1.5862 +#> 4.1762 8.3920 -0.9298 8.4403 0.6377 13.1083 -6.5428 12.8787 +#> 4.2664 11.3475 14.5946 7.5401 -5.7044 5.0656 9.9433 16.3912 +#> 2.8144 3.6244 17.4546 2.6555 4.6665 1.0549 14.3992 13.8650 +#> -3.4108 19.0521 -2.4775 19.8160 13.1611 -12.6593 11.3993 10.5849 +#> -4.3100 4.4157 6.5085 -0.3309 4.2713 11.3586 4.4289 
#> ... [ printed tensor output elided: slices (3,.,.) through (5,.,.) of a
#> 3-d float tensor, roughly 33 rows x 54 columns per slice, rendered in
#> column groups of 8 ("Columns 1 to 8", ..., "Columns 49 to 54"); the
#> diff replaces one run's randomly generated example values ("-" lines)
#> with another's ("+" lines) ] ...
7.8878 5.9525 -5.4456 -2.6309 3.0996 5.5376 -7.9531 -#> 1.6578 5.8366 5.3673 -11.4888 -13.9038 -17.5789 -1.9719 -0.9318 -#> -7.2453 1.9663 4.3650 -0.3790 -4.2484 14.1084 -14.0829 -5.9840 -#> -3.2372 2.3716 7.5951 -1.4341 1.9291 -2.3323 -13.2522 0.9026 -#> 17.4940 -8.7963 1.2609 8.2443 -0.4105 2.7368 -0.9383 5.9460 -#> 0.5119 19.2834 -2.7669 -1.6191 -0.8475 -12.9259 8.1604 5.5678 -#> -15.2675 -2.3728 4.2026 7.0293 6.2566 -2.7296 -3.3734 -2.0884 -#> 10.2617 5.2372 -4.6707 6.7844 4.9344 5.8431 -10.6913 -6.4078 -#> -0.4118 -5.9068 8.8041 9.4516 3.0169 -2.9376 -12.5616 7.4301 -#> 5.8621 4.0873 -13.9898 4.6101 8.8701 -0.1944 -10.0589 -2.1204 -#> -8.7893 -4.7496 3.4217 -19.9301 1.4026 5.6201 -9.7724 3.1006 -#> 3.7177 25.8277 -7.9515 10.2657 1.2519 -10.6447 -4.5983 -19.6696 -#> -3.3835 -6.4632 15.6626 -6.6609 2.5483 8.2629 -11.0463 -8.7981 -#> -#> Columns 17 to 24 -15.1061 1.9276 -13.8666 -3.8967 -5.5002 -7.8204 -5.1597 -13.6479 -#> -1.0532 -4.3306 5.4546 -4.7892 15.1088 8.0288 12.6441 1.9789 -#> 8.0984 1.0747 -1.2822 -10.0374 10.3140 14.7900 -11.8292 -3.7487 -#> 3.2155 -0.4762 6.4662 4.6925 -4.8926 -7.3207 5.0228 13.0175 -#> 11.5186 -11.5963 9.4002 -5.7629 -9.2373 -13.5491 -1.5002 -1.1277 -#> 8.0051 -9.2534 -0.8459 -10.4178 -5.8362 4.5994 0.2911 5.1845 -#> 18.8482 -9.6356 0.5151 -1.5948 9.9338 15.7383 -7.3023 1.8789 -#> 13.5459 5.4399 -7.1362 2.1980 21.2082 1.5343 -2.2570 -28.6290 -#> 3.2007 -6.6374 17.2961 6.0226 -6.0808 1.0173 10.7136 -12.6136 -#> -13.5292 2.4464 -7.7585 -0.6386 5.8336 -11.2572 1.8914 10.2651 -#> 9.2183 -7.1085 -16.7239 -14.4238 -13.1062 12.2904 -4.7747 -12.3438 -#> -5.3513 -1.4276 -10.7133 -1.0104 -14.7948 -11.3771 -10.5380 -14.1806 -#> 2.2582 2.5082 3.1395 -10.3802 -3.4846 9.7888 6.3816 8.8894 -#> -3.3255 8.7914 3.5600 -2.4824 6.4942 3.9103 0.1821 -1.1500 -#> 4.7220 1.1739 -5.6123 -1.4683 0.5952 -0.7315 -0.1751 -5.2895 -#> -10.7894 0.0108 3.8297 3.5149 5.1771 -7.0988 13.1429 9.9300 -#> 4.9622 -7.1838 -20.6221 -4.5445 -6.3929 14.4627 5.7682 -5.9828 -#> -1.6371 -2.6708 -7.7051 -12.2669 3.6014 1.2710 1.0021 -16.3431 -#> -10.6782 5.9850 32.8903 12.4231 12.3848 -13.1794 11.3659 16.1553 -#> -8.6677 -10.8305 2.6711 5.8227 -3.2384 12.1780 -8.5230 -3.7069 -#> 9.8047 1.7091 -6.1964 9.3361 2.7999 -3.9781 -13.4320 12.7182 -#> 5.2453 -3.3518 -10.8381 -3.0425 19.1940 6.6235 -9.0769 -0.6954 -#> -17.2991 -1.8443 13.4762 2.0591 -8.9836 2.1469 12.4268 3.2675 -#> 0.7351 22.4695 6.1724 5.6455 -0.3949 1.2288 10.5278 -0.1145 -#> -2.4389 -3.5736 0.5515 -5.5292 -1.3270 -3.0887 -18.4237 4.8149 -#> -4.3389 5.3588 7.6871 10.6304 4.6827 -5.4639 -4.1754 6.1267 -#> 11.6110 -3.9579 -5.9381 -4.1842 -5.2819 14.5803 3.7449 -9.4213 -#> -1.9401 6.1597 5.4595 6.9357 -1.2076 10.2617 7.7148 -11.8499 -#> 0.7595 -0.6014 15.9818 -10.5274 -5.3796 3.8521 20.9919 0.7645 -#> 1.8653 10.0281 6.2075 -15.7851 -14.1920 9.4755 13.5518 -15.5913 -#> -1.4591 5.5454 -5.1016 -0.7982 4.8222 -3.2032 7.6409 -8.4062 -#> 6.5436 3.7843 -8.6999 -4.8098 5.4861 4.2819 8.1165 7.3718 -#> 4.6000 -5.8893 -23.4787 -4.9795 3.0594 12.3016 3.2690 3.5217 -#> -#> Columns 25 to 32 1.7778 -0.1741 10.0144 0.1100 -14.6647 -5.5856 -0.2637 10.2384 -#> -1.3931 8.4956 1.8278 -9.6910 9.9097 2.5671 -10.6541 4.6897 -#> -13.2821 3.1024 -6.2520 -8.1819 -10.8312 15.8243 -6.2202 -5.2134 -#> -10.4369 10.2615 -5.4063 7.5572 11.9416 7.1635 -1.3847 -0.4782 -#> -3.4464 9.0079 6.4960 4.4430 5.3117 4.7168 5.1332 11.8318 -#> -1.6587 -3.9442 1.0908 10.2382 -5.6771 12.8907 9.4950 4.1755 -#> -5.3847 27.2970 -5.2261 -12.2230 22.8968 -14.2896 -15.2572 -4.1222 -#> 13.7583 
-10.0067 0.2167 -21.0369 1.2837 8.8987 5.9922 -3.0999 -#> -4.2537 4.1788 16.4749 -2.8583 -2.1226 -9.5117 2.1922 1.8162 -#> 8.5287 -2.9122 -2.1248 10.5133 4.8937 7.5389 0.2121 -7.8859 -#> -4.9846 -12.8030 -1.8882 0.1910 1.2181 6.9836 14.2577 22.4123 -#> -7.4522 -12.7975 -0.8079 -0.8627 8.6350 11.2845 -2.3146 5.7266 -#> 12.1104 14.6532 19.1453 -9.0659 -7.2229 -6.0188 16.3080 10.1372 -#> 4.5465 -9.0641 0.4322 -7.4698 1.1671 11.2206 -14.3092 -11.6783 -#> 17.7936 -0.8289 -13.6353 6.1814 -2.8911 8.2300 5.4011 4.8218 -#> -7.9323 17.8275 -0.4554 -3.4589 4.4595 3.7615 -5.4832 -1.0095 -#> -13.9743 16.0892 -2.6171 -3.5611 4.0792 -2.1144 13.8133 4.5982 -#> -2.6170 -0.6755 -12.1117 -24.0577 1.1589 6.5868 -0.5188 1.1435 -#> -8.4654 -2.7579 1.2969 -9.0796 -10.1263 12.4411 -3.7248 2.8195 -#> 17.3934 -8.4432 -2.7909 2.7379 -8.4793 -13.8044 10.1999 -10.4044 -#> -7.7334 -2.5178 5.2465 -9.3406 10.2474 -0.2369 -22.2037 4.8796 -#> 4.0159 6.8714 -7.3192 -4.6508 1.0704 2.8704 6.4501 5.2904 -#> -11.5786 5.6984 3.3727 3.5654 -7.6489 -6.9025 -9.9804 -4.0167 -#> 1.5916 4.3510 19.0703 3.0556 -2.2498 -7.1878 -0.2999 11.1407 -#> 10.9710 -12.5355 -3.5661 7.9113 -5.3299 -0.6712 -6.0232 -4.9191 -#> 6.9522 7.4351 -1.8514 17.6501 -4.5240 -8.5315 -0.9537 -1.2942 -#> 11.7416 -2.4716 7.3769 2.1249 2.2503 -5.5305 -3.4552 5.2614 -#> 4.4133 -14.9014 2.6159 18.2596 -0.2846 -0.0281 -4.2213 -7.2945 -#> -14.1361 20.3555 -7.8349 -0.0203 -15.6937 -0.6371 -3.8028 12.6171 -#> 19.4651 -5.1343 14.9903 0.6087 -10.3531 4.2478 19.4004 3.1429 -#> -2.1876 17.4168 -5.6756 -0.2676 -2.2310 -0.9550 -5.5168 9.5355 -#> -5.8493 -14.3634 -6.6581 10.0440 -2.8606 -2.0556 -11.0157 -2.0355 -#> -14.2199 -5.4934 -0.0826 1.4885 16.0165 12.4117 -7.0615 12.5529 -#> -#> Columns 33 to 40 10.1500 -2.7168 5.8167 -12.4654 -1.3378 -12.2143 10.4488 -13.0792 -#> -2.6546 -1.7643 -5.9071 -11.1366 -4.9924 21.9747 -8.3145 -10.3978 -#> -11.8286 14.7212 11.9621 -10.4938 7.4112 25.7293 24.8885 3.2072 -#> 6.3994 4.2064 -11.9686 9.1049 -4.7787 18.2113 0.4549 6.3143 -#> 1.8633 5.0401 -10.5132 15.4074 -3.6229 13.2258 -1.8025 10.2815 -#> -8.0038 -5.8750 5.6899 7.1259 -5.7372 5.9912 -1.7020 7.0553 -#> 0.6529 11.6060 5.7158 -5.1726 -16.5611 5.9302 2.3731 4.5060 -#> 7.9616 11.2526 7.7906 -3.9221 -15.5615 6.2789 18.7343 3.2657 -#> -9.6967 -2.3485 -3.2977 5.0751 11.0015 -3.8198 -4.6981 13.0108 -#> -6.1833 0.3852 -13.5263 -10.9368 -5.7111 13.8614 3.1115 -2.4188 -#> -23.3818 -4.6403 1.6466 5.0277 14.6978 11.7769 1.6959 -0.8511 -#> 18.4458 -0.8086 3.2779 7.4938 2.9144 -8.4173 12.3187 1.5119 -#> -13.7431 -0.5227 -0.7258 -5.3151 -15.2641 4.0030 -0.8535 3.9266 -#> 5.3062 3.4875 -2.3348 3.8489 -0.5105 -10.3150 -0.6041 9.8378 -#> -1.0193 -8.3886 9.1420 5.2717 -15.8106 -10.3659 -14.0255 -4.1194 -#> -2.4097 -10.2394 -4.6149 -4.6161 2.0708 6.0866 15.4649 -12.5058 -#> -14.6599 -14.0379 13.4541 20.2061 -3.6042 -2.7530 -13.5095 -7.4449 -#> 0.3103 -1.0956 -8.0902 1.3008 -3.4313 11.7765 -11.8600 3.8552 -#> -2.7998 6.2498 -0.1702 0.4605 -13.8449 -8.1785 18.2829 23.7498 -#> 6.0590 20.4857 10.4082 -2.4494 2.3333 -1.4283 0.1231 7.1851 -#> 7.4581 7.6981 -12.6472 -2.2774 -1.0708 1.0064 -2.8286 -11.5675 -#> -2.5189 14.7925 3.5630 -9.5981 2.0895 -14.7525 16.8936 -3.0053 -#> 1.2041 3.4081 1.7201 2.0051 -10.9110 22.4472 -5.5768 7.2891 -#> 8.0471 -4.4368 10.0651 -10.3965 15.8845 -15.5329 25.6666 3.4862 -#> 15.5102 -10.4686 4.5244 -19.9958 6.8780 -8.7682 -9.2341 18.1238 -#> 15.5442 3.4110 -5.3646 -3.8514 8.0869 -0.3192 3.4853 -9.9307 -#> 0.9164 -5.3659 4.1568 -6.6659 -5.4278 -18.0584 0.3826 -19.3212 -#> 6.0171 
-2.7288 6.8094 5.1870 9.5104 1.4394 1.4395 2.5156 -#> -3.5162 5.0776 -5.7641 -4.2334 -6.3650 5.7626 -3.5435 1.7901 -#> -1.3093 -1.8369 -2.8865 0.9459 -20.2915 -32.7982 -7.0939 -1.9332 -#> -4.3264 4.8188 8.0833 -4.7750 -3.5830 1.9510 -1.8569 11.8234 -#> 14.5494 4.4043 -10.1908 4.0622 0.1435 3.1325 11.6746 -11.3710 -#> -0.5474 -17.7964 -5.1160 -7.5432 10.3011 2.1522 -2.5519 -6.5769 -#> -#> Columns 41 to 48 -7.7728 13.0417 -6.2141 7.5805 -10.5306 2.4039 -2.6211 -10.8351 -#> -5.7943 1.4155 -0.0184 -6.7851 -2.8573 5.4923 14.2143 -4.9217 -#> -10.2013 17.2115 -5.5376 -0.1993 -0.0483 -1.8005 27.7821 2.4018 -#> -0.1497 -10.1295 20.7939 -2.0661 7.0674 4.5289 -5.6561 -18.6444 -#> 2.6511 7.1128 -12.3257 2.0484 12.3790 -4.2943 9.5533 -16.7716 -#> -12.8534 -8.1058 -1.1198 1.3922 3.3961 -13.1037 8.0641 -0.8374 -#> 2.6301 -4.3767 11.9773 -15.8264 0.5274 13.3549 0.8531 -15.4280 -#> -0.4952 -4.6740 6.5916 -6.6276 -3.5350 -1.0065 8.1074 3.3948 -#> 6.4617 4.4943 3.4736 9.9566 10.4772 -0.0510 6.4968 18.4616 -#> -5.7202 -5.0081 -2.1739 9.4466 -5.9768 -1.9245 -4.2107 -12.0566 -#> 6.8396 9.8853 -4.6098 8.1681 12.2168 -11.0984 0.6794 6.4963 -#> -6.9582 -14.8138 6.2660 14.0955 1.1016 8.0584 -5.1369 -3.8343 -#> 3.1336 2.0385 -10.6221 1.4631 -8.8730 -11.5658 -1.9890 -2.1705 -#> -3.2012 -3.2624 0.0124 5.5875 -5.9079 3.6307 9.1475 -5.5163 -#> -16.6238 1.3657 13.1135 -18.5553 19.3835 3.0428 -11.0630 -6.1474 -#> 1.9540 -12.7126 17.6025 9.4565 -13.1545 15.5756 5.4707 2.3039 -#> -2.2161 -2.3298 4.7162 -23.5897 -1.6494 14.3266 2.2557 10.9696 -#> -0.9794 -0.4933 2.9129 -6.6436 10.5483 12.2195 3.2601 3.5748 -#> 15.1612 -3.5543 -3.3447 12.1451 -9.2359 -12.5165 8.5579 5.8165 -#> 2.1669 -5.3160 -11.2051 -5.0594 3.0964 -6.5364 -2.7889 0.1063 -#> 1.6183 8.3523 0.4388 3.9910 -6.5673 2.6309 7.4823 -3.9073 -#> 3.9793 9.4371 4.4418 -7.3555 -1.5988 0.2323 -1.8985 1.3787 -#> -8.1983 -4.2067 11.3807 3.0023 -6.7143 0.3628 8.3436 -5.4336 -#> 2.1477 16.8822 -6.4990 4.1027 -12.6306 5.3138 -21.9907 -14.3988 -#> -19.9449 14.1068 -1.2428 4.0588 19.1461 -7.3755 1.1968 6.9337 -#> 10.8277 5.7167 1.2530 0.4044 -8.9044 -2.3803 -22.9909 -10.8675 -#> -8.5395 15.7964 -10.0465 -10.4169 0.0549 -0.2287 -14.4526 -14.1527 -#> -9.4452 4.3918 -11.0792 -5.8634 6.1970 -3.8583 -18.6164 3.9002 -#> -9.7997 3.0892 7.0320 -14.7294 4.5604 1.5742 12.2626 3.5791 -#> -0.0732 -13.5142 6.8419 -11.9071 1.5294 5.4908 -1.9896 11.3291 -#> 1.4363 9.0709 3.7630 -4.3767 -0.1546 1.2374 5.7477 -3.1649 -#> 2.7391 -9.0105 3.2614 4.6591 1.7479 -4.8373 -15.5717 0.2825 -#> 3.0020 6.7660 -1.4641 -1.4705 18.1625 7.9863 -2.3651 -5.9829 -#> -#> Columns 49 to 54 -10.3971 8.8077 -4.6116 0.1249 -1.6678 2.0824 -#> -2.2096 -8.2924 13.3718 6.6673 1.4004 -1.3016 -#> 2.5277 1.7145 -0.9568 -5.3421 -2.9864 1.5047 -#> -6.6332 -8.7000 7.2478 1.0313 0.7790 -3.3170 -#> -3.6021 10.2331 -10.4041 4.7000 6.0417 -7.5707 -#> -1.3524 0.1230 4.3883 -6.7944 4.1890 1.4641 -#> -2.5252 -11.2596 -0.6901 6.4817 5.9568 -4.7086 -#> -7.5398 3.9761 2.6150 -2.4091 3.3055 0.5490 -#> -0.6691 5.7260 -2.5497 1.0488 4.6558 -0.9856 -#> 9.7165 11.1001 -10.1059 -2.6323 -0.2516 5.0828 -#> -1.5492 4.6172 8.0683 -9.1370 -10.2819 4.1062 -#> -14.9461 -1.1056 4.2353 2.6166 -2.3585 3.9567 -#> -2.1073 -2.8609 0.2629 12.1419 0.1297 0.5681 -#> 0.1319 -6.9208 7.5661 -4.5516 -1.4141 0.7718 -#> -5.0699 17.0210 6.4682 2.1013 5.7954 -3.9657 -#> 8.7692 -9.5887 -5.7504 2.7774 -1.2659 0.4100 -#> -2.1969 1.8587 0.1379 -7.1127 11.8105 5.3066 -#> -5.8700 -6.6889 -5.8325 0.0023 -2.7781 9.8854 -#> 11.8987 -5.3737 -11.4409 1.0054 2.0471 0.0693 -#> 
-8.3490 -3.2622 -2.3483 9.3945 11.1448 -5.8516 -#> 13.3893 9.0998 6.4305 0.8584 -6.0464 -0.4977 -#> -10.9009 24.4256 12.4352 -9.8091 3.5869 -3.4174 -#> 12.5356 -5.5965 -0.5642 5.0351 3.2751 -1.5896 -#> -11.1168 -3.6532 -3.4759 7.6531 -0.7342 0.2704 -#> 7.4699 11.7731 6.1229 3.4599 3.3510 -0.4367 -#> -4.2270 -2.4345 2.1732 2.9828 0.3095 -2.4458 -#> -9.8393 14.6494 10.6712 -6.4012 -0.0743 1.9862 -#> -1.1818 -4.4589 0.8286 -7.5110 1.1673 0.7609 -#> -1.1280 -5.8980 6.6434 0.7556 -1.9274 5.3015 -#> -9.2338 3.2889 10.3630 3.2861 9.8004 2.4265 -#> 3.9075 5.8852 3.2487 6.3270 -4.0139 4.3934 -#> 15.9540 1.2406 1.6622 8.0269 -0.0123 -0.9241 -#> 2.7061 11.8639 1.6918 2.4294 -7.4127 6.2571 +#> Columns 1 to 8 -2.9981 1.9987 -1.0477 -13.6194 6.9298 7.6206 -2.0428 6.8252 +#> 0.8963 -1.2151 -0.7207 -1.4635 8.7503 7.3111 3.6662 -9.0786 +#> -2.0631 -5.8146 4.6948 -4.8939 3.5372 -1.8821 2.1917 14.4940 +#> -4.5296 3.5460 2.1648 4.7897 6.7181 0.0067 15.9350 -3.9847 +#> 3.1995 7.2079 -2.7504 11.6689 -15.8049 -1.2787 -9.8361 4.2753 +#> 0.1304 -4.4724 -0.0635 -8.5782 2.4975 -4.0694 -5.5228 9.8667 +#> -2.9821 2.1418 4.5388 8.5924 5.6464 -5.5546 1.3803 -10.0516 +#> -3.4923 -1.7521 -8.8606 -4.7262 -25.7114 17.4543 -7.0046 3.4054 +#> -1.8225 -1.4219 -5.1090 6.8038 5.5271 30.7212 -2.0611 4.6384 +#> -0.2193 2.7363 3.6337 3.3040 -11.2611 -8.3691 -8.6780 -0.4924 +#> -8.1473 6.2070 0.9058 6.3800 -11.3422 -8.8116 -3.3142 0.0828 +#> 1.8943 -5.4390 -12.0924 19.0233 -6.6010 -4.4031 -17.7816 -0.5272 +#> -4.4502 5.9669 -6.9457 -3.5074 -0.9598 -13.4715 -12.9025 3.1840 +#> 1.0585 -4.3245 2.3124 1.1457 -0.9148 21.6873 -10.4921 10.6373 +#> 4.8480 -0.6384 11.2052 -10.2312 7.3468 -2.3318 19.4543 -3.0181 +#> 2.0397 7.5497 7.0159 5.5897 -8.4134 -16.3037 9.6301 8.8296 +#> -1.5285 -9.1825 10.7200 -3.3999 3.7654 -8.1046 -19.6819 -7.5786 +#> -0.9638 -2.4457 18.2017 -7.9900 -4.7643 -24.4827 8.0693 9.3977 +#> -4.0457 7.8084 10.6424 9.7764 1.9130 -2.8986 2.0304 3.5510 +#> 0.8902 -1.2850 -3.8147 -11.0375 3.0638 2.0255 -3.4273 -10.2156 +#> -3.3306 -0.5066 -4.7918 -4.6034 -3.7404 12.3950 17.5895 9.0359 +#> 0.6129 -3.4534 4.4716 -1.6140 -11.0353 6.7994 12.5446 5.5009 +#> 2.3297 -7.2480 11.6517 -7.3574 16.5833 -7.7865 -7.6982 -0.6897 +#> 4.4613 -8.0269 5.3806 -7.1562 -5.8349 5.1819 5.1881 -14.7557 +#> -1.3944 -1.4486 4.1030 -3.5756 -2.7655 -7.2241 7.0294 -1.5904 +#> -1.1111 -1.2950 4.5635 -1.7037 0.2499 3.7121 10.3800 3.2501 +#> 5.2651 -0.4433 -7.4740 3.1947 -4.4924 6.7886 -7.1823 -4.0272 +#> 4.9170 -4.0617 1.0573 -13.0511 0.6103 5.2818 6.9409 9.7578 +#> -0.7962 -5.8867 1.2623 -17.5376 12.3123 -19.7035 15.3750 8.9767 +#> -2.4093 2.5900 1.4192 -3.5658 15.1976 -30.3074 7.1319 -0.2922 +#> -0.0568 5.0923 -6.5125 -14.6115 -13.2712 -3.2822 -3.4963 6.6401 +#> 5.4941 -4.5832 16.0247 -24.6025 15.3211 -4.0191 -2.0110 8.3245 +#> 1.8139 -4.2336 6.5050 -10.5538 16.8541 -2.2905 -0.7633 11.6168 +#> +#> Columns 9 to 16 -19.7388 -4.2497 17.7844 7.0490 0.1373 -13.9906 -3.0449 -3.2562 +#> -9.8894 0.9661 -0.4101 -12.1781 -6.5649 -10.8805 12.6788 11.9859 +#> 14.1947 -4.7796 -6.8920 -2.5855 7.2923 -1.3769 -3.5240 -2.7617 +#> -4.6408 -9.9279 -9.7712 -8.5201 -8.6075 -0.6716 4.3006 -1.0796 +#> 2.9509 -6.6747 6.2703 3.4515 17.1978 -10.4269 3.8136 -0.3971 +#> 15.2298 11.0304 3.9934 7.4501 21.7057 -2.0523 5.4827 -17.0539 +#> -6.2983 -9.3994 -6.6639 6.6324 -1.0864 -6.8409 16.5468 13.2975 +#> -4.5705 1.1321 3.0089 3.2488 -3.9008 6.2613 -11.0166 -1.7206 +#> 1.4281 5.9335 -2.7531 -14.9261 -2.0529 2.6827 -21.9907 12.6534 +#> -9.6204 2.2846 7.7640 -9.0140 3.0862 -1.3569 10.1182 
17.1239 +#> -6.9389 -10.4684 -4.8376 0.5224 13.2216 -2.0447 -3.1860 0.5203 +#> 2.3373 16.9374 -5.2742 6.0314 18.7102 -5.8949 -11.2776 25.5272 +#> -6.3283 -0.9445 -7.5971 -9.3140 10.4555 -1.4052 -8.5250 6.4385 +#> -0.8305 3.4175 10.9144 -8.5010 -10.3696 -3.9154 -7.8015 0.8250 +#> -3.2788 -0.7424 6.5051 2.7374 0.9923 0.4761 3.6965 1.5076 +#> -1.7487 -1.9768 -7.6754 15.6914 -5.0728 -10.2906 -2.1927 10.6893 +#> 2.9760 2.3833 12.3606 6.0287 14.1418 0.6970 0.8762 -0.6810 +#> 2.0287 -20.3368 -7.7164 14.0426 5.8805 0.0249 -13.0519 -4.9372 +#> -4.5226 -9.8750 1.9928 -0.7749 -11.8479 6.5163 0.0261 19.5263 +#> -1.0261 -8.0168 0.8350 -3.2509 8.3222 4.1742 -8.5501 1.9879 +#> -6.4162 10.4247 4.9921 -3.5867 -1.9859 -9.2677 -2.4422 -0.0360 +#> 3.7977 0.7369 7.2755 -3.9704 -2.2519 -23.0008 -4.8531 -14.7251 +#> 4.6898 6.8939 6.5778 -5.2031 8.2624 6.6660 -14.6330 -7.6518 +#> 4.5284 -0.9883 8.0322 5.3881 0.9397 -5.1828 19.5255 5.1489 +#> -11.2951 -4.4947 -6.9464 0.0233 -7.3350 -6.6192 -1.2261 -6.6542 +#> -1.8227 7.5137 2.7021 6.7293 -5.5610 -8.2128 -6.2575 4.8904 +#> -8.7624 1.3427 8.2329 12.8056 -9.2667 1.1070 1.3570 0.2240 +#> -6.6889 -2.9888 -3.8591 -4.8334 3.6868 -16.5301 -1.8390 -10.6246 +#> -14.8556 -8.5565 -11.4939 -5.0514 6.2092 -15.9428 2.7903 3.3556 +#> -0.1720 12.1874 -3.2367 1.0141 25.3597 3.4302 4.6980 2.1638 +#> -6.3482 1.8216 7.7777 9.6635 5.7204 12.9777 -5.7420 -6.2243 +#> 5.4616 10.5734 8.7925 9.1357 -22.7457 7.4112 15.9451 -8.6602 +#> 9.0210 6.7807 1.7586 -12.7231 7.7920 7.5126 3.4841 -12.5947 +#> +#> Columns 17 to 24 -2.0086 -3.7429 -3.4922 -5.0073 -1.4823 12.0925 13.1227 -5.9666 +#> 2.5905 7.4030 -1.1400 -8.2132 1.7247 5.3412 -11.4227 2.6169 +#> -16.5144 3.1496 -4.0716 -4.3978 7.8668 5.3914 -0.9665 5.5490 +#> 14.5195 -6.5885 -10.4912 0.2149 -7.8601 4.1144 -4.2487 0.5029 +#> -0.7825 -0.1147 -0.6131 1.5169 -1.3588 1.5212 9.0924 -0.1239 +#> 5.7114 -9.1147 6.7794 1.5678 -9.0061 3.4211 2.7157 -8.3202 +#> 18.9572 14.8968 -3.4597 -2.0328 -16.6206 4.3839 7.4423 -2.7560 +#> -5.6803 -6.4400 8.0516 4.1565 -6.1295 15.9909 1.0870 9.8960 +#> 9.8491 1.4323 -2.4933 -1.2742 3.4939 2.4114 -8.7089 -0.7075 +#> -11.7952 -1.4127 -9.0026 0.8020 -1.6741 -1.3498 10.3926 -4.6258 +#> -8.0503 0.2616 4.4266 8.0826 4.4674 2.8782 -0.6460 3.2489 +#> 5.9409 0.7170 -1.1449 9.1042 8.1602 -5.5182 -3.2153 -16.5777 +#> -3.2937 2.6271 -13.1536 3.9373 3.1741 -2.4886 0.0096 -4.9245 +#> 7.7979 -5.2359 -2.9209 -4.2665 5.6631 14.2786 -9.2505 -0.6026 +#> 6.2603 10.2625 9.1270 -11.1127 -6.7668 3.5576 -7.8623 3.2312 +#> -1.4024 2.9339 7.2831 1.6765 -6.2406 -14.7317 -16.4826 2.8849 +#> 0.7807 8.7083 3.0579 3.9737 -10.9223 0.7293 6.9710 -2.9484 +#> 8.5802 8.3053 6.2090 5.8093 -3.0959 -7.1872 -15.9926 17.7402 +#> -0.4575 1.1255 3.4946 8.2534 5.2208 -6.3664 -12.0906 -7.8641 +#> 2.6943 6.7818 -9.0341 -9.4007 -8.5445 -1.6037 8.9691 13.0356 +#> -14.1278 -3.0522 9.4725 9.2014 -13.1171 6.1672 -0.8044 -4.0928 +#> 6.6042 -6.2427 -3.5026 2.7236 -8.6202 -14.5676 -4.3816 0.7696 +#> 22.2243 -6.9362 -7.0573 -8.7486 2.7153 6.1101 -16.0233 2.1742 +#> -13.7068 6.6661 1.7033 -11.3240 -11.4527 9.5502 2.0516 12.7820 +#> 3.1524 11.6998 -13.2027 -4.6594 7.7246 6.6598 11.9521 -12.1999 +#> -6.6184 11.3413 -11.2106 -15.3998 2.6890 -1.4778 5.3495 -10.7252 +#> -3.6577 11.2616 1.4742 1.5310 4.6568 0.4294 1.0309 2.6902 +#> 8.5144 1.1739 -1.0834 10.9488 -10.1380 5.7704 8.0036 7.0080 +#> 3.8915 -1.2978 -18.9493 4.4777 -2.6526 -11.6430 24.0570 0.0046 +#> -16.8701 -13.7557 8.7846 -0.6851 -0.2110 13.2500 17.5430 -8.3173 +#> -8.1549 -14.5187 -2.9298 4.2745 -6.8029 16.8042 
1.3567 8.7368 +#> -12.3244 5.6606 -8.1712 -1.2285 3.8558 1.5351 3.3973 -4.9187 +#> -17.0428 11.9150 7.8854 -7.6420 -3.5685 -4.1366 -1.1942 4.0374 +#> +#> Columns 25 to 32 3.8753 12.1455 -10.3902 21.5935 -18.8873 -4.4217 5.4072 -10.8176 +#> -3.5803 8.0410 6.5697 11.4057 -6.6880 16.8841 -10.9073 4.8997 +#> -1.9040 -10.2239 5.8802 -11.0792 -21.3019 -2.0365 -5.9360 9.7430 +#> -8.0633 -6.1166 11.4493 5.6100 2.9430 2.8234 3.7902 1.3998 +#> -7.4949 -2.5945 10.6476 -0.1444 -2.0789 -4.2223 12.1262 -13.0240 +#> -1.9563 7.9041 -8.2371 4.3589 -1.7796 -12.8283 -4.6996 -10.9314 +#> 5.3424 -7.0446 -9.1074 13.1848 9.3316 6.7332 -0.4355 -0.4085 +#> 13.1640 6.5910 6.6802 8.2557 5.8556 8.8972 -29.5956 -18.1542 +#> -13.0963 1.9939 -14.2596 -15.5464 -13.6829 15.0417 -16.4008 -2.9759 +#> 15.3115 1.7133 -1.4397 -5.3574 -11.3403 -11.9070 3.8297 14.8635 +#> -14.1224 12.8473 5.8808 24.0365 1.0657 -0.5976 -9.2761 17.4670 +#> 0.3925 -13.7639 15.7773 -14.0898 -3.5661 9.7524 14.9191 14.1870 +#> 17.9036 -8.2760 2.8901 5.3520 -3.4983 5.3481 4.8200 0.8641 +#> -8.7172 -5.9535 9.4824 -8.0497 -9.2682 11.7967 0.9077 -8.5644 +#> -5.9413 -1.3933 8.7317 -2.0789 -7.9804 3.9378 -1.1050 3.8051 +#> -7.1543 14.8597 -15.1809 13.5658 7.1848 3.1301 4.2739 -12.0077 +#> -12.0043 -4.9602 3.2887 11.7474 0.8738 4.3405 0.6988 14.6923 +#> -10.9789 9.9063 -4.3363 16.6096 10.6429 -8.7303 11.5028 1.0445 +#> -14.3490 -8.1508 -12.7705 3.2201 10.8448 17.9310 6.7438 -3.3404 +#> 9.3696 10.0469 -4.5626 0.0225 2.7414 0.0235 -9.0254 -15.0893 +#> 2.2884 -1.1768 6.5486 -4.6490 5.8459 4.5072 -3.3865 9.3412 +#> 4.5308 9.0664 -2.7439 7.3581 -4.1265 -5.9932 -16.8750 5.9705 +#> -21.3790 19.8427 -15.4205 -11.0711 5.5214 0.8218 -0.6010 2.8180 +#> 1.2954 5.8568 -0.8064 -0.7497 -7.1671 -3.2086 -5.9610 24.4766 +#> 8.3796 1.1624 20.2413 -13.5125 5.7963 8.6417 -0.6447 5.5746 +#> 12.1926 -2.8949 14.3854 -16.4621 -2.0113 -2.2683 -4.5900 3.5811 +#> 5.1163 -5.6551 7.6786 9.6312 -10.8648 15.4038 -9.3148 -28.2952 +#> -3.9097 8.7084 -6.6107 8.1558 14.1674 5.0755 1.6191 -4.8684 +#> 13.0704 -1.4697 2.6999 0.3325 21.9355 -14.7359 4.8868 20.5376 +#> 0.7001 2.7432 -7.9853 -1.1106 2.8453 -3.1376 9.9429 7.0484 +#> -4.3222 18.9010 -11.8631 6.8128 5.8591 -9.9242 11.8052 -0.1487 +#> -7.2910 5.3112 -2.1243 -17.4465 8.2097 2.4354 -10.0280 -10.1925 +#> -5.5466 -12.9839 -16.9296 10.1625 -0.3858 7.9458 -6.4372 6.2500 +#> +#> Columns 33 to 40 -1.3404 -4.6652 0.2426 8.0424 0.6240 -16.2287 -14.0641 -19.2772 +#> -7.0186 3.6977 7.4803 10.7408 -8.8736 -14.8762 -16.7146 3.4571 +#> 8.6031 7.3687 -11.6173 0.4644 -4.1822 -5.7826 2.2955 -10.0396 +#> 20.5584 -2.4726 -0.2091 -7.1217 -10.4729 -21.7939 -5.5822 -14.3465 +#> -3.9753 -7.7535 -5.9620 5.0671 -6.6821 22.7495 -1.0369 -18.5720 +#> 23.5852 -5.0578 1.8712 -0.2534 11.4244 -9.9855 -11.5512 -9.1573 +#> -7.3007 1.1173 -13.2500 2.9722 3.0153 -1.2784 -1.3786 9.7177 +#> -21.0926 -2.6614 2.1680 4.4065 2.3558 18.4460 -4.7796 13.7280 +#> 6.1411 -6.1539 0.5368 7.9554 -0.5887 -0.8597 -9.4036 10.7069 +#> -5.3225 6.9767 7.7706 12.5271 -8.3516 -6.7179 6.0514 -12.4826 +#> -16.8952 -17.3380 11.5551 7.8882 -1.1232 13.2687 0.0060 -7.5381 +#> 9.1225 -4.0228 21.3088 14.6427 -15.0176 12.6761 -2.8529 19.8361 +#> 6.3292 -6.2752 5.2451 18.2435 -14.3518 -4.0304 -9.1245 -9.3231 +#> 4.2020 -8.5146 -10.9979 11.6502 3.6841 3.6257 -7.7977 -5.8441 +#> 6.8744 -12.7409 -35.1760 -13.4471 0.2886 5.2108 -3.0605 2.9356 +#> -8.3027 4.0940 -3.8117 5.8515 -5.6651 -4.1635 7.6188 -0.6298 +#> -5.0789 -7.8177 13.5700 7.7381 -10.9249 4.0041 -6.8821 -10.9187 +#> 14.9364 -14.7832 -6.2318 
25.2370 11.7722 -10.2568 25.8816 -6.2300 +#> -10.9443 5.2411 5.4978 -13.5603 -14.5684 -22.4316 0.6638 5.9151 +#> 5.8436 -4.9731 5.6021 0.3048 6.9230 1.0252 -5.4153 11.8626 +#> -6.0750 -4.4299 8.1145 -6.4224 -7.7818 17.3477 0.4304 1.8351 +#> 0.4143 -20.8468 3.2939 0.2564 4.4796 9.6334 12.4289 -2.4255 +#> 2.4209 -0.2858 7.2184 8.9520 18.5336 -14.5567 -2.3773 -0.9898 +#> 5.8089 -3.6283 -11.5245 -6.8369 8.2689 8.0623 5.3862 -0.2187 +#> -13.1077 1.8365 -6.6235 -3.1059 1.2266 -0.3518 -1.3653 -1.2756 +#> -2.6019 9.8177 -9.1612 -5.3509 -6.6606 -13.1829 5.1352 4.0275 +#> -11.0478 -6.3089 -4.6812 0.9207 -2.8915 3.5621 -7.4784 2.9841 +#> 0.3917 -22.1856 -3.4268 -4.2744 10.9030 6.2728 8.8917 8.5960 +#> -6.0017 -12.6934 3.1050 -6.7541 2.0820 -10.3219 1.6366 -3.0606 +#> -6.1262 -2.7377 0.3554 -18.5050 1.3357 4.7694 -4.1198 -5.4279 +#> 9.6639 -6.7754 -9.4867 -7.6361 -1.1628 -1.1184 -9.1079 -10.8073 +#> -13.6114 13.6032 -18.6887 -5.2429 -17.2754 -3.4884 -7.1409 -19.8385 +#> 19.4462 -1.8338 -5.5235 -12.7856 -7.5409 -12.7443 -0.8764 -14.7751 +#> +#> Columns 41 to 48 -0.7075 -3.6871 1.0846 3.5491 3.4595 11.7627 3.4947 -3.7417 +#> -5.6032 6.1089 -17.6123 8.4267 -11.8817 -16.1448 -5.5878 15.7792 +#> -2.1483 -3.1593 -3.4288 17.5301 -15.8145 -1.0703 15.5746 0.5555 +#> -15.7804 -7.4713 0.1368 4.8883 -0.9655 -6.1302 9.3916 -3.7362 +#> 5.3681 2.9075 12.5258 9.1905 8.1007 -1.0967 17.8497 -4.7358 +#> 2.8150 -17.4774 8.8003 -7.3641 -0.1207 -8.1908 0.8019 3.8171 +#> -13.9557 -1.9243 -4.9146 5.1251 1.8861 -6.6843 -11.1276 10.3518 +#> 4.8836 -7.3427 7.6724 11.3192 1.1050 2.1295 13.2262 -5.3453 +#> 8.2690 -2.3365 19.8755 5.2859 -3.4894 6.5555 1.4601 -13.8344 +#> -10.1978 -0.7198 1.0085 -11.5294 -18.9771 2.2056 -0.8485 -12.1431 +#> -2.2154 4.2609 -9.3026 2.6947 0.2253 -5.8004 -6.6905 15.8501 +#> 10.0000 13.3104 5.9539 -15.7541 -8.8126 1.6318 -16.1992 -13.3627 +#> -3.4504 -2.6034 5.4614 -3.6517 -9.4828 1.6330 5.7189 -13.0835 +#> -3.8162 -13.3095 -0.0463 5.3641 -7.2300 4.0203 14.4388 -9.4494 +#> -5.5874 -14.9909 -18.4548 4.6491 11.6345 14.4683 -0.9663 9.3870 +#> 6.4413 -12.1750 -15.1652 7.4948 -8.9703 -9.7146 -3.1714 6.5990 +#> -3.7498 -12.8697 -1.7002 3.1643 -0.0843 1.2336 -4.3673 -2.5000 +#> 3.9990 -20.6433 -2.5107 8.6411 -16.4188 -15.1635 14.5629 15.7645 +#> 5.3677 -1.3633 -5.7406 2.8192 2.1830 -17.3066 -10.0983 -5.0137 +#> -5.7851 -0.4517 -3.3496 -8.0487 6.5608 0.9516 5.0757 -6.3950 +#> 0.6421 15.2076 -0.0578 1.5717 -6.6474 4.1189 -18.8823 11.6054 +#> -1.4062 -5.0325 -2.6647 4.6340 1.9222 4.8882 -0.7630 -12.8169 +#> 0.0601 -5.9999 4.0442 7.2946 -4.8901 4.5484 -2.1316 -3.1636 +#> -1.7716 6.2030 -5.7828 -0.3988 5.3320 -6.8372 -4.4370 1.1744 +#> -4.5766 6.5112 5.6447 -4.3751 -10.3169 -0.3645 3.5060 10.3789 +#> -9.9366 8.3807 4.1887 -1.2544 2.1130 1.5043 12.1316 17.7038 +#> 4.1311 -9.2013 8.8707 13.5254 4.5054 3.1386 13.2180 -20.1743 +#> 5.2612 6.4132 -3.2005 -5.6742 10.9046 6.4287 -0.1744 1.8012 +#> -6.1653 0.3093 -0.3940 -10.2995 -13.1186 17.8623 -14.0711 -13.1964 +#> 10.3253 -3.8395 10.5909 -13.7830 7.2059 -2.9572 -19.1352 31.3352 +#> 4.7028 -10.9875 3.3549 -8.2194 7.2142 3.7944 1.4418 4.4237 +#> 1.0914 -18.5742 -3.5826 19.5459 0.2027 18.6070 19.6489 10.6596 +#> -1.6508 0.1999 -7.0168 19.1316 -12.3294 -3.5433 1.1942 -9.3961 +#> +#> Columns 49 to 54 9.9891 -8.3729 -6.8773 0.4956 -0.5590 5.0643 +#> -8.0108 8.4069 7.9129 4.0763 4.8907 5.1555 +#> 0.3710 3.4169 -8.5330 -5.0090 -6.0567 -2.3113 +#> -4.2001 -7.1632 0.0594 0.0326 9.0647 2.3119 +#> -9.2387 -12.1768 -0.6337 11.8236 2.7781 -8.6904 +#> 3.4671 -9.5394 8.2766 -2.7984 
-5.0650 -3.5717 +#> 3.3864 -2.4448 -2.4917 -2.8890 1.6809 2.4046 +#> 18.7811 13.6295 -0.8040 -2.9517 0.2973 -1.5790 +#> 5.1043 -4.6780 -0.0868 -4.3414 -3.8778 2.0134 +#> 13.8499 4.1464 -1.7200 2.3408 0.7762 -4.0872 +#> 23.4962 -10.3445 2.5922 -2.8299 0.4665 3.2759 +#> 4.5790 3.4190 -2.6821 -8.1166 -4.6453 5.1660 +#> 6.4918 -3.8520 9.4369 6.8390 0.1168 1.2807 +#> 9.5057 -1.4327 -14.1694 -1.9196 0.5504 -1.6491 +#> -1.2961 5.8970 -5.8560 7.8415 2.6092 2.5312 +#> -10.8947 11.9945 -0.7405 -7.7695 -5.9003 3.6948 +#> 6.9811 -5.4123 -7.1316 8.5120 4.9062 -3.5583 +#> -4.5261 -0.6889 -10.3639 5.1176 -6.1424 4.9872 +#> -12.3113 -1.7938 6.5298 -1.8617 -3.0636 5.8704 +#> -0.8052 3.7009 10.1956 -8.2405 -4.2964 8.8030 +#> 18.5907 -6.3344 -1.4395 -6.6994 -4.3130 -5.4660 +#> 9.5798 0.9034 -0.3554 1.5547 5.9826 -8.7406 +#> -13.1542 0.9034 -8.4751 -6.0076 -11.4328 1.2385 +#> 8.7175 -3.3861 -5.9570 -2.1920 3.3056 -4.5674 +#> -5.2959 6.7782 13.3159 1.1097 7.3374 0.2493 +#> -11.3829 -3.8999 4.7460 -0.7568 1.2069 -4.0650 +#> 11.6582 7.8920 -15.4628 3.6439 3.7283 -1.1876 +#> 1.7519 8.5374 9.2063 -5.8952 0.1843 3.2878 +#> 2.1885 10.6823 5.4717 -7.8739 13.3070 12.0191 +#> -8.3412 -8.3748 28.7164 -6.4650 -0.8949 9.5608 +#> 5.7447 8.8891 11.6408 3.4265 -5.8180 3.1247 +#> -20.6969 13.7208 -7.9311 9.4007 -0.7257 -3.4730 +#> 0.9533 -1.9686 -4.0524 -3.8427 4.0237 0.0430 #> #> (6,.,.) = -#> Columns 1 to 8 -2.3933 -2.8231 -14.5649 1.2265 6.9484 16.7138 2.5152 -3.8714 -#> -2.3410 0.0383 -1.8482 -1.2086 21.3870 2.8062 -8.6456 0.0021 -#> -1.8909 3.2607 -0.2578 0.7585 1.2088 4.6651 -14.7558 -7.0397 -#> 1.6005 3.0437 2.7254 0.4729 0.4200 2.9602 -5.7026 7.4910 -#> 7.7097 5.1390 -4.2629 -5.3246 -13.7337 25.3440 -7.3598 4.1057 -#> 8.0588 -5.3984 7.8002 -8.1057 -0.1618 24.6554 -12.0122 3.7527 -#> -2.1816 7.3111 -13.8940 -13.4551 19.3834 -5.2697 6.9472 -6.4791 -#> 4.0208 -3.4722 -17.0254 7.7089 -7.8556 7.9077 1.4991 -18.5755 -#> 4.4408 -3.0581 2.1746 0.8543 -2.8558 7.9674 8.2400 19.3803 -#> 2.6803 -8.3320 -0.8502 18.1305 3.0918 -4.5918 -7.5745 2.3753 -#> -1.4478 -10.6514 -0.9545 -0.7664 1.4152 -5.6242 19.3267 14.1202 -#> 9.0945 -8.8592 -4.3406 0.4397 -4.6462 6.7036 -5.7391 24.0809 -#> 1.9166 -6.8628 -7.8494 3.6546 17.2094 3.9769 -4.0957 3.9199 -#> 1.7243 -9.9460 6.4939 4.1563 10.2148 -24.9290 -1.3239 -3.4306 -#> -0.1722 -10.1212 -3.0323 5.8537 -12.9172 1.2957 9.8134 -4.4496 -#> -0.8825 1.3276 5.5673 7.1959 13.0362 -25.3318 -13.5864 6.1727 -#> 1.5064 -4.7732 4.0040 -23.9419 3.9104 -2.8041 3.1461 1.7080 -#> 7.4478 0.0707 -10.1757 2.4835 -1.7016 -3.2533 1.0278 6.0833 -#> 1.6632 14.5217 1.0237 -7.3828 0.9577 13.4193 -0.7450 -11.0810 -#> -0.3358 -0.7283 -2.9503 -3.8002 -5.4520 9.4111 -3.8151 6.5682 -#> -12.6043 3.4720 4.7416 -0.9385 -0.6316 5.4884 -2.0362 -7.3382 -#> -8.0593 7.6517 -8.7342 -5.1107 20.6218 9.9912 16.3979 -8.4791 -#> 1.8494 -0.2744 10.7613 -6.2050 -19.5197 -5.7177 -5.4184 -5.1411 -#> -3.0168 6.6093 -3.3874 -2.2436 18.0242 -1.7924 1.4984 6.4660 -#> -12.2303 -14.3616 16.1126 2.0701 11.4154 0.3789 4.9664 -0.8457 -#> -4.4937 8.3347 -13.6119 7.7609 -20.5116 0.1745 -0.5136 15.9122 -#> -7.5482 -0.6359 -0.0841 -5.4823 0.1912 0.8122 20.5052 -2.4496 -#> 6.8388 0.3139 7.5516 -9.9683 -10.5476 -2.7888 -1.1222 -9.8913 -#> -3.1357 9.2803 7.8254 -11.2691 -0.8377 2.4385 -8.8452 4.2665 -#> 0.1097 -9.1326 -7.8229 -12.0185 2.6693 23.2933 20.2058 -1.6425 -#> 0.5991 3.7670 0.2063 -1.4881 -1.1862 13.1080 -3.0750 10.8134 -#> -8.0925 2.0068 -6.0206 21.5375 -20.4440 3.3259 -3.7572 -20.7331 -#> 0.4287 2.6760 2.0979 -4.7913 11.6869 12.6154 1.7529 
7.2859 -#> -#> Columns 9 to 16 1.5429 -16.2952 -4.6099 4.4046 -3.0855 6.6220 1.6809 -15.7730 -#> 4.9859 -10.9777 -9.2018 -1.2260 11.0141 17.4103 -11.0088 -14.0926 -#> -5.6609 1.2191 -2.1708 12.3986 10.5315 3.7087 14.3801 0.3327 -#> 0.4658 15.3275 5.7982 1.7107 -2.0811 -0.2691 -4.6511 -22.7007 -#> 12.6620 -10.0261 15.5672 -11.6777 -9.5031 -12.7180 1.2024 -19.7086 -#> -6.3520 -7.2483 -7.2158 -5.1041 -6.8909 -7.9554 -0.5690 7.9753 -#> 2.5586 1.4266 -5.7646 -22.6115 14.7957 6.7313 -12.9693 6.5472 -#> 2.1179 -6.2491 6.2176 3.6738 -3.9064 6.0592 17.8334 -1.8069 -#> 5.5736 -10.2409 9.7896 9.8987 5.8255 9.5428 3.6325 2.3045 -#> -11.4319 7.3671 -8.7400 -20.3050 -16.0279 -13.2162 -0.9830 -18.5106 -#> 1.1721 0.5274 -10.7990 15.6938 5.4564 0.7109 -6.9359 22.1753 -#> -8.8085 9.1855 9.5483 1.9305 -12.1039 1.4839 -9.9639 -15.0440 -#> -2.3187 -12.9011 -8.9538 11.1161 -0.4570 -9.3071 -13.1429 4.4905 -#> 2.0216 8.9996 8.6279 19.9109 -6.0012 11.1547 5.3846 -15.7855 -#> -5.7400 11.2579 -7.9448 -0.6473 -16.4727 -14.3253 -12.2542 -4.0223 -#> 9.3313 22.7818 -15.1444 2.1813 6.9884 -2.3573 2.4577 7.7630 -#> -7.4960 5.6692 -14.7068 11.3074 -1.1772 1.8731 -1.3718 19.7326 -#> 3.1983 -1.7400 0.2044 0.1885 5.6679 -7.9948 6.3472 -15.2114 -#> 7.0347 10.1483 11.5145 -8.7661 0.6672 15.2777 8.6046 -5.1687 -#> -15.9128 -1.2608 -3.7305 -6.6174 -0.8022 -12.1907 0.5581 -6.8652 -#> -2.6660 -8.1604 13.6352 -12.5818 4.3933 0.8350 -0.8670 6.9186 -#> -14.4307 4.6077 1.2247 8.5692 7.3176 -7.7655 11.5433 26.7380 -#> 0.2708 1.2055 -9.8883 -8.4861 0.2955 1.5498 -8.4476 -3.7971 -#> 4.1670 4.8443 3.1621 19.7748 17.0420 10.3664 -0.9168 3.7278 -#> -2.1689 7.2192 8.4080 -3.2654 -4.2774 14.1565 -3.8137 7.5986 -#> -1.7891 -7.4096 -4.3061 -13.8280 -5.9388 -12.7266 11.5695 -13.9269 -#> -12.7099 -0.0428 -14.1989 -5.1375 -8.5218 -9.1595 -16.8514 -6.4152 -#> -7.9016 4.6911 -10.4946 -12.6796 -4.2166 -1.3278 1.8187 1.4088 -#> 0.2709 1.4040 -2.1233 0.3287 17.2023 -10.9112 5.0172 -3.8812 -#> -9.1995 8.2871 -14.2229 15.7726 -10.5637 -11.2286 15.9094 2.9421 -#> 2.8056 -0.9664 6.7691 7.8808 10.5534 -9.2041 -1.0348 3.5583 -#> 7.8407 -16.9434 -1.9920 -17.0384 -4.8415 -8.9256 11.2713 -15.5950 -#> 11.6981 0.1152 0.9315 1.8805 8.5721 -14.7573 0.4268 9.2058 -#> -#> Columns 17 to 24 1.6778 -6.4415 -0.1352 -8.3186 2.3479 -2.9124 -3.0235 5.4116 -#> 1.7527 1.8941 -7.2753 -6.6662 -0.6948 5.5409 7.5639 -11.6145 -#> -19.2897 2.8418 7.8411 9.0814 12.8986 2.8308 10.1728 5.7908 -#> -1.6884 14.0491 19.4663 -11.5493 1.3748 4.0061 10.2249 -6.3584 -#> 1.6027 -2.9045 7.8118 -5.7253 -2.4294 1.6924 8.8773 2.7202 -#> -6.7086 -17.3329 7.9259 -2.4976 -7.2582 13.8021 1.8096 8.7034 -#> -12.4561 6.8747 -3.2511 4.0770 1.9898 13.9476 0.8878 -12.9841 -#> 4.3855 -2.7669 -3.9468 -3.1256 -11.9641 -10.6414 -4.2919 -4.2538 -#> -3.9837 0.9678 -11.9469 -3.0423 3.9294 16.7164 -1.3809 6.7290 -#> -2.8724 -9.0192 2.6321 0.5427 17.7220 -3.3728 0.8575 10.2964 -#> 22.2418 -9.5127 -9.0471 -19.1665 12.1969 -8.3111 6.4907 3.1734 -#> -0.2076 -2.4305 5.1785 2.1002 -0.7725 1.4751 1.8629 -5.4309 -#> -5.0746 -0.7222 4.2633 6.0595 -1.4383 6.2370 2.5049 6.4484 -#> -2.7657 6.0351 -9.5651 -15.2493 6.4888 -9.2095 4.7027 -5.8996 -#> 9.5221 4.1215 11.3843 1.7492 -3.3621 -11.1883 -7.6068 1.9288 -#> -0.0211 8.0418 4.4088 7.3769 15.1228 -22.4525 -11.0030 4.5516 -#> -7.2525 5.1525 -8.8931 -5.2451 4.2944 -7.1268 -4.7372 -10.3392 -#> 1.4353 4.9326 -9.9241 6.8887 -8.8230 5.2892 -2.2666 2.2917 -#> -1.3109 -19.8390 -10.8671 -9.9451 -1.5207 -11.8419 2.0018 12.9587 -#> 0.4461 -10.1880 3.2137 2.4221 5.3363 0.4954 -15.3489 
6.2938 -#> -4.6830 3.2804 -1.1812 -8.2964 3.7031 2.6834 4.3869 -19.4382 -#> -9.0388 12.4784 -3.2806 -7.6163 1.6500 -1.9572 -12.4062 -7.6018 -#> 7.8623 -13.5411 18.3099 14.9309 9.6770 -14.5300 0.7799 -1.9263 -#> -18.3736 3.3309 -10.0669 -12.7049 -3.2633 1.2947 3.9347 9.7269 -#> 11.3697 3.8972 6.4741 -3.7142 -12.9091 6.1751 -3.9448 -7.4640 -#> 8.2126 -0.8895 4.4201 7.0476 -13.7706 1.3601 -4.8520 6.2506 -#> 8.5005 18.1041 -16.0235 -2.0557 6.6932 10.8115 -3.1100 -13.5942 -#> 7.9085 -2.9537 4.6969 13.3141 6.4515 -1.0102 -2.8777 5.5790 -#> -7.1045 15.9441 13.3113 4.8020 8.2366 4.4381 8.0431 -7.1640 -#> -8.8677 -2.9996 9.1404 2.6122 -21.6991 -16.3302 2.8424 6.2128 -#> 10.3883 -5.7031 -1.9179 -10.6424 -7.1544 2.9624 -6.7363 8.1942 -#> -0.9672 -1.5058 -4.0205 6.8180 1.1208 9.9072 3.4915 -13.4807 -#> -7.6882 1.6917 -7.1852 -13.2910 8.2148 22.9102 -9.2222 -9.8807 -#> -#> Columns 25 to 32 -5.2330 -1.3815 2.1893 -13.3959 -20.0664 -1.1699 2.4693 0.6031 -#> 3.2243 -1.5599 8.9246 -5.4088 -5.8966 -15.8264 3.9623 -7.8535 -#> -12.6833 2.4453 10.2644 6.5495 -2.7492 3.6596 1.9944 7.6586 -#> -3.8781 -10.5531 -3.0427 -4.5520 4.2960 9.7992 -3.2453 6.4634 -#> 0.0360 1.1191 -0.1727 6.7454 -9.5639 5.6739 2.7641 2.8032 -#> 2.8271 -7.2727 -0.0424 9.8217 -3.7367 -21.3563 0.2294 11.2744 -#> 10.2004 0.9974 -3.1559 -1.7444 11.6059 -4.6158 12.4775 -5.4706 -#> 4.2351 3.3644 4.5505 -12.7553 -10.2433 -4.5546 -1.5327 21.5162 -#> 4.4139 13.4896 1.2470 -2.8440 0.5755 4.0851 16.5470 3.8477 -#> -4.0246 -4.0716 -12.7473 3.2761 4.2678 -6.4974 -14.6987 2.1921 -#> 3.4868 12.5677 -2.0901 2.2527 5.1330 -0.4987 4.6309 2.8002 -#> -7.3434 1.1411 -22.2636 -4.9744 -1.3532 1.0651 -18.1702 9.8706 -#> 4.9933 -8.0530 6.8392 -4.6851 2.0099 -0.3594 16.5478 1.6143 -#> -6.7089 1.2296 -3.3766 -22.8374 -6.2673 -0.4287 -0.7818 -2.8487 -#> -20.3780 -10.1946 -14.1414 -1.8622 0.5909 -3.7518 1.6533 -5.9114 -#> -7.5912 -1.6410 -11.5662 13.6936 8.9424 15.1482 8.7913 -13.7951 -#> -0.9836 -3.2381 -11.7469 -7.2732 -3.1640 -25.6097 -7.5313 15.5874 -#> -4.0414 9.6809 -10.4774 -4.9385 1.1550 -7.6655 -16.8005 -0.4686 -#> 11.9079 12.2473 6.1715 -0.1022 0.0843 11.3742 3.1708 -18.4931 -#> -6.2798 -6.1154 17.0070 -10.1745 -13.4848 -1.4222 -6.4125 -15.9544 -#> 4.5880 -1.8172 1.3404 -0.7641 9.0390 -0.8292 8.8150 -0.0080 -#> -8.6819 13.4015 -8.5101 2.4812 10.9497 0.8663 9.6707 3.3289 -#> 0.1464 -8.4763 0.8412 2.7965 2.5752 -0.5562 -12.5688 -0.9830 -#> 15.1069 4.9995 16.4681 -0.5231 -7.8215 -13.6747 6.4876 0.6850 -#> -8.4894 -3.5768 -6.4240 -3.0812 15.4772 12.1769 10.4209 -3.3678 -#> 3.8084 1.9189 -1.7310 8.7086 -3.6002 2.5440 -0.2780 4.0019 -#> -1.3956 2.6794 -9.5926 -6.8309 6.3613 -9.5152 -0.6415 8.0382 -#> -2.3512 -6.4113 17.4262 1.2770 2.9487 0.0967 -1.5846 5.8918 -#> -7.3301 2.1242 0.9905 2.8800 -5.8558 10.1734 -3.6074 3.0594 -#> -13.2812 -16.7233 -9.6330 -11.3087 -9.2737 1.5179 -0.8227 28.9040 -#> -2.7239 6.9531 -0.7233 4.6741 4.4453 3.3907 6.6621 -4.8867 -#> -5.7248 -7.8697 -9.8186 -2.3132 -7.1376 -3.2057 -12.4708 8.3674 -#> -4.3927 1.4272 -8.7889 3.6932 10.4733 -8.7547 -10.5809 13.0088 -#> -#> Columns 33 to 40 -0.1658 -4.2741 -13.0167 2.8125 19.4319 -5.4442 8.7345 11.3106 -#> -5.4128 4.5412 -14.4241 -6.9806 20.9891 1.9883 -8.5468 -11.6336 -#> 3.9230 -5.9303 1.0094 -2.0760 -18.3764 2.7971 -9.8910 -17.1911 -#> -0.7150 -1.6136 5.7454 4.3717 -10.7253 1.1466 0.7195 -8.8246 -#> 4.0325 -3.4147 6.5115 6.8854 -28.3982 9.8396 -4.9240 6.0618 -#> -0.7968 7.2536 -8.6915 -4.6818 -6.5899 11.5288 8.7270 26.6901 -#> -4.0894 -10.4742 -2.8013 3.5833 2.8203 5.2279 -3.2011 -19.0499 -#> 
-7.7248 -1.2338 -5.4393 13.5333 1.4814 6.0945 0.7191 -8.0978 -#> 7.6185 -0.8284 -4.9946 13.8686 -2.9363 -3.2145 6.9183 -2.4251 -#> 5.0350 -11.5492 14.6761 9.1763 -7.2025 -0.9546 6.7639 9.4197 -#> 16.2627 1.5523 -14.3638 -2.5518 -6.0636 -0.8085 1.3869 -2.2191 -#> 2.2576 -7.7883 -0.2024 3.2101 2.8854 11.1889 -3.9979 2.4221 -#> -5.9915 4.2164 -8.7803 4.5399 19.8924 -5.1557 -5.6777 -5.8437 -#> 7.3394 6.0241 -1.8508 -1.9891 10.3307 4.8951 -12.2203 -3.0790 -#> -22.0107 -2.2640 7.6870 9.3464 0.2916 -5.7537 7.5604 3.1492 -#> -10.7479 -3.7193 16.7831 -15.1875 13.6446 -8.0983 -22.4715 -2.1607 -#> 11.9150 -5.6264 -2.5958 -14.5199 -7.6892 2.8243 6.1238 13.7542 -#> -12.1732 1.5765 -3.3213 -6.7741 1.4289 15.2627 -7.1139 -3.3994 -#> 14.9497 12.3898 13.8701 10.1220 -3.4746 14.4940 23.5101 4.9444 -#> -7.4806 3.8434 -10.1420 10.8480 -3.5622 -4.8281 -7.2726 -2.8693 -#> -4.7255 -2.2326 5.0139 0.5783 -5.6597 5.1953 -15.6661 -0.2395 -#> 8.3898 -11.8495 7.7439 12.7849 -1.7928 -6.8798 10.0592 -1.0226 -#> -4.4024 -0.1014 9.6252 -3.3212 -20.1394 -5.8556 4.4151 3.6770 -#> 8.4042 9.1342 9.2365 -7.5792 24.0859 0.5709 12.2008 -14.5676 -#> 0.0516 -2.5967 -7.9509 20.5446 -4.4098 -3.6914 0.0778 4.5274 -#> -7.2622 18.3369 13.0771 0.7538 5.7499 -6.8232 7.2616 9.0808 -#> -12.1754 -9.0076 11.1918 1.9653 11.7068 -2.9894 4.2650 -11.6029 -#> 13.0535 1.4470 -8.0668 -17.0397 -5.0373 12.8577 6.7251 -4.5107 -#> -3.0594 2.9478 3.9375 -13.9612 1.8279 -0.7990 -6.5206 -1.8909 -#> 4.2958 2.5674 -5.0706 3.6540 7.2688 10.8935 35.1408 6.1412 -#> -7.2251 -0.6509 7.0742 -7.2755 6.0282 -2.5625 -7.4393 1.4161 -#> 5.6039 12.7617 -3.2069 -7.9473 7.4579 20.2680 0.9503 -9.4715 -#> 1.8726 -0.4505 -8.4528 -3.1456 22.2555 8.8612 -14.3986 1.5937 -#> -#> Columns 41 to 48 -7.0365 -7.1911 -6.9832 -6.6034 1.8335 7.5551 3.6796 8.7989 -#> -3.6209 -3.4856 -6.2510 2.3313 1.1667 -8.5825 9.9867 1.9490 -#> -12.8983 -1.8999 2.1977 4.7087 24.5768 8.9532 3.6447 -12.3067 -#> -0.1780 2.2829 -1.9048 13.2251 2.4473 -1.6018 -13.6998 -0.8499 -#> -4.5863 8.7913 -1.2143 -1.1927 -10.1472 7.0486 7.8177 -2.5055 -#> -10.8818 -10.3920 -4.0015 -27.1391 4.7582 -6.4043 12.1196 2.8603 -#> -3.2827 6.1637 3.9089 -1.5718 -0.1650 2.7290 5.3034 -7.1740 -#> 12.8172 -3.8016 1.2016 -2.3019 -5.9471 -3.1962 3.1356 -7.3677 -#> -2.7111 -1.8917 -2.2459 -13.3483 7.5332 7.3395 15.2006 3.9319 -#> -4.5157 -11.7697 -5.2488 5.5137 2.6628 10.3019 -2.9456 2.4325 -#> -8.0180 -11.1056 -4.5557 -1.5262 9.2000 -6.9966 7.3665 -15.5740 -#> 16.5981 2.1450 -13.4648 6.9906 -3.8754 -6.5307 -2.1259 -2.9880 -#> -14.1285 -9.4364 -1.0108 7.9575 13.1723 -3.3897 3.5740 -9.5083 -#> 4.1621 -1.5070 -2.2087 21.1738 -1.1345 -12.7376 11.6858 12.5840 -#> 10.2081 -7.1385 -0.2708 -9.7277 -2.5300 8.6155 1.8450 1.3057 -#> 13.8926 7.6847 -7.3537 -0.5823 9.2490 -7.8143 -11.8197 2.1618 -#> -7.0815 -0.0924 1.7060 0.2990 3.1005 -2.8214 0.9185 18.8770 -#> 10.3080 -2.8859 -18.1575 0.8361 14.2335 -10.9023 -10.3132 6.2573 -#> -2.3226 10.1153 -9.8714 -11.1489 8.7382 11.8088 3.5335 -8.6061 -#> 0.9529 6.2439 13.7589 2.4170 -15.8774 -0.7123 7.9430 -5.7493 -#> 3.0389 -8.2618 0.8278 -5.5897 2.5569 -11.8408 5.3491 3.7439 -#> 0.4842 9.9147 -1.0470 -2.6416 20.6687 -2.1343 -11.0162 -4.9360 -#> -12.7470 9.6998 6.0791 5.8153 15.2451 23.3747 -4.8672 -11.0663 -#> -21.7379 2.9156 6.4470 3.1172 8.1243 8.6528 -10.7619 4.5328 -#> -1.5000 -0.2806 16.6390 0.5521 -15.4529 11.5029 -1.5393 1.3152 -#> 1.4420 10.6928 4.3994 -12.0668 -0.2689 -1.5219 -2.0803 8.6118 -#> -1.3304 5.7595 8.6643 -0.6670 -0.9824 5.3312 -9.3878 -6.1657 -#> -2.6928 5.0323 4.3580 -2.5496 
-8.2724 4.0027 -8.0189 9.2038 -#> -14.3821 7.1752 1.4434 -7.9288 3.9231 6.6579 -7.0609 2.5534 -#> 0.6453 -4.4102 -4.5158 4.6314 9.5885 2.2223 2.2485 -1.6284 -#> -3.5814 -5.0103 3.0189 -1.3147 -7.1761 -6.2938 -8.0212 -0.3543 -#> 7.2415 6.0512 3.7243 10.1745 9.9101 0.0936 9.8109 6.2233 -#> -8.6651 -7.9083 -5.1292 5.5781 5.1312 -1.0708 -4.8641 11.3148 -#> -#> Columns 49 to 54 -10.8975 11.6077 -6.2775 -1.0497 -13.6208 -1.8142 -#> 13.1456 -9.1939 0.3886 -4.1949 -1.6206 -4.2013 -#> 6.0395 -7.3898 7.8786 14.0081 -3.9137 0.4371 -#> 2.2743 -9.5347 1.2135 -10.0645 11.8385 2.1302 -#> -0.4494 16.9023 -1.9057 -6.1568 3.9840 -3.3445 -#> 0.9947 2.2809 -8.2821 9.3761 -6.5058 -5.3371 -#> 0.0837 -7.2570 10.4998 -5.4291 2.0629 -1.2072 -#> 2.0734 -4.8856 22.5101 0.4016 2.8442 -2.9386 -#> 12.2798 -4.8926 1.5630 -0.9470 4.9285 0.0782 -#> -10.5178 16.6957 1.3643 -8.1838 -7.0226 -4.0855 -#> 8.0503 -0.6926 -5.9212 16.0828 1.3463 1.9198 -#> 4.4199 -8.7152 -7.8884 2.5225 -3.3634 1.3823 -#> 7.1209 -6.7880 -12.6276 -14.1402 7.5011 3.6135 -#> 2.4809 -16.6612 3.1206 1.3862 8.9676 1.8443 -#> -17.2778 -1.3190 5.9414 -9.6632 -3.6013 2.3111 -#> -1.8095 1.7647 5.5546 9.7994 -1.9943 -0.8855 -#> 4.7578 -7.2273 2.8811 0.5475 -9.1510 -3.1228 -#> 3.7045 -0.2151 17.4081 -3.1675 -5.3228 -5.7981 -#> 9.6104 13.4691 -1.4277 1.9960 6.9995 -3.7394 -#> -13.1755 1.6854 9.4084 -9.5875 -0.8298 4.6526 -#> -0.5333 -10.8859 2.6966 -0.2496 5.6243 -0.3994 -#> -11.5627 -13.2616 10.7873 4.5478 2.5188 5.3648 -#> 14.5499 9.6646 -10.2373 1.7955 -1.3717 -3.4287 -#> 13.2639 0.8773 1.1604 1.2519 3.5976 8.2300 -#> -17.1914 0.5858 -4.3062 1.3593 6.9147 7.4865 -#> -4.8861 20.8843 -4.2372 3.0550 -6.0051 1.4374 -#> -3.9068 -33.6806 1.2267 0.0336 -3.5000 3.5497 -#> -10.3744 -0.5291 7.1012 4.2670 -4.3717 -0.9353 -#> -1.4874 -10.1441 -6.8394 12.4491 -1.1372 -2.3226 -#> -0.8102 -29.3352 5.3444 -8.5302 3.6247 1.2677 -#> 10.1890 5.5535 -6.2732 13.2793 6.5547 -1.0918 -#> -2.8983 -3.8226 1.3922 -5.4163 -2.0137 0.7241 -#> -13.1667 -1.6609 -2.3496 -2.3967 -2.1740 3.1770 +#> Columns 1 to 8 2.2135 2.2744 -2.4032 4.7625 -0.6137 3.1394 10.0055 -2.8687 +#> -1.3064 -2.2801 -1.3256 2.4759 2.6833 -7.2205 -7.7346 -9.7641 +#> 2.4956 -6.3248 -6.2271 -3.9590 -18.0408 16.4091 -1.5628 9.9763 +#> -2.4925 -2.3138 -11.9997 -6.1532 -6.3317 -5.0805 -1.6468 17.1797 +#> 0.0629 0.1995 5.9242 8.3304 9.0459 7.6142 4.8036 -12.0736 +#> 3.4413 4.0913 0.9712 6.2869 11.0677 -3.4056 -7.8102 -2.1270 +#> -4.4579 4.1702 0.6375 -0.0477 5.1982 -11.2275 8.9460 1.5677 +#> 2.6904 3.4541 -1.6124 16.3042 3.2777 4.2698 -1.1955 11.9641 +#> -2.4262 -3.9030 -3.6114 -8.2710 -1.4164 8.7284 2.1595 -0.4117 +#> 1.0814 0.8000 3.0875 4.7548 -5.3547 -17.6405 10.6495 -2.7958 +#> 5.8510 10.6322 -6.3643 -3.5246 -12.9604 -2.0855 2.7855 11.7784 +#> -0.3453 6.5470 10.6913 -1.5554 -16.3289 -11.5359 5.2700 -3.9608 +#> 1.2436 -4.5511 -9.1398 4.2730 11.6361 1.9257 7.8826 -6.6093 +#> 0.1510 -6.1002 1.6126 3.4322 8.5997 -6.4239 1.0188 4.2273 +#> -1.7900 -13.3173 -1.4404 -8.3024 -4.3626 -4.5511 8.2458 -1.4294 +#> -2.5784 1.5192 -6.5783 1.6499 -12.4026 -4.8758 -3.4585 1.5789 +#> 3.8580 -4.9930 0.7346 6.2328 8.4082 -6.5991 4.4083 -1.0046 +#> 0.6743 -4.3152 -5.8452 5.0080 3.7384 -21.9220 -12.2148 11.2428 +#> -0.3790 -2.2588 -6.7439 -4.6867 3.7880 1.1640 -26.5236 -0.0054 +#> -0.0567 -0.3626 6.5940 -10.9867 8.1805 -7.1203 1.7278 7.2201 +#> 4.6416 2.0586 -3.6743 0.3590 -4.3049 0.4161 11.3404 -1.8059 +#> -5.2165 0.2337 -4.2656 3.6181 -5.0486 -11.6908 -11.6442 -6.0226 +#> -3.0021 3.2002 2.1940 -3.9442 1.2898 -5.6723 -2.8187 -2.1008 +#> 
-3.2257 6.2033 6.8204 4.7004 -6.8386 1.4361 11.5841 -1.2795 +#> 1.0397 5.0716 9.2202 -6.6404 -11.1956 1.3397 2.8808 7.5543 +#> -1.6870 0.4534 14.1308 1.8232 -3.4654 -1.8852 -5.2898 -4.2924 +#> -1.9876 0.6338 -1.9068 8.8323 -0.4345 8.5468 7.0683 2.0120 +#> -3.1540 7.7343 -1.5833 5.7757 -4.1702 -10.7095 -4.9754 5.7679 +#> 1.6136 -3.3095 2.3101 4.8368 8.0606 -14.4697 1.9953 0.8581 +#> 5.2108 -3.1173 11.8045 -1.9914 -8.0544 -5.4813 -1.7920 -3.6895 +#> 4.2425 4.3510 3.0851 5.5588 1.3848 1.8118 6.7842 4.0513 +#> 0.0644 0.8374 6.2682 -5.2885 6.7913 -0.9329 10.0503 -7.4671 +#> 3.6094 -6.5895 -10.7463 5.7850 7.3585 -5.4725 5.5822 -16.1018 +#> +#> Columns 9 to 16 2.5039 -16.2922 -6.7044 11.8824 2.4025 0.4172 -9.0057 -2.2229 +#> 10.7156 3.2957 -8.1330 -0.3797 10.5093 -15.2512 0.5237 -0.6342 +#> 3.5383 -10.4639 -0.4879 -6.5776 -1.2709 9.3121 -1.1333 2.7572 +#> -3.7386 -17.1051 -7.2766 -11.3527 -7.5308 13.9741 -6.1271 -1.9946 +#> -9.7924 -4.5332 9.0743 -1.0211 5.2158 0.8992 7.3770 -6.9080 +#> -9.4799 -0.7012 -2.4332 1.7575 -18.9986 2.0167 6.3571 0.4500 +#> 15.5448 5.1408 -7.5658 -4.0225 -11.9862 -2.9355 4.7610 -15.3453 +#> 1.0483 12.8483 9.6863 -7.8222 5.2626 -11.0110 -9.7563 -7.3414 +#> -12.3625 -18.6203 1.6335 -4.7872 -2.5059 -4.4079 -10.5989 30.9205 +#> 7.5354 -7.8220 3.7788 -4.7764 6.7773 14.5389 -4.9261 1.6773 +#> 5.5107 9.9199 -16.7830 -3.2340 5.0264 -7.7856 -4.4200 -2.0426 +#> 10.2898 -0.6562 13.8536 -7.5734 -3.7500 -8.0234 -7.8404 12.8900 +#> 7.7300 -8.4238 12.9362 4.6471 -3.7397 2.9992 6.6356 1.1758 +#> -1.9281 12.2917 -7.8830 1.6001 -13.4961 -2.0411 -13.3746 8.9633 +#> 7.3628 -4.3547 5.3241 2.6318 12.0781 3.5330 -6.0297 -11.6909 +#> 19.1012 -1.9865 3.0640 -3.6507 -16.9498 9.8562 6.5746 6.8137 +#> 0.4844 -3.9859 9.2041 -1.4860 -3.0415 -3.6633 -2.2930 -3.6276 +#> 17.7334 6.0643 -20.0312 7.5501 -6.6048 20.6153 -8.1628 -16.0870 +#> -2.8171 5.4657 3.7815 -22.8397 -20.5106 5.6540 -23.2079 3.1174 +#> 0.3893 -3.5663 7.9546 7.6300 2.5896 1.0693 4.0117 6.4425 +#> 4.9693 -6.6747 2.4427 -12.0373 -14.6726 -3.0411 -15.4597 15.5744 +#> 8.4786 0.4865 -13.4001 8.0536 5.8684 8.9350 13.7514 0.1885 +#> -1.3111 -0.0431 -5.9225 12.4518 -6.1468 -14.7867 5.1663 15.2329 +#> 2.2690 17.6904 -5.1850 -1.2463 15.9314 3.8881 7.6665 -0.2140 +#> 4.5795 6.1591 7.3600 0.5866 7.5447 -11.4432 -4.2585 0.7696 +#> 1.5206 -4.7729 -2.0287 10.6773 9.3859 -8.6040 6.8237 18.7661 +#> 3.6645 5.7808 -5.1768 2.2680 -8.8899 1.0830 -12.0403 -4.9888 +#> 3.9320 -6.8440 6.8704 3.8760 -3.5369 -11.5604 -5.6720 -13.4513 +#> 7.2588 3.0454 9.8922 -18.0485 -4.1625 10.1040 6.0447 1.3142 +#> -3.6985 -9.5494 33.4702 -8.9046 0.8596 -3.5953 2.8533 8.5468 +#> 1.5409 4.8864 16.5131 -14.2863 -2.6458 12.7455 -15.9059 -2.5760 +#> -13.7460 -3.1347 14.8093 26.8160 -16.3366 -9.4262 -0.3195 2.1399 +#> -6.3656 -4.6834 3.4649 -9.4979 -23.5982 7.6751 1.6219 10.9824 +#> +#> Columns 17 to 24 9.3324 6.3304 1.7720 -15.0587 -12.5964 9.3043 12.9548 -7.7601 +#> -2.7592 -7.0683 -12.8870 -2.1885 -4.5443 8.0038 -3.7140 -3.1095 +#> 0.0334 -5.2943 -0.1540 5.6037 0.8544 16.0952 -0.0605 -5.9662 +#> 6.0038 -13.9490 2.1470 -8.1573 -6.1015 -2.2869 -0.5762 -2.5651 +#> -4.2905 -10.4983 1.6719 -4.4364 -5.8537 6.5986 8.9249 -8.1591 +#> -4.2759 -16.2675 6.1078 -16.7187 -3.4638 -0.2850 13.6264 -5.7115 +#> 5.9395 -13.2931 -4.3535 -0.6677 -6.1867 -17.5077 -12.4860 -2.8227 +#> 4.8826 4.2179 1.1313 0.9272 1.3374 5.9875 -11.4002 6.4245 +#> -8.7054 -4.9952 2.0889 4.9943 -6.1132 -0.7138 -7.9137 -4.9143 +#> 4.4980 13.3026 -8.7420 5.3964 -1.8704 3.4872 -2.0934 2.9711 +#> 0.8424 11.5982 8.0608 
-#> [ ... output truncated: printed values of tensor slices (7,.,.) through (9,.,.), 33 rows x 54 columns each ... ]
+#> [ ... output truncated: regenerated printed values of tensor slices (7,.,.) through (9,.,.), 33 rows x 54 columns each ... ]
7.4400 -0.4392 3.8846 5.0058 3.8753 -#> -3.3200 -5.5672 1.5313 17.5748 3.4141 -7.4009 -13.9479 3.4515 -#> -10.8194 6.7758 -1.8326 -5.6283 1.0390 20.6476 1.7635 -15.6967 -#> 9.8613 -2.6945 4.2391 -7.6302 -0.9557 -6.1139 -7.9384 -6.0483 -#> 11.0115 4.5217 -7.0642 -9.2148 7.0387 -8.5065 0.6995 -6.8530 -#> -5.7435 4.7510 -8.7562 -3.7314 -0.9758 -3.0986 -6.8076 8.7105 -#> -8.1038 -2.4940 -4.7496 -17.7449 6.4212 -9.0613 -17.9790 7.3966 -#> -6.8502 -18.9183 -7.4141 -2.1069 7.5214 -5.9213 -2.8063 7.5368 -#> 1.5855 -9.5122 -0.6550 -0.7084 24.4028 1.6516 0.6475 -11.2705 -#> -9.6477 -0.4903 -17.6532 5.7543 18.2415 -4.9971 -3.8286 -3.2842 -#> 14.1147 9.0895 6.6478 -18.3653 -13.5341 10.5265 -6.7646 -13.0001 -#> -16.9131 -3.4685 -19.9777 -4.9386 -1.7271 4.9551 12.4286 7.8517 -#> 4.1537 -11.3263 3.8526 -6.3522 3.6299 -13.6234 12.4613 -4.8751 -#> -#> Columns 17 to 24 -4.2706 9.6318 -14.4805 6.3895 -3.7929 8.0710 2.0545 1.8292 -#> 7.8162 6.6203 -5.8975 -1.1224 1.9272 -0.8045 6.8998 -3.2840 -#> -6.8472 -17.6737 7.2496 -3.0438 8.6990 3.2873 -2.7906 2.0976 -#> -2.9556 -0.9176 17.6425 -7.5155 -2.5665 -10.1610 3.8583 -6.7622 -#> 1.1791 13.9903 6.0277 3.7011 9.8324 -2.8115 -1.3943 7.6187 -#> 0.9433 16.6836 17.6236 -18.7736 -6.3167 -4.2612 8.3085 -3.4196 -#> -3.4771 -1.7288 19.2656 10.8092 3.1097 2.7039 6.5544 13.4471 -#> -2.5811 -7.8217 -10.2289 7.0625 -8.1599 -0.8722 -11.0821 -5.3660 -#> -0.3557 0.1654 -27.0798 7.3281 9.1698 4.4319 7.1142 11.8328 -#> 2.6711 12.0621 26.9639 3.9798 -1.4104 7.9621 2.4684 -6.6909 -#> 4.1163 -10.5928 -14.7403 -12.4781 -14.6933 10.3249 -18.3088 4.6522 -#> -0.0078 -4.2932 4.7690 0.9550 -3.7099 -4.5541 -7.9861 -6.3120 -#> 4.3695 13.4931 2.4103 11.5745 7.9793 8.5670 -1.6531 1.4744 -#> -5.4559 5.6987 -10.2102 -7.2425 0.9817 -2.2712 6.4125 3.5513 -#> -1.8864 2.3376 6.6529 -13.3615 -10.9942 -2.3597 0.7256 -9.1127 -#> 0.1496 1.5402 -2.3539 3.0123 9.9166 5.6713 -8.0756 3.9683 -#> -0.4101 9.1029 2.3711 -10.7000 -15.2477 -17.3745 -7.0343 -9.0581 -#> 1.8222 -8.6733 6.3450 13.1310 1.8973 -3.9703 9.2409 0.7033 -#> -6.0650 -3.3313 -14.9866 13.3368 21.1127 25.5643 12.0376 15.6202 -#> -6.1982 0.8577 7.0105 -3.4303 5.7572 -0.5078 -4.1691 -14.3487 -#> -4.9384 -2.2306 12.4091 9.5809 -7.9701 15.5971 -6.1461 3.4497 -#> -14.6010 -19.7534 -3.0943 -15.2183 -10.4810 3.3483 -6.5617 -10.0276 -#> -1.8306 1.1335 2.9998 -4.5367 0.8741 -3.0733 -2.7563 4.2682 -#> -5.0458 1.9771 -33.9085 -3.2428 1.2707 16.8337 14.7150 10.6823 -#> 2.4681 -7.3226 -22.9366 -3.9452 -18.8797 -18.5527 -2.8076 -8.6607 -#> 9.9208 3.2776 10.6686 -4.7430 14.8613 -3.8122 4.8039 -4.9235 -#> 9.9328 -2.9447 -0.5335 -0.8264 0.1861 9.5124 5.4681 -14.0606 -#> -3.1846 8.4049 -1.4289 -6.1339 -9.3869 -2.7514 0.4952 -11.9531 -#> 18.0310 2.2193 -14.0216 -13.0492 22.2719 -4.2757 4.5596 -10.6983 -#> 7.9559 1.0702 -5.5950 -17.2573 -13.7766 -13.7111 0.9190 -4.5991 -#> -0.3905 -3.3458 1.7576 6.3945 5.7497 -2.7466 2.4251 13.0997 -#> -4.5891 0.2517 7.7112 -8.0639 -1.7789 -8.1066 0.9122 -13.0374 -#> -5.3413 -4.9465 5.5711 6.2203 -0.3533 2.2089 -8.5250 1.5109 -#> -#> Columns 25 to 32 -2.5417 0.8453 4.7376 -2.9612 1.8473 1.4810 4.5415 7.2610 -#> -4.2711 2.9126 -7.8663 -0.3670 -9.6876 -15.3951 3.4643 -1.2463 -#> -15.1669 -2.1372 9.1019 -1.4552 4.6120 4.7544 -4.4678 2.5946 -#> -8.8096 -2.5299 0.3544 3.5773 1.0737 -6.6828 -2.0851 1.3127 -#> -7.1648 -0.3307 -9.3747 4.5719 3.2879 7.4797 11.3753 8.1376 -#> 4.2475 2.2863 4.1592 2.2073 18.8083 1.7558 5.6452 -11.2770 -#> 1.8582 -17.5141 -9.8492 6.6420 0.0380 -7.6875 -7.2550 6.8817 -#> -12.4302 -8.5575 13.1428 6.6443 5.4586 
4.6629 -0.2422 5.3181 -#> 11.7403 1.0674 -9.5835 1.3690 -7.6413 -6.8457 7.1888 -11.0833 -#> -8.1675 -11.4043 -6.3836 -7.3862 -8.4281 -5.4854 7.3219 -0.0165 -#> -0.8766 21.4948 12.8014 14.8860 26.1611 7.7665 -12.3399 -11.0969 -#> -6.5066 -8.8206 3.9323 7.7160 7.8918 5.5574 10.4558 9.2808 -#> -6.7053 -1.6588 1.5652 -2.6664 -4.8871 1.6638 4.5137 -3.4380 -#> 0.3392 14.3931 -0.6118 0.2134 -6.2186 -5.7362 -4.2839 4.3567 -#> -13.5350 -6.2299 0.3904 -16.4546 -7.2557 -0.6942 9.1569 -9.9394 -#> -4.5590 -2.8492 0.1492 -0.4079 -4.7893 -0.5491 17.3962 3.4775 -#> 2.0336 5.1708 -12.4232 0.2248 8.3460 1.0982 -0.4847 -4.7426 -#> -5.0570 -7.8702 3.1474 -8.8002 -2.3591 1.0060 1.5078 -5.2314 -#> 16.9447 12.7382 -13.2697 -0.0683 5.2825 -3.0754 -7.3953 -5.5983 -#> -7.7573 -5.0490 -4.0720 1.8675 -6.4763 -2.1763 3.5788 14.7622 -#> -4.0019 6.1244 5.4763 0.6726 1.2970 4.2389 2.8034 4.1622 -#> -0.9995 6.5293 2.1186 10.6913 2.2119 4.5758 -7.5272 -8.9917 -#> -8.7199 -8.4839 -18.1779 -9.1905 -4.0585 1.9482 3.9999 -7.3518 -#> 12.7110 9.5307 15.4975 13.9518 -9.2565 2.2380 -5.5922 -5.2402 -#> 4.5704 -11.6175 -7.3932 6.6459 4.7025 -2.2518 8.3495 -2.0091 -#> 10.7717 2.8472 6.3731 -5.9258 0.8502 -2.0102 1.9226 -6.5894 -#> -14.1405 -2.1212 6.4797 -14.1395 -16.3247 7.2465 -6.4626 -4.0388 -#> 4.1977 -2.3055 -2.6956 0.9332 2.7282 2.3741 -8.7264 8.0648 -#> -12.1615 -6.7460 -9.2023 -3.4595 -13.0378 -0.4108 11.2732 -8.4836 -#> -9.0775 -24.4088 0.1750 5.2929 -2.1573 3.3092 -13.1942 -8.2572 -#> 3.6687 12.6269 0.1908 -6.8676 -1.9316 6.3263 -9.2124 -11.4998 -#> -17.3930 -2.7537 -1.4206 -6.4436 -3.9444 -11.5823 -4.6937 7.5235 -#> -11.4818 -5.7549 5.9569 -1.7157 -7.7764 6.1034 -10.4827 3.0300 -#> -#> Columns 33 to 40 -4.0546 5.7033 -1.9717 4.9453 -4.0232 21.8872 -3.3472 2.4886 -#> -10.6006 -6.1495 1.9815 4.9314 1.9719 6.6286 6.7427 2.7484 -#> 5.8264 -10.3392 18.2553 -9.3866 -14.3963 -13.8339 0.8560 -11.0095 -#> -7.8975 11.0141 -15.8547 3.1282 -5.9673 8.7539 -0.4567 -4.7988 -#> 7.7529 -1.0745 -16.7453 -11.1365 -7.0175 0.1077 -11.2151 -0.8434 -#> -0.7133 -7.7374 -0.7937 -10.8897 16.1370 -13.8290 -0.1160 3.4583 -#> -1.5516 8.6448 -4.6002 1.4309 -3.2235 2.6007 12.0963 -13.2540 -#> -7.9362 -4.9958 26.8412 -13.2264 -6.6898 -11.1547 -2.7267 -0.6085 -#> 4.1175 -7.4804 -3.3964 -10.2496 22.5975 -22.6124 -4.8042 -10.3592 -#> -8.3552 -2.6763 -0.5044 -19.3676 10.0768 3.1593 15.9853 -3.9306 -#> -4.9406 -2.3733 22.2437 -21.2701 4.0356 -19.9860 -16.7914 8.4032 -#> 9.8218 -10.7498 10.6639 1.5719 0.2460 -3.3337 -1.8820 14.1830 -#> -11.1259 -1.1850 0.7274 -3.5176 4.5723 -1.3078 3.5844 4.4614 -#> -9.0342 1.3072 0.6087 5.9559 14.0140 2.3856 11.1323 -6.3673 -#> -0.9597 6.0675 3.4484 -16.5896 14.6504 2.3404 13.2214 3.1287 -#> -15.1154 -12.5263 6.2127 -7.7274 -0.1448 3.1026 -2.2499 -11.8948 -#> -10.4774 -1.3816 -4.5550 -23.3595 11.2419 -2.9943 2.6173 5.4653 -#> -1.0691 5.1118 5.7607 -16.5816 14.0878 -3.2260 17.9101 -9.2509 -#> 4.7743 -1.2142 -1.9710 12.9453 -5.1697 -6.0383 -12.7658 -3.3180 -#> -4.7639 -9.1992 5.1599 2.0450 11.3600 -2.9904 -4.9618 1.1367 -#> -0.2035 7.4995 6.7237 9.6282 -15.1450 0.0953 -1.5054 -3.1165 -#> 9.3922 6.3088 10.0187 -6.8202 0.7742 -5.7718 9.5027 -9.4457 -#> -7.3586 9.0690 -9.2550 -11.7036 6.6068 -4.6989 -0.0246 -0.4834 -#> 8.0758 11.4354 -0.4742 16.5706 -1.5196 -2.9838 -5.4655 -11.9395 -#> 16.8395 -8.0241 -7.3224 20.5876 14.2268 -10.0591 16.2582 -3.5242 -#> 6.7963 10.5324 -16.6560 5.6012 -9.8153 2.5755 -13.8786 3.6098 -#> -4.7407 3.8492 14.7185 -4.0867 17.5908 16.4011 11.1086 -21.1926 -#> 13.4565 -10.2485 -0.9491 5.4584 7.1630 
-14.4297 -6.8817 1.7544 -#> 4.9086 3.5352 -14.2149 16.3630 4.2086 3.2492 -4.4649 -5.6819 -#> -2.4664 -7.4547 7.8175 -12.1694 21.7147 10.8425 13.5847 6.1566 -#> 14.5458 1.8772 -4.1889 -15.3383 -8.2980 6.9765 3.1014 -15.1226 -#> -1.3925 3.0163 -11.4281 19.1352 -12.2901 -1.0417 -13.7466 13.6779 -#> -0.7463 1.0728 -0.6218 9.9406 -9.6542 3.4510 20.0130 -7.8743 -#> -#> Columns 41 to 48 7.7196 0.3849 -8.3918 -7.7087 9.4628 6.3112 -4.8614 0.7898 -#> -10.5400 6.7290 -3.3070 11.4925 17.5507 2.6617 10.3062 -3.3887 -#> 2.3790 1.5194 5.8885 -2.5441 5.1343 -5.0835 -19.2448 -6.2814 -#> -13.8772 -7.0727 5.0106 -2.6973 10.7691 4.7806 1.1937 1.7533 -#> -0.1879 11.4678 7.5956 16.2828 2.8012 5.0949 5.4573 7.6978 -#> 18.0733 16.1860 10.2843 -3.6556 7.2638 8.1729 11.3880 -15.7216 -#> -9.6233 -3.4684 -1.5559 2.1601 -13.1356 7.2361 16.7344 -10.8216 -#> -16.5723 -7.2646 -11.1144 -3.9776 -0.2065 13.8502 -8.2284 -9.6323 -#> 1.5238 12.3028 8.9714 17.9027 -4.7627 -8.3126 16.8880 7.9462 -#> 10.9103 -5.3810 -5.5318 5.7035 21.4334 2.3475 -2.6262 -12.9918 -#> 8.0442 8.9567 -11.8374 -5.4994 7.8793 -1.9821 3.3344 9.2903 -#> -12.1435 6.2812 10.7557 -8.9740 -0.2044 10.1391 0.1696 -13.2730 -#> 1.4029 9.6779 -5.9944 21.5273 12.4168 -2.9909 -7.2323 -0.2837 -#> -15.4937 -15.8609 -4.5449 -11.5226 10.0661 -19.1849 -13.9259 4.5161 -#> 11.5241 4.0590 -15.7215 -2.1419 -1.3034 5.4464 3.6719 15.3133 -#> -4.4282 -7.1290 -0.5408 10.7173 9.4064 -10.2900 -1.1956 -0.3817 -#> 13.4626 14.8415 -15.4962 -9.9346 -15.0116 -11.9595 17.3005 7.6668 -#> -0.6313 -3.6673 -7.9908 -18.4216 -0.2426 -4.6290 -3.6879 -7.4714 -#> -10.8215 -7.7815 6.3899 14.1215 -1.5517 -4.1354 17.5691 -0.2966 -#> 11.1281 -7.7009 6.5719 10.8587 -6.1595 -8.7532 -4.3086 15.5671 -#> -9.7915 8.7561 1.0023 -2.0281 -1.7689 -4.6823 -12.4498 -6.9113 -#> 7.7028 4.9294 -7.8488 -6.5750 -9.0153 11.6397 4.5690 -0.1016 -#> 7.1106 -1.6929 -1.8737 2.1624 -13.8151 -5.3098 15.0948 0.1795 -#> -1.3814 9.2974 5.5842 13.2400 12.6897 12.6315 3.4940 -2.6392 -#> -11.3209 -8.2444 16.8397 17.8638 5.5211 16.4997 15.2528 0.0082 -#> 3.0446 -4.4829 4.3880 -10.7411 -0.3064 -13.2129 2.5039 9.2481 -#> -6.7606 10.0870 5.7216 -5.6511 -6.1712 -2.0900 -17.3603 8.3145 -#> 6.8814 -0.1777 5.1503 0.9947 5.6574 2.5916 0.6318 -2.3832 -#> -0.4918 0.3861 4.7932 -4.0897 4.0422 -1.6061 2.8181 -8.4493 -#> 5.8768 -11.2762 5.9376 -21.8132 -10.8796 26.4854 3.2273 -6.3154 -#> 2.7235 -7.0789 -3.0443 2.1171 -7.4423 2.0577 9.3470 26.1017 -#> 12.9806 -5.9052 5.8160 -3.9925 4.2319 -11.9046 -10.8885 -0.4372 -#> 7.3300 -9.5418 7.7864 2.6138 10.5347 17.3636 -21.5352 0.4739 -#> -#> Columns 49 to 54 -14.8691 -9.1324 10.1655 0.7449 2.1667 5.3363 -#> -4.4574 -0.1098 -3.1628 -9.3421 1.4818 0.8749 -#> 4.6459 9.2058 -0.2434 0.4508 -7.6855 3.1409 -#> 4.7861 28.0300 -0.7577 3.9693 -3.3897 -2.4329 -#> -21.8895 4.7810 -9.7748 -11.4574 1.2092 -4.2950 -#> -4.4833 5.0633 4.8649 8.0878 0.2127 2.9271 -#> -5.7122 0.5988 -9.7677 0.9075 10.6970 0.2351 -#> -1.1009 15.3324 9.8893 -11.4473 4.1047 0.4827 -#> -11.1070 -3.0292 2.0740 -3.7679 4.7414 -1.2884 -#> -8.6583 15.6834 -9.2855 8.0586 0.4443 -0.7787 -#> 0.2194 4.4984 11.6936 7.5706 -2.2380 -5.1833 -#> 4.4206 17.4562 11.1093 14.2875 3.0627 -1.2111 -#> 0.6527 2.9872 14.8202 -1.0611 -1.8122 1.5132 -#> 14.1045 -4.4124 -9.2904 -4.5478 -2.2182 -0.7207 -#> -1.4828 3.6784 3.2015 2.2996 -12.7189 -7.8033 -#> 5.9095 8.7084 -5.1051 0.3851 -1.0407 -5.2680 -#> -6.3020 -4.3540 -4.5427 -1.1850 -3.6061 2.5747 -#> -1.0570 4.7170 2.8429 7.9308 2.2215 -0.9558 -#> -12.0819 4.4918 7.6590 2.5101 9.8169 4.9874 -#> 0.1378 8.4419 
-1.8811 0.1350 -1.8110 1.3977 -#> 7.6449 -6.2310 -13.1248 -9.3925 5.5972 0.2361 -#> 9.6474 5.3826 7.5009 -9.5686 -8.0677 1.1340 -#> -32.7488 -2.4500 -8.0628 12.2802 -0.2749 -3.8348 -#> 8.0790 -6.9282 3.2445 -1.6245 -2.1593 2.9972 -#> -0.7125 0.4441 -7.0608 -6.4584 -8.6841 0.2525 -#> 3.5664 -0.2363 -2.8050 7.3713 -2.8613 -0.4915 -#> 17.1820 -11.7188 -3.8804 -3.7392 -4.7100 -1.1502 -#> 8.2372 -7.1018 -3.2752 3.9507 -2.3532 1.3612 -#> 0.1603 4.1866 2.4981 3.8943 -6.4633 -2.6680 -#> 1.7882 -4.5337 6.1890 -0.3154 -10.4658 -5.8276 -#> -7.1726 -8.9011 0.1020 -3.4402 -8.8854 -3.4632 -#> -10.3952 -2.3793 -0.3542 3.8725 4.9056 2.3551 -#> 7.1893 7.1117 1.6026 -7.5178 -0.4649 2.7714 +#> Columns 1 to 8 2.7891 5.6461 11.2726 1.9882 3.1389 -7.7933 4.4229 17.3522 +#> -4.3693 7.6364 -1.5425 -3.8255 -4.4841 -2.9940 -6.4672 -16.4546 +#> 0.0327 0.0078 3.5946 -4.1353 -4.3344 -6.5809 -0.6687 -8.7116 +#> -2.7286 -3.9566 3.5076 -11.6647 -4.0101 10.4190 -2.4942 0.9421 +#> -2.9444 1.1068 11.7706 6.0877 14.4685 14.3227 7.0167 -0.0975 +#> 2.0883 0.5287 5.0323 5.7100 4.4980 -0.1426 -13.9365 0.9413 +#> -2.0620 -6.3690 3.5664 -6.0346 11.5299 10.3415 14.0979 -1.5701 +#> 4.1305 4.2571 1.9339 1.0847 15.3376 -9.6260 1.9086 12.2972 +#> -0.4064 1.8530 3.1236 -2.5911 -4.6137 -4.8262 -1.5832 7.0104 +#> -5.8752 -15.0220 -4.4444 -6.3587 -6.1152 3.2593 -11.4632 1.6653 +#> -5.8421 5.4665 1.5583 -5.9300 5.9576 -10.8424 10.3203 -3.0753 +#> -2.8996 2.3752 3.6025 -8.0325 -5.1392 -16.5147 16.3896 4.0846 +#> -1.5363 -12.6707 4.9563 -12.6877 -4.8858 -20.6553 5.1575 5.8557 +#> -1.5121 2.8635 10.2283 4.1047 -4.7667 0.5365 13.8085 7.6162 +#> -0.3384 2.2910 5.9901 8.0042 3.7401 8.7724 15.4495 0.7961 +#> -3.6876 0.6770 -0.0762 -7.1910 -8.5069 8.1194 1.8044 4.6803 +#> 3.5639 4.3075 8.1912 -3.7056 4.8064 3.7267 2.1989 13.8254 +#> -0.4432 -12.2472 7.0346 -3.1157 -10.0137 30.1598 -7.0072 6.6565 +#> 0.3731 3.8682 -2.2841 -1.9968 -18.7218 10.1465 -17.4892 11.2863 +#> 3.2327 -2.9070 4.2799 7.6125 10.6098 -8.1591 5.8310 1.0487 +#> -3.3853 -2.4264 -6.6538 -15.7584 6.6708 -2.3824 2.4382 -6.3140 +#> -0.4255 -6.4060 -1.4134 2.3033 10.4701 -1.0757 -6.2709 10.4229 +#> -1.4168 -6.1772 -6.9050 8.0146 4.7789 1.4485 -10.0194 7.0168 +#> 5.3006 3.2196 1.9223 -2.3551 8.1670 8.1758 6.6591 -11.6577 +#> -4.0528 -3.2018 -9.0353 -14.9391 -14.1932 -2.4723 1.4463 -10.5708 +#> 2.7737 -2.4013 -6.7505 -2.2005 1.7287 19.6719 3.5308 5.2615 +#> 1.2905 2.5430 10.9490 11.6910 0.3056 -2.3869 7.7539 18.0730 +#> -4.6555 7.8963 9.1508 4.2204 6.5066 1.2389 3.1907 4.6268 +#> -1.2872 -5.9298 1.2936 -4.3192 -14.3200 0.1818 -5.3164 2.6740 +#> 1.5379 6.7997 0.7273 5.0672 6.1576 4.8976 6.5064 -2.6637 +#> -6.5611 -3.4853 -4.7243 -13.0694 -2.1603 4.8007 -18.2887 -0.3439 +#> 3.6181 5.9698 -7.8726 9.0949 4.4630 19.2695 -16.6097 -3.5550 +#> 4.6032 -6.2528 -1.4542 -1.4308 -3.4709 1.5289 -6.0862 -7.9197 +#> +#> Columns 9 to 16 -13.3502 -10.9006 15.3224 17.3260 -8.3167 11.1412 -2.7912 -5.2268 +#> 4.2691 11.9275 -4.2768 -0.3375 -25.0954 9.3447 -4.0346 5.2689 +#> 9.1157 -19.2557 0.4819 3.0041 0.8986 -0.5568 -12.6354 -13.4393 +#> -1.9457 -5.6830 18.5670 20.8668 7.6358 -1.9210 -1.3930 -3.1887 +#> -3.1992 -10.8519 10.6605 5.7853 0.0061 -5.3224 -0.7501 -2.2627 +#> -6.6866 -9.3200 1.9750 -15.0306 2.2915 -3.1248 4.0134 -8.8367 +#> 7.2783 -3.3737 -0.4615 13.4524 -9.7191 -1.0670 -5.9242 12.4941 +#> -1.6839 3.4820 -18.7390 -3.8986 -2.3229 6.1428 4.9823 13.3527 +#> -3.9977 -2.0847 0.8875 7.1335 1.6811 -13.0423 -10.3999 -6.6988 +#> 9.0746 3.6729 -3.3916 -11.3161 -9.9358 10.6504 5.9969 -0.3506 +#> 2.7198 7.5492 
1.5438 3.8592 -4.6483 5.0131 1.4573 -13.3437 +#> 3.6485 5.0274 4.9936 -11.2782 0.2270 10.0538 -2.0702 -6.0261 +#> -6.6231 -11.2644 7.1774 9.7326 -0.2837 4.7962 4.5807 -3.0669 +#> -1.6483 -23.7315 4.3302 3.3571 13.7165 -0.6550 8.5681 -16.5317 +#> -1.8355 0.5975 -0.3203 0.6874 -10.1596 5.2492 -1.2169 -11.3643 +#> -4.6443 2.4558 -4.2523 8.1814 -10.1897 -4.2530 -12.1652 8.3655 +#> 5.3690 -3.9224 -3.1992 -5.3694 -3.2890 14.9747 17.4545 -5.3308 +#> 9.1016 4.0328 6.0780 15.6832 -5.6727 -20.5686 10.7428 3.9753 +#> 13.0496 -2.5721 -2.5217 -11.5500 15.4258 -1.8422 9.2925 6.0233 +#> 8.5035 -4.1750 -8.9295 5.7701 9.1853 -6.9553 -8.4313 -19.3226 +#> 2.9166 -11.0147 10.1236 -4.2951 7.6608 12.9772 -3.4486 -20.8659 +#> -11.1108 4.4085 -3.7069 6.7342 -7.9812 4.2891 12.0735 2.7704 +#> -5.0800 5.6861 -9.9988 19.4505 7.6321 -4.0207 -17.1867 -13.2480 +#> 5.4979 0.7310 -5.1859 -1.3413 -12.0993 -9.1632 4.7480 -1.7103 +#> 3.6900 -6.0383 4.4401 4.4768 14.5726 8.9373 -4.4441 15.4543 +#> -2.0458 -5.3701 -0.7855 9.2217 6.3305 -0.1386 -1.3346 -0.7774 +#> 4.2399 -16.3759 7.3887 -1.5502 9.8142 -2.0231 2.8552 -9.4405 +#> -15.3747 -0.1222 20.4641 9.7776 -9.1597 3.6420 -2.2226 10.6921 +#> -15.7407 -12.6524 20.7552 1.8564 9.5878 6.5725 1.0907 -4.1684 +#> -5.9031 16.2492 3.8512 -14.0848 2.3558 12.0028 -12.6915 -14.5405 +#> -3.0352 -2.2557 1.2893 6.8459 8.3554 5.8113 1.5446 9.2033 +#> -14.0303 -3.6214 3.6899 -4.3906 -14.9126 14.7248 0.4015 -11.0723 +#> 4.9811 -13.0231 6.2604 3.8745 4.4099 -0.2151 -3.7590 -29.0314 +#> +#> Columns 17 to 24 0.5903 1.2292 4.4785 13.8327 5.3019 5.0653 -15.5849 2.4288 +#> 17.6620 9.9821 0.2180 -11.9256 0.3329 -6.3259 -2.0938 21.7199 +#> 1.8970 1.9715 -1.2359 1.0619 9.6286 -2.9894 -12.5589 1.5861 +#> -5.1145 5.7224 -15.1579 12.4867 -1.6384 11.9790 2.1090 -10.0775 +#> -3.0872 7.7246 5.7323 4.7021 23.8640 -1.3152 5.0465 1.0895 +#> -18.0686 -7.3630 -11.8334 13.1770 13.5497 -10.7829 14.9718 13.9289 +#> -1.3626 -2.7023 -3.1864 -12.9975 -15.3546 -0.7801 0.1602 15.4471 +#> -18.7058 -0.9208 -15.8708 11.7466 -14.2495 11.2790 6.0995 25.0583 +#> 9.1240 -1.4579 -20.3905 5.6573 -0.7184 -5.6087 -6.3994 13.0453 +#> -2.4425 -0.7989 6.2329 -13.7502 -6.8374 10.4169 0.8800 2.4768 +#> -9.2049 -7.4061 -2.7655 -6.5597 -0.7682 19.4496 3.7899 3.6785 +#> 13.0414 4.9492 -11.5959 -21.0042 15.5289 -3.2080 -14.2575 13.0175 +#> 9.2355 -1.7161 -1.4585 -1.9030 -4.3101 12.4132 0.0779 10.9919 +#> -1.0625 9.6083 -7.3377 3.5531 -3.9269 1.4615 24.4355 14.5865 +#> -3.5285 1.3685 -5.5002 -5.1391 -7.4575 -1.4907 4.1108 4.2759 +#> -4.8534 -18.5523 -4.9735 -2.6078 -3.6643 4.0607 1.4195 7.1534 +#> -5.0158 -11.4926 -4.3159 -6.3126 -20.6569 3.9367 10.3564 2.0483 +#> -1.6701 15.4250 -8.6130 -7.0511 -4.8020 1.9636 8.4343 -4.1786 +#> -0.9257 -19.0060 2.1396 -10.7141 -11.3384 -8.6834 11.7603 0.7399 +#> 2.0826 -15.4453 0.2871 6.8400 0.7255 1.9984 -11.5148 -1.4323 +#> -12.0348 -20.5719 10.4165 1.0737 7.7002 -3.7523 1.1721 -1.6846 +#> -9.7998 9.9020 -8.4249 13.2797 -10.6123 -1.7268 7.7460 -22.1409 +#> -9.1708 8.8775 -19.2015 10.1956 -0.8793 -3.3652 -7.5626 -20.8103 +#> -15.5172 13.8040 5.4655 4.2733 -6.6152 3.4871 17.4479 -0.1538 +#> 28.4350 10.8799 -11.8118 -1.1315 5.3741 -19.2895 -0.1006 2.0684 +#> 5.7723 5.5244 -4.1145 8.3152 12.6638 -4.6514 0.9151 -8.6046 +#> -5.0254 -2.3961 -13.0114 2.8701 13.2636 -3.8114 11.2217 6.7448 +#> 8.7154 -0.9041 -11.4255 2.0557 -1.8496 -18.7856 -2.5219 -7.3274 +#> 7.9779 -24.7783 10.9781 -11.2546 -7.0707 -1.2906 13.9495 -1.3816 +#> 2.4164 -2.9488 0.4156 -4.9939 8.6019 -1.6816 -18.8522 -5.1339 +#> -13.3422 -6.2524 
-0.1487 8.1645 -7.4022 4.6731 1.7811 1.5432 +#> 10.4236 -16.5892 -13.3360 28.1620 -1.8771 -9.6515 9.4042 -0.3092 +#> -8.9722 -21.2798 22.1872 3.7967 0.6152 -1.5334 4.3920 -8.5679 +#> +#> Columns 25 to 32 -1.6424 -2.4765 9.3092 -11.3510 9.9788 5.8714 5.4902 -15.9698 +#> 12.4106 -0.6024 -11.0791 8.7927 1.0994 -17.3039 -26.4455 -13.0543 +#> 2.2594 -5.4852 -3.4057 -12.2934 -12.9057 4.9216 3.0801 5.6059 +#> -1.8950 -11.5884 -2.7283 -1.9462 1.7399 -5.8023 -3.7597 -10.0243 +#> -14.3559 10.5322 6.8887 -0.4751 15.8738 -0.9240 10.1749 -4.9763 +#> -2.6932 0.5575 -0.6663 1.8782 4.0940 -6.2931 4.2382 -16.0185 +#> 2.6599 12.1880 2.8019 2.6629 11.2282 4.3514 -5.3328 0.9144 +#> -8.6377 19.9402 -3.4128 18.9311 -10.6883 5.2146 0.0675 -7.2361 +#> 4.0462 5.1910 -6.2741 2.8696 8.4610 10.5984 12.8530 9.1585 +#> -12.2206 1.3749 13.5455 -12.3921 2.5813 -5.1370 -4.7817 6.4752 +#> 20.4107 20.8562 -14.5576 -6.0688 0.1366 1.5356 -10.7728 -3.7383 +#> -1.8856 12.4265 -15.4950 -18.6076 4.0694 14.0289 6.8498 5.6447 +#> -6.9311 3.9407 -0.4233 -10.8357 -3.1333 -0.6132 -9.7033 -9.6968 +#> -9.2422 1.9215 -12.0507 9.8397 -6.2344 1.0666 9.6721 2.1904 +#> 12.5287 15.5544 16.8050 2.7052 0.9099 9.5372 6.1023 -6.0156 +#> 13.4405 -7.6034 -4.4066 -8.2761 3.8420 -9.2116 -0.8592 -4.7290 +#> 1.4604 -9.0550 3.2961 -2.5062 -6.5379 1.2413 9.0860 6.8267 +#> 1.1640 -8.4296 -0.6172 -6.5487 -5.0897 -2.5419 -3.1881 -11.7350 +#> -4.8023 -18.6926 -5.3272 4.1101 1.2480 -1.6659 -17.4415 -6.0486 +#> 7.2006 3.4588 20.5035 4.3221 4.4193 7.5548 -2.9902 -2.9720 +#> -9.4837 -8.7489 -17.2334 -2.5913 9.0866 8.9060 -1.0552 11.8623 +#> -23.3699 -13.4060 0.2063 -0.1960 0.0870 -16.9977 1.6882 2.1954 +#> 9.0821 -2.3378 2.1622 -0.3262 2.0681 4.5297 5.4300 5.7771 +#> 6.7068 6.7965 -2.2693 2.7424 -5.4353 7.3393 20.1439 4.3987 +#> 17.7561 10.4224 -12.1841 -10.5163 14.1629 -3.1522 -18.3901 -8.9442 +#> -2.9116 -16.6984 -9.8374 -4.9267 14.1725 13.5858 3.1658 -6.9405 +#> -2.5623 12.7188 9.3896 4.3581 -7.7263 1.2252 3.7008 0.0457 +#> 10.8466 7.9068 7.7756 -2.1754 6.9143 -6.5399 3.6235 1.2716 +#> 0.2789 -1.6467 16.3595 0.5726 5.6597 13.2393 5.3822 -8.2157 +#> 18.8431 3.2904 13.0234 -9.0880 7.3523 3.3317 15.0342 4.2243 +#> 7.8590 -6.2939 10.7964 -2.9619 7.0402 -5.6279 -1.4979 7.6217 +#> 32.7250 -11.7462 20.2352 17.2657 4.4180 -13.8541 11.9462 -20.9230 +#> 1.1396 -12.7938 7.1452 0.5081 -4.8631 5.3434 4.9601 -2.3946 +#> +#> Columns 33 to 40 -0.2619 12.7161 10.7242 15.4972 -12.7757 -11.5587 -8.9834 -12.7978 +#> -13.9851 -2.0933 -6.0986 -6.2693 -0.6607 23.6650 -7.4972 -0.4155 +#> 4.3713 6.8333 -1.7626 5.5788 0.8095 7.2280 -10.1679 -0.3738 +#> -10.6305 -6.8019 11.1522 -0.0075 -7.1234 3.4740 -1.1015 -10.4914 +#> -17.5438 24.0233 -17.5758 -0.3541 14.3009 -7.6145 7.9742 6.2297 +#> 5.7447 -5.7513 -9.7239 -2.2230 -2.7103 -7.5661 4.4542 -5.4678 +#> 7.4828 -1.8481 -7.2217 1.1582 1.7979 -10.9675 10.1704 7.2448 +#> 6.3663 9.9856 4.2320 14.0903 -2.8358 17.7403 -24.4428 -6.7781 +#> 11.9790 2.8801 -4.8997 -0.3844 -16.0260 1.7895 -14.0649 6.7327 +#> 3.8283 -20.5070 -13.9063 3.6265 -1.7224 4.8390 11.2439 8.2543 +#> -4.5104 1.0990 -7.9801 -0.3267 -9.5771 15.7856 1.2381 -2.8860 +#> 2.1250 8.0618 -4.4884 -17.2684 0.6274 9.9014 8.3939 13.9658 +#> 8.2922 -1.9465 3.2701 -4.6437 4.2321 2.4474 0.8845 -2.2747 +#> -13.0043 -16.3209 -9.6747 9.0214 -7.6230 10.8326 -7.8567 -8.1656 +#> -7.0567 -4.7429 7.5084 -12.7796 -10.0529 -3.0678 -10.7715 -6.5500 +#> 4.8686 4.5588 -20.7367 -5.0031 9.2900 -5.3462 6.4064 -2.0021 +#> 7.0105 -8.8632 7.5515 -1.7277 -7.0501 3.2268 -16.9346 1.2673 +#> -1.2122 -8.3589 
-11.9257 -5.2638 10.6705 10.1972 20.4345 -14.5363 +#> 10.5597 -0.2706 -17.3127 -6.0143 2.6291 14.3160 -9.3253 16.0324 +#> 2.3626 7.3991 6.0970 -1.0597 1.6514 -14.9038 -7.1639 -3.1538 +#> -10.3637 -9.0861 12.4378 -9.6552 4.5593 -1.1351 -0.8175 4.1669 +#> 10.5016 4.8836 4.7881 6.2513 -4.8725 8.8854 -5.2061 -12.0160 +#> -3.8515 3.6451 14.1298 -5.3823 -15.4148 -1.7431 6.9765 4.0277 +#> -3.8197 7.7999 -4.2841 3.9069 -19.1509 1.8881 -2.7426 9.3630 +#> 6.0422 -2.9553 5.0320 -2.1669 5.1896 0.2393 3.9858 -3.5060 +#> 5.3165 7.4829 22.5830 -10.8877 0.1544 6.5125 -10.4479 1.0537 +#> 5.2859 13.8024 -2.3394 6.6578 -9.8351 -7.5973 -11.8256 -3.4882 +#> -2.9636 0.7640 -1.8511 10.2530 -0.3724 8.3940 8.0818 -17.0425 +#> -2.5797 -16.9501 12.7641 -4.4602 0.3296 -7.6189 -3.5270 -1.6177 +#> -7.8829 -8.2571 14.9438 -11.2490 -9.1534 -8.5257 -8.3059 9.0745 +#> 2.2279 -5.0084 2.0301 3.6759 2.2980 -9.7950 0.5601 -10.7693 +#> 1.7267 -15.1450 11.2855 1.0800 -2.0005 3.6635 -21.7263 2.5000 +#> -3.5535 -14.2293 3.0653 -6.4028 -9.7550 -7.3614 2.3882 10.9056 +#> +#> Columns 41 to 48 -0.2569 -9.5139 -4.2866 -2.9261 -12.3800 2.6025 10.5975 -13.0938 +#> -6.5529 -1.2092 3.5724 -10.5480 -11.9196 -5.3149 -17.9613 9.1899 +#> 1.3782 4.1817 -7.9139 0.1709 4.2652 4.2287 9.1069 -1.8842 +#> 17.8498 5.9753 4.0421 -11.4002 -4.4282 -3.4945 -4.7283 -6.2333 +#> 13.6016 -8.0730 -17.3541 -3.7150 -6.7762 0.1805 -3.1156 -7.3046 +#> 7.5814 -11.6236 8.9773 0.9687 -0.5822 3.8360 9.0609 2.7697 +#> 4.8157 -1.9007 -3.3362 -4.6769 -2.1371 1.1593 -6.6814 -13.3274 +#> -10.6753 -16.6580 -5.7623 -12.5975 14.0259 -1.2657 0.0719 -11.5077 +#> 16.4422 12.9995 4.5997 7.5588 5.5025 -3.2281 3.3950 3.4887 +#> -7.3649 13.8673 -19.2715 -2.2774 -12.7737 -2.3281 3.1734 -4.8774 +#> -7.1085 2.0625 7.2735 -14.2468 -3.7661 -2.4637 -2.5334 7.2650 +#> 4.5905 21.6741 -12.1799 3.7120 -5.2773 6.8621 15.6738 11.2954 +#> -1.3936 3.1606 -12.2960 0.0098 -7.2469 5.4153 6.4829 -11.2436 +#> 6.3742 -0.7576 4.1856 1.3057 -7.2457 -11.4027 1.6516 -0.2674 +#> -19.8319 2.3468 11.4316 14.1998 1.8338 5.0958 -6.7178 2.4970 +#> 3.6518 -0.3007 4.1743 -12.7341 17.8384 13.4231 13.6527 -1.1151 +#> -1.5634 4.3176 0.2550 7.8061 -7.5150 2.0949 12.4934 8.2512 +#> -5.2670 10.3745 4.6012 -21.4855 0.3423 2.2103 11.3690 -21.6348 +#> -8.8169 10.5383 0.4247 0.0161 2.6061 11.3677 4.2398 -6.3025 +#> -0.6777 2.8157 -12.5293 -0.8502 -12.2659 8.5000 -1.4116 8.4668 +#> 7.5280 5.8360 3.6033 15.3199 6.4199 13.4714 -7.0552 -12.8062 +#> -7.3435 -8.2162 4.7295 11.5650 5.9780 6.6564 -9.9283 -2.4459 +#> 20.3087 8.9068 5.2203 -13.4446 4.6801 1.6056 3.1806 7.3509 +#> -6.7363 -1.3154 -3.4538 6.5529 0.7935 -2.6589 4.7125 5.5222 +#> -2.0835 -5.3218 -18.6172 -11.0490 -2.2373 7.8153 -12.7983 -7.3268 +#> 1.7505 6.4871 -5.2299 -8.5925 2.4491 8.8033 4.8963 -6.5391 +#> 6.2808 -3.8198 -17.8336 6.1843 16.0659 -1.4005 13.6031 -9.3461 +#> 6.4763 -13.9038 3.5939 9.4351 -9.8249 2.5886 -8.1894 -8.6189 +#> -10.4219 6.3559 -1.9367 3.6815 -12.9632 -3.0950 1.5879 -0.3378 +#> -13.0734 -9.6310 -12.4734 -15.8620 -13.2280 25.8353 1.9425 5.3443 +#> -1.0289 -8.7732 -12.8032 3.3817 9.9112 7.1954 4.1474 -5.3028 +#> 9.4079 -10.5230 -12.3552 1.5561 11.0246 2.5046 -4.1856 7.8783 +#> 10.7292 2.6453 -1.6227 4.8396 6.4339 8.1401 -0.2312 -0.1694 +#> +#> Columns 49 to 54 -8.5980 -6.5352 1.1090 -0.7174 3.1088 3.6585 +#> 7.2771 16.4819 -4.7899 0.3398 2.7190 -6.9382 +#> -1.0847 -14.3245 -1.4476 3.7634 -1.1584 1.3540 +#> 5.8353 -19.0570 -8.7585 -8.0196 1.2538 2.6888 +#> -5.8740 5.2906 -1.1292 1.9004 3.7302 -2.8623 +#> 8.4203 2.1144 8.5652 6.3670 5.8550 -1.8026 
+#> 0.0371 17.3777 9.7877 3.2500 1.9215 -4.5138 +#> -11.1362 -11.2049 20.1779 -1.5833 5.2968 0.2506 +#> 2.9318 -10.4597 7.5047 1.9741 -1.1214 3.3391 +#> 4.3523 10.0785 -2.7405 1.8364 0.4232 -3.5657 +#> 1.8942 -9.2101 5.7043 -6.8886 2.1016 -1.0968 +#> 3.6432 -5.1992 -12.4703 -3.3176 2.4760 -0.4288 +#> -9.1095 -8.0913 5.7376 -3.6167 -4.1750 1.4780 +#> -1.5986 7.1605 2.6784 -12.7939 -5.6978 -2.1152 +#> 3.4665 7.1060 -8.1521 -2.9817 -3.6896 -3.6559 +#> -8.3366 7.4986 -5.1208 1.1779 -2.3170 1.2639 +#> 0.5367 0.7394 10.1420 3.0481 -1.0416 -3.6655 +#> -0.0778 6.1623 -5.8228 3.2258 -5.6881 1.3782 +#> 10.4587 -2.5889 -1.9695 3.4009 -5.9180 2.5792 +#> -3.6272 -6.1581 5.3198 -1.2878 -2.7789 2.4301 +#> -10.0869 -3.6536 -1.9521 10.3700 1.1489 -3.8861 +#> -1.0637 -15.3969 10.2382 4.8546 4.3392 -0.8770 +#> 6.0377 -8.2065 6.4166 -2.2024 -7.9182 -0.0882 +#> 3.1750 -0.5693 7.1182 2.9453 0.0960 -6.6956 +#> -5.8022 -10.2795 -8.4465 -0.4040 0.5724 -2.2128 +#> 0.7772 3.8755 1.6506 1.9345 -2.5504 1.3202 +#> -8.0783 -2.7738 -3.6769 9.8188 -7.2214 2.4513 +#> 4.7323 2.3894 -0.2689 -6.2017 -4.2145 3.0817 +#> -8.0090 0.0731 -6.2789 -7.6835 -3.4595 6.5530 +#> 6.2038 -11.0191 0.9809 -3.1851 10.0065 -2.9394 +#> 1.4329 -16.1954 -7.7916 4.3592 2.5994 -1.6995 +#> -0.9031 11.7154 -4.9832 -8.6200 -0.8326 -1.8789 +#> -10.6094 1.5200 -6.3085 6.3027 -3.7259 -1.5837 #> #> (10,.,.) = -#> Columns 1 to 8 2.2363 -2.1157 -3.9890 -1.4472 -12.1587 1.9112 -0.6772 2.6816 -#> -0.5097 0.8234 2.5852 -2.9076 -6.4124 -4.2266 -6.0710 1.3366 -#> -5.7555 2.2560 4.2838 -1.0270 8.4408 16.8556 -2.1342 -1.6465 -#> 0.5127 -2.0896 1.1144 -0.6333 -3.0143 -9.4323 -1.9678 1.8573 -#> 1.4213 -2.9222 6.7214 -9.3739 3.3599 -8.5956 -6.5851 -9.5308 -#> -4.4239 7.0383 1.5490 -1.8766 -19.2846 -8.9789 6.5690 -12.5547 -#> -4.9949 2.4635 -9.9984 3.4919 3.7359 -2.8006 -7.1007 19.4063 -#> 0.9228 1.9885 -0.7494 10.7508 -14.2172 14.6552 7.3359 -7.5264 -#> 8.4246 -6.7451 0.3721 4.4351 3.0836 -5.5432 9.8858 13.3133 -#> -4.4414 2.3311 8.9908 -10.3337 -15.8835 -5.9958 -0.2566 -16.4942 -#> -6.2991 -1.0376 8.1352 2.4895 7.1721 5.1369 5.8482 -9.7590 -#> -0.2901 -10.0629 -0.6162 4.4312 3.4176 -8.6422 5.5362 -2.1296 -#> -2.1814 -7.6086 5.7908 -6.4812 -8.6060 5.2281 4.8243 -3.4728 -#> 3.2813 -1.3609 -0.0201 -12.3297 -2.0040 -1.9826 -3.4319 15.8522 -#> 0.5173 -4.4348 2.7485 4.3449 -20.0676 -13.4316 6.2142 -15.6838 -#> 1.4934 1.6267 -5.8460 -4.2022 7.2496 -0.9272 0.6064 4.2118 -#> -7.5285 1.3303 3.2797 6.9902 -10.4241 -16.2242 9.1904 4.0080 -#> 5.1461 -5.2510 0.6457 1.7364 -2.2899 -17.4661 4.1976 10.2075 -#> 5.8689 -0.0052 -11.2868 -1.8224 19.4149 8.4131 -1.6432 4.1972 -#> -1.6723 5.1238 -1.1278 -8.6816 -5.3589 16.0121 -6.3449 -11.0641 -#> -3.6626 -7.9673 3.6362 -7.0441 6.2639 15.9587 -14.8696 14.3943 -#> -1.4947 -3.7085 3.2552 10.2461 2.8955 0.1107 13.2408 -2.0492 -#> -0.9754 5.7900 -2.2947 0.9517 18.2419 -2.0235 -7.2679 -3.8560 -#> 3.6322 -3.7929 -5.4700 2.1984 -3.2789 8.3245 5.7207 18.7523 -#> 5.2059 -0.2214 -2.8451 1.5933 -0.0468 -8.6477 0.1390 1.3842 -#> -1.2361 -0.2643 -4.9882 -0.0226 8.1592 -3.9915 -1.6402 -2.3398 -#> -0.0395 -5.7144 -8.8134 -10.7175 -6.4796 0.8209 4.4671 -2.2380 -#> 4.9342 4.5964 1.2061 0.4309 2.9231 0.1605 -1.3336 -8.6909 -#> 6.0918 -0.9102 -4.8624 -10.4539 10.7629 -5.7115 8.8977 1.2112 -#> 4.2715 -0.6758 -2.4629 7.5929 -9.4385 -23.0817 6.5401 0.1794 -#> 1.2896 1.9627 -0.7723 5.5046 3.7965 0.8929 1.7333 -12.3365 -#> -1.6969 0.4768 -2.0244 8.8202 -0.3477 -9.0021 -7.3399 15.2483 -#> -5.3663 -2.8496 1.1370 5.4573 -9.0503 2.6561 5.2536 -5.2445 -#> -#> Columns 
9 to 16 -1.9999 0.2756 -3.0347 -12.2593 3.9656 -27.9082 6.1364 -6.9188 -#> 2.1805 3.2268 -20.1301 -2.3668 -1.9788 -0.9791 7.7772 -13.9050 -#> 10.4914 -14.8245 5.8770 -2.1505 4.3295 17.9494 3.2332 15.6589 -#> -11.3138 20.1980 2.4907 1.7703 1.7816 12.1020 -16.8125 19.6667 -#> -12.8666 9.0573 -20.4490 7.0089 -22.6402 -28.0731 -14.0704 2.2834 -#> 3.1369 -21.4233 -1.3923 -9.6448 -9.6338 5.1032 -20.7964 -10.3669 -#> 4.1008 9.9791 9.0406 -1.1264 -4.4379 8.5063 -1.5124 8.5893 -#> -11.0239 -6.4762 10.6674 2.0926 -1.1683 15.1673 10.6290 -4.3064 -#> 9.1525 0.5704 3.5861 7.8905 16.3161 -14.5311 7.6131 1.7053 -#> 5.6472 -29.9151 -6.5300 -20.4768 -10.8918 -3.2348 -23.2803 -10.3696 -#> 9.7585 -10.2933 -3.6008 7.3111 9.5163 11.7164 0.6670 -3.8617 -#> -17.8701 -2.2579 4.9358 -3.7360 -7.5334 2.2365 -1.2352 -15.2224 -#> 4.6645 8.1105 -11.1601 3.5535 -20.6257 -13.1826 15.6582 -9.6127 -#> -1.1510 -13.3487 -2.8594 -3.8370 4.1328 7.3028 -8.5192 0.5595 -#> -3.8685 -14.6638 10.8199 2.5929 -21.6798 -18.7749 -20.0722 -6.4741 -#> -3.6035 -13.9078 16.3609 15.9312 25.7936 -5.2368 -4.4059 1.2649 -#> -8.0685 -13.5076 -16.3485 9.0274 3.7127 -14.7530 -16.8515 -5.1507 -#> 1.5580 -15.9283 5.5596 -4.7943 7.0539 2.2376 -9.1937 4.7592 -#> 2.2859 8.7128 -2.9579 -8.4410 1.2170 -10.6162 7.5178 -5.7933 -#> -18.0433 -5.6030 -8.8691 8.4996 -26.8533 -17.3229 -0.6320 5.0746 -#> -7.9684 18.0999 -9.5078 -11.4769 1.3021 13.0954 -2.8317 -3.7281 -#> -2.6961 2.3346 -4.2845 -11.5774 -4.2805 -2.6057 3.8918 -0.9071 -#> 3.4042 -13.9383 -8.8197 3.8370 -11.1391 3.3404 -2.8338 21.0822 -#> 16.8104 11.1530 13.4996 -9.7915 8.7716 -2.5054 12.0095 -11.5617 -#> 14.1501 1.8761 6.3314 -10.1340 -7.7371 10.3086 13.3223 0.9572 -#> -0.6895 9.7227 9.2780 1.8902 -8.7706 -9.8212 3.1535 -13.4027 -#> 9.2930 3.7463 1.2078 -19.1367 -7.3601 7.2493 -4.3065 4.2799 -#> -3.2989 -6.0517 -4.5894 3.2898 14.5887 4.2458 4.4147 16.0410 -#> 5.0267 8.3714 0.1451 6.8633 16.1347 -1.1124 5.0697 20.6016 -#> -3.2677 -17.3143 -7.0599 0.8349 -13.2482 -17.2869 -13.9283 8.5212 -#> 21.3014 -3.3553 2.7272 -0.7305 -11.4003 -4.8428 9.4332 1.6574 -#> -15.5308 -11.2574 -10.1778 -22.1764 -18.2551 6.8741 -14.3040 7.3803 -#> 12.2890 -8.7037 -0.7001 -30.3416 -4.4090 7.3282 -19.0024 5.6816 -#> -#> Columns 17 to 24 11.3415 1.8475 1.1421 -1.9459 -20.8720 -4.1786 15.6824 -8.0598 -#> -9.8086 1.3116 8.6122 -8.0612 -3.7797 -2.9260 -5.7208 0.0336 -#> 5.2273 11.1455 1.5187 3.8744 -12.5833 -14.4831 -12.2603 7.3923 -#> -25.1665 6.8883 -12.9580 0.7983 6.3732 0.4855 -4.4769 0.0224 -#> -23.7427 8.5282 -13.7325 2.0021 -7.4680 6.8203 1.4800 2.3046 -#> -10.6971 -2.8438 -5.5806 25.1138 -11.0431 15.1867 -4.4431 15.7058 -#> -9.2288 0.0881 -3.0729 -16.7718 0.6031 -6.5455 -2.6865 1.0175 -#> 4.0480 10.4326 11.0936 -11.2328 -8.6291 4.2397 -7.3878 0.1333 -#> 1.5080 1.1348 12.0883 -6.0027 -18.7562 -11.7351 7.0525 -7.5212 -#> -16.6178 14.5494 -7.9325 1.3820 -6.5049 3.9571 2.1005 -2.2390 -#> 30.8940 13.3603 -0.6191 2.7103 -6.4713 11.2503 -9.1462 -2.0153 -#> -2.8805 -5.2401 -11.1418 -16.3078 -3.2876 6.3194 4.8781 2.4407 -#> 7.1361 -22.5099 -10.3275 2.2974 8.9481 12.9058 9.1893 -6.4378 -#> -23.3701 5.0497 -1.9278 4.7373 -1.8211 -5.9550 -10.0134 -10.5139 -#> -11.5440 -10.2881 -7.1974 -0.4086 14.8722 15.4445 5.4690 -6.9529 -#> 10.5349 8.7367 -5.6713 -8.3642 4.2299 4.1340 2.8754 -13.9507 -#> -0.5988 5.8704 -8.2566 5.5887 -14.3388 7.6471 -11.1970 3.2961 -#> -0.2537 10.4046 -9.4422 -10.4438 -7.7936 -14.9798 10.1950 -15.7083 -#> -21.3917 6.3735 6.5893 -2.8601 -14.1835 2.0417 -2.5771 -13.8505 -#> -23.9282 -19.6325 -5.5975 -8.8023 
1.6138 0.0271 5.3163 4.2856 -#> 2.1600 -5.2791 -5.4355 4.8082 1.0244 2.1854 -3.3372 16.7090 -#> 6.8382 -2.4120 21.7143 4.5327 -0.8197 4.1578 7.4679 3.8636 -#> 4.2070 18.5672 -7.0864 5.2927 -12.6891 -6.0291 0.3712 6.0586 -#> 8.6083 -6.0751 1.5819 -4.1973 6.8597 -7.1708 2.9634 -7.9239 -#> -9.8341 17.0530 13.1495 1.2774 17.5114 1.3631 -12.2239 7.6702 -#> 0.1338 -17.9398 5.5986 -1.6880 3.3359 -13.7695 15.4135 -8.5532 -#> 19.4245 -18.9081 5.8314 -7.6547 8.9253 -9.3756 5.7626 13.6244 -#> 4.7026 22.9031 8.1282 5.4685 -7.1700 -1.5186 2.0961 5.5197 -#> -14.1119 6.1434 1.1908 25.2888 -1.2966 -10.8851 -0.1169 12.7462 -#> 5.1816 -5.7916 11.5887 1.7730 15.3466 9.7472 7.4484 6.8338 -#> 1.6397 -10.5599 -10.0957 11.6892 8.6562 -9.3152 -3.9449 -10.9035 -#> -4.4536 4.4691 15.2725 0.5402 3.1361 -6.1849 2.5591 -6.1711 -#> -1.8157 10.1398 -5.5183 1.7201 -1.8657 -3.3718 1.1055 2.2464 -#> -#> Columns 25 to 32 6.4815 5.6893 -0.9227 -3.1664 -6.2062 -4.3568 4.5600 -3.0158 -#> 12.8531 2.1956 1.1480 9.7768 -3.6533 -9.2790 -7.2291 -0.9515 -#> 4.8664 -1.7881 -7.7357 17.3084 -8.7027 13.6272 1.0505 20.4756 -#> -17.7767 14.0274 -2.2175 1.9212 -6.6565 5.5522 -2.8884 -1.8843 -#> -0.8330 11.0690 8.7098 1.9187 0.7351 3.5510 4.4887 5.0016 -#> -4.3257 8.1221 2.6468 0.4250 0.2066 2.7744 -10.2055 5.2186 -#> 1.2346 0.7154 1.0156 6.1360 4.9314 -6.6754 13.3389 1.6581 -#> -1.7103 -5.5087 -3.7728 10.8461 -4.9028 -10.6431 -5.0634 -2.1074 -#> -0.2228 -0.4752 -7.8371 1.3122 -15.3493 -8.4393 -11.0277 8.3516 -#> -5.9093 9.9790 -1.4842 1.6851 1.4589 12.7874 4.4788 9.0027 -#> -0.4178 -19.4052 1.5148 -3.4644 -5.5994 -8.1708 -26.6034 5.4712 -#> 3.9157 -0.2716 19.1154 -0.7381 -3.5439 6.2129 19.1589 4.5414 -#> 7.1570 11.1482 -7.5456 -4.1460 -4.8931 13.3967 -3.7882 7.5528 -#> -1.6330 -0.4864 1.6889 -9.7207 10.3118 -3.4479 -9.1503 -2.1856 -#> 5.0068 12.6816 -11.8438 4.0864 0.2849 7.5290 8.5680 -6.5796 -#> 1.9399 2.8008 -0.7312 -21.1728 5.3786 -3.8231 7.9454 1.1585 -#> -4.7803 18.4195 -2.4929 -5.0381 12.4804 -1.6395 -1.5868 -1.6330 -#> -13.5353 -7.7027 0.2612 3.4031 2.1050 9.2101 3.0357 -8.1287 -#> 2.4584 -16.7182 -4.7704 -2.7177 -5.0968 -22.1300 -13.1682 5.2942 -#> 8.8140 6.0968 -12.3037 20.1467 3.0207 16.8412 -15.3083 24.1607 -#> 2.2434 -1.5179 9.1971 -1.3209 -6.3701 10.1950 4.6083 7.7751 -#> -8.9829 -4.7765 7.2443 -5.7954 -16.3653 -1.3180 -6.4603 -1.6012 -#> -2.6718 13.0925 -13.5202 7.0508 4.7404 7.2232 4.7078 12.7123 -#> -0.9451 7.0915 -2.9803 -0.7490 -5.6806 -15.7917 0.3480 -9.7282 -#> 22.9171 -2.5859 -2.9637 12.2590 3.6550 -3.5028 8.7403 -2.8165 -#> -6.8268 1.9172 -9.0970 4.3419 -7.7849 7.8244 -9.3282 -0.7085 -#> -11.6038 0.0854 1.1722 3.9985 -2.3884 10.6390 4.8932 -1.6232 -#> 6.2525 1.6452 2.3449 10.9179 2.4850 -11.0971 -8.7406 -2.9657 -#> -4.8382 -4.9508 1.7669 5.4140 10.2555 0.8560 5.6142 -5.5381 -#> -12.4554 7.3132 3.8855 2.8679 -12.0267 1.9528 -11.5046 0.5150 -#> 1.5025 -1.3696 -11.2275 -4.3624 -0.0759 0.8373 -6.3970 -9.6812 -#> 4.5267 8.3538 -7.7346 -3.5750 1.7459 3.5132 -3.8898 13.1212 -#> -18.6642 5.1290 7.8952 -2.3550 4.7407 -7.8264 5.7572 -14.2221 -#> -#> Columns 33 to 40 8.0496 -4.3295 1.8407 -8.9827 7.7951 0.9815 -3.3443 -10.9863 -#> -3.0410 10.0320 5.7510 3.9766 -7.4743 -2.5651 2.2061 -10.4112 -#> -10.6715 -1.7391 -1.1629 1.3610 -5.0009 0.7881 7.1177 11.1729 -#> -18.8247 6.3556 -7.0350 11.2825 13.5470 17.5848 -3.1697 3.9358 -#> 0.6382 2.6878 3.0333 10.8931 -6.3605 29.8396 6.9935 7.7576 -#> 9.3820 1.4815 -0.3291 -6.9713 -3.5959 9.0425 5.1229 -5.8787 -#> 1.1551 6.9779 7.5625 -5.4963 3.3594 -3.2003 -8.1196 3.4297 -#> -9.8684 12.0555 
-15.4573 1.9966 -8.1726 15.2350 8.3469 1.2325 -#> 9.0008 5.6839 1.3163 -6.8948 -17.9637 1.1142 -2.0898 -4.6909 -#> -3.6163 2.3786 3.5571 -3.2093 1.1328 16.8561 -1.2516 7.7491 -#> -1.7443 -8.1475 -6.9335 21.6469 0.9863 -0.8011 5.3470 -11.8187 -#> -4.3721 7.7496 -15.3290 -14.7002 -4.6889 9.6844 5.8437 -5.3226 -#> 9.4806 -3.6792 -5.3814 2.6639 0.3295 9.1594 -12.4843 -13.4395 -#> -15.7484 6.3044 -6.4635 10.6292 -1.2039 1.8374 6.8766 6.9814 -#> -4.5046 -9.6416 -6.1771 1.9989 12.0743 0.4090 9.4904 -8.5855 -#> -19.5573 -13.9309 4.8088 -15.0457 15.1453 7.5878 1.0427 -0.1884 -#> -8.1178 -12.3319 10.5809 0.4396 -3.8857 7.0447 21.8247 2.4879 -#> -10.7616 21.6806 -9.9731 -0.2034 -8.8837 13.3950 0.4176 8.9088 -#> -1.9661 7.1422 7.4603 -2.7760 -14.4412 -18.2499 3.2360 10.8341 -#> -3.7387 -1.8276 -4.5835 -1.3713 -8.7792 8.4404 4.5047 5.8481 -#> 0.9969 2.9623 -11.4680 -2.5941 -0.0900 -2.7210 -19.6152 3.4699 -#> -7.1210 -11.8914 1.7412 -5.4167 4.6137 -12.9696 2.6714 5.5981 -#> -17.3747 1.1312 1.7433 8.5802 -1.5415 14.1470 -9.4081 3.8514 -#> 9.3312 -0.7556 22.4715 -8.4808 4.1353 -12.0296 -13.0816 -17.7008 -#> 1.0289 -14.4718 4.8107 13.3372 -2.9651 -10.1529 -4.4134 -15.8602 -#> -7.1483 6.0918 6.7277 0.3636 5.1824 0.2863 -7.3600 16.8108 -#> 8.6143 12.9924 17.0444 6.9125 -0.3173 -12.9158 -9.7363 -15.0327 -#> 12.7596 7.4542 4.2434 9.1233 -2.8078 -10.4957 16.8506 4.0370 -#> 0.6383 -4.2978 6.9013 2.2868 -4.1186 -13.9069 3.5185 12.9860 -#> -27.9366 11.4565 -8.4703 7.0572 -0.2669 2.7892 11.4372 -5.5336 -#> 2.1539 -15.5910 4.9511 5.3579 5.9842 -6.1158 2.0469 -9.6203 -#> -3.7822 7.1565 -5.9621 6.0425 -6.5943 1.6693 -0.4180 5.4690 -#> 16.8042 -5.0099 5.0895 -8.8010 0.1436 -1.9357 3.0880 -13.2745 -#> -#> Columns 41 to 48 0.3146 -14.3353 -5.9365 7.4027 -17.7053 -7.8412 -10.0810 -18.0308 -#> 4.0662 7.7610 1.7881 9.0038 8.5724 7.1975 -7.4737 -9.6039 -#> 4.9590 -4.5158 -6.8833 4.6137 18.8929 2.1048 -5.4436 1.9336 -#> -4.8528 -3.6431 13.7987 12.6808 9.2454 0.6520 3.5415 7.5007 -#> -0.6831 6.4074 18.4871 25.3673 5.4122 15.7172 23.4501 4.4716 -#> 16.4760 -2.8680 0.7216 4.0668 18.1909 3.2577 2.4170 20.2340 -#> -8.1073 -0.9868 13.7834 -1.9514 -3.2242 2.6656 3.5993 0.2573 -#> 3.5294 16.4313 -18.7772 4.2040 -2.7209 8.4672 8.0065 -2.4085 -#> -1.4619 -3.0826 8.1454 5.4698 -2.2318 -1.3324 12.1368 -9.4877 -#> 4.4847 -13.7462 19.8069 15.7085 13.8782 1.7101 -5.4297 1.1614 -#> -4.5525 7.3460 -3.3067 -9.1236 1.7664 -1.3738 -2.8044 -0.9467 -#> -27.4670 1.2062 -18.4750 -15.0367 6.5579 -3.3533 -0.4274 1.1769 -#> 12.5707 16.2844 5.2190 -4.8410 -0.9088 5.8897 1.0212 -7.7595 -#> 10.1050 3.4524 -1.3630 -3.5027 0.3427 -5.4786 -1.7661 -1.0505 -#> 4.3743 -3.2571 -2.2518 7.7369 0.3507 5.3110 0.2836 -12.8981 -#> -4.3187 6.1857 -5.9451 -8.2904 6.1723 17.5522 9.5769 -9.2986 -#> 10.3207 -4.8456 6.7989 5.4208 4.2143 -0.1745 3.9681 -5.4716 -#> -8.8992 -6.6920 -4.0081 -19.4168 4.1258 0.2592 7.3254 -13.1612 -#> -5.0748 -3.2694 3.9104 -5.0094 -4.3356 9.6339 17.6926 2.7489 -#> 7.9159 1.2296 5.7530 5.2686 -4.5178 7.0554 9.1664 8.6424 -#> 8.9341 -11.6695 -4.1183 -4.7487 3.2787 -8.7734 -8.8676 2.4187 -#> -0.5846 -7.5384 -8.2017 -3.2226 -1.1003 -13.9250 -9.9109 -12.9002 -#> -7.1770 -5.6694 13.1071 12.3576 -5.2566 -2.9248 -14.2614 1.8318 -#> 6.0011 8.2952 10.7749 2.2634 -13.4215 -1.2809 6.6806 2.5869 -#> -12.1651 -4.9027 -9.3990 -7.3810 -19.8350 -3.1701 -17.9258 -5.2328 -#> -3.2122 0.5319 12.5485 8.6093 4.8249 6.0053 -13.9898 13.6141 -#> 2.4600 6.9859 -0.7049 2.6028 -12.7993 -13.0092 2.9656 12.3087 -#> -7.3083 -3.6930 15.8727 10.6552 -6.9516 -5.3436 5.7612 1.9576 
-#> 5.3848 -2.5760 -9.2433 -0.3591 -3.4518 1.0722 10.9195 -8.5985 -#> -8.0646 4.3434 -2.1281 -1.4199 -16.7954 -12.6490 0.7447 -8.1730 -#> 13.3801 4.9655 -2.5100 -0.3978 11.2956 -0.5729 5.2475 5.3386 -#> -6.9883 9.6422 18.8881 -9.8414 -0.1467 -2.5258 -16.4064 -2.6196 -#> -5.7340 -4.1890 -1.8319 4.0902 1.0645 -4.4699 -11.0413 4.2681 -#> -#> Columns 49 to 54 -15.0626 4.6630 2.1379 -3.9945 -4.5883 1.5795 -#> 17.6983 -1.3270 -3.1154 2.2603 2.6928 -0.7149 -#> -0.2805 16.9501 10.5462 -11.3031 -4.7032 6.6998 -#> 7.2113 20.6352 1.0219 1.2684 1.2694 -4.6234 -#> 7.4568 -4.2505 2.3414 -5.1908 -2.3375 -5.6689 -#> -18.9978 -11.5822 -6.5452 2.4236 2.7432 0.1641 -#> 3.4828 6.0013 3.6968 -10.0672 -6.4488 1.8463 -#> -1.6269 8.8388 1.7039 10.8039 4.8676 -0.8757 -#> 8.0023 7.3665 -3.6373 0.8060 4.4465 2.0906 -#> 14.0379 5.0369 -8.3252 1.9085 -1.2375 -0.7666 -#> -8.2903 -10.8521 5.5031 -3.2708 5.1663 9.9725 -#> -4.5692 0.8189 -5.1834 1.5868 1.9432 1.3214 -#> 12.6362 -6.4726 5.5729 -9.4261 -0.5406 2.7630 -#> 6.3896 4.7119 -2.1385 0.5686 2.1703 0.6736 -#> -5.1342 -2.3181 -4.9941 5.9961 -0.5455 -3.2953 -#> 2.0037 10.6868 5.2003 7.3448 -5.1892 -0.2359 -#> -5.1612 -9.5109 -19.9095 5.0390 8.7671 4.5186 -#> 6.5665 0.5478 -0.2521 2.9676 0.2838 2.1890 -#> -2.5895 -3.7767 13.7813 15.5430 -1.4989 -2.7487 -#> -6.8314 -2.9919 -14.5160 -7.0359 6.2852 5.7361 -#> 17.4565 -9.9268 -0.5212 -2.3981 -5.5776 -4.8160 -#> -4.4211 -1.5263 -10.7164 1.1898 7.4499 6.3166 -#> -0.1586 1.6893 3.3332 -7.7713 0.1963 5.0846 -#> -10.3228 -10.7535 5.2761 14.6063 2.9479 2.6619 -#> 11.5348 7.4959 1.8269 16.0055 4.7889 0.3962 -#> -4.4697 -7.9666 -13.3593 -4.0063 -2.3265 -0.5683 -#> 9.0687 0.1223 -1.3498 -4.1347 1.9401 0.9255 -#> -10.6185 -4.0579 -4.0332 -3.4902 -6.2295 -0.2965 -#> 0.9226 7.3937 2.3543 1.6309 -2.5689 -1.2707 -#> -19.4518 6.9227 3.6227 9.0081 8.0081 1.1943 -#> 5.5501 3.1205 11.6577 -3.1306 2.7364 2.7878 -#> -13.3373 6.9455 -4.3216 -6.7553 2.7839 0.4027 -#> -0.9990 5.7631 0.4375 -1.4641 -1.9611 1.0508 +#> Columns 1 to 8 -0.6611 0.8633 4.5181 11.8372 -7.9175 -5.1817 13.0507 -7.3446 +#> -2.1008 7.8400 -2.8013 -4.0089 -21.4881 12.2031 -13.5760 -4.6295 +#> 1.0073 -3.1811 -0.8668 -8.1886 1.7477 0.9719 -4.7175 -4.5664 +#> -4.6761 7.9701 3.6665 -4.0315 -5.4230 -0.1666 -12.6213 -3.2475 +#> -2.4110 -5.0418 3.8829 -8.8623 5.4294 1.3300 1.1614 -0.1423 +#> 3.7318 1.1395 3.6796 -8.2124 0.6972 -4.5617 6.0870 -9.2689 +#> -2.3908 -2.0876 -4.9067 9.0580 -15.1537 -1.2717 -4.1885 -1.7099 +#> -0.7408 6.1000 -8.5557 13.3696 -12.4589 2.8744 2.2003 21.9139 +#> -0.0889 3.0948 2.1662 3.8347 17.1659 -15.9021 -13.1386 12.5928 +#> 4.7578 -5.0339 -1.4616 -2.0266 0.9830 -10.5603 -8.0299 -9.0213 +#> -0.9916 4.3759 3.0263 16.4362 1.6162 -1.2849 -5.0382 -5.7440 +#> 6.0196 1.4073 6.5023 8.2701 0.6183 -0.8181 -21.5906 3.6070 +#> 1.2873 -4.6194 -5.2792 0.3995 -7.0204 -8.1191 6.7674 -5.4531 +#> 0.4421 -5.3564 3.8015 -15.8994 3.0221 5.4989 -11.3123 -5.6424 +#> -2.1676 0.9705 -1.5714 -4.1847 0.9129 14.4554 -14.7947 -4.9063 +#> -4.7807 -4.6723 5.4802 2.9786 -1.8234 -12.7308 -5.6416 12.7399 +#> -3.2166 0.2486 -4.0669 -1.7059 11.5235 -1.0892 3.4701 -0.3527 +#> -0.6959 -4.0948 14.9871 -5.0513 13.7201 -3.3918 -8.4135 -8.7584 +#> -3.5511 -3.6261 -1.5445 -17.9466 0.1794 -20.1560 8.7606 -10.5011 +#> 2.0773 1.0343 -3.0427 12.6182 12.5333 -10.1239 -2.4999 3.3978 +#> 0.8364 9.5756 10.6182 -2.4468 -9.6579 6.7550 11.4021 12.2040 +#> -7.0416 9.6265 -14.7227 2.0501 0.5188 19.7729 1.6035 8.3055 +#> -4.1370 1.7481 0.7671 18.4794 6.4262 -8.1979 -19.2873 4.3345 +#> 5.1181 1.7642 -1.3925 
-0.1967 7.7939 15.3934 9.5939 11.5427 +#> -1.4124 -4.6633 5.7139 4.4405 -5.2704 -13.5925 -22.4641 0.2215 +#> -0.9937 1.6140 2.1396 6.5553 -1.7230 5.9863 1.3649 11.1956 +#> -1.7816 -9.7140 8.7916 -9.5544 -4.2154 -4.6643 14.8769 -11.7290 +#> -2.4798 0.2481 2.4461 11.5898 7.1405 2.0186 2.8154 2.9565 +#> 3.5956 2.0564 -5.1493 -0.0189 -0.5728 -2.0911 8.2910 4.7337 +#> -0.0141 0.2039 -2.6486 3.4763 13.9171 5.4500 0.9061 -3.7668 +#> -1.3178 1.7913 9.1127 -5.3201 7.6073 -13.6050 13.2268 3.2877 +#> -6.0995 -1.4246 10.5710 -12.1160 -1.6117 -8.8540 -8.9954 7.9852 +#> 0.6081 -4.1462 7.5732 -11.3362 -4.7914 0.1691 21.8157 -16.5704 +#> +#> Columns 9 to 16 -1.3819 11.6872 -1.3054 -2.3566 -1.3418 -7.3552 6.4253 5.9929 +#> 0.8817 8.4100 -20.9406 -4.2649 3.8530 17.0495 -2.8058 2.0342 +#> 12.2871 -1.4566 1.4359 -2.7449 7.5748 -0.4008 0.3269 -13.3659 +#> 0.5256 6.0077 -0.3450 -1.5874 -1.4657 5.2771 -10.0302 -0.4749 +#> 10.7343 -15.8349 20.0424 -11.8831 -22.9319 -10.2548 -4.3428 2.2176 +#> 4.7120 -12.3315 8.3134 0.0723 6.8495 -4.1431 1.7737 3.9246 +#> -15.9989 3.4592 4.0169 -7.9858 -4.9650 1.1620 -16.5610 -1.4037 +#> 0.2714 -0.6825 8.0453 1.8648 -8.9153 -11.7699 10.3799 3.9168 +#> 27.3824 -13.4740 -8.2957 2.1005 4.4899 -4.3035 3.5842 -14.0698 +#> 7.0218 0.1087 -0.7370 -6.5453 18.9192 12.9505 -18.0434 8.9276 +#> 1.8591 14.8530 8.3594 -2.4979 13.6234 2.9864 -18.4924 6.7571 +#> 20.5221 -14.5185 -18.4809 -1.3594 13.9749 1.6217 -6.5318 -29.1864 +#> 4.7099 -4.6973 12.8633 -4.8044 3.9348 -0.6298 -1.5247 -10.4734 +#> 6.2318 -11.9080 4.1567 -2.8397 4.2145 0.2820 -0.1832 -9.3342 +#> -6.2136 13.4091 -3.9193 2.4275 -10.8753 6.4462 12.9351 -5.0812 +#> -4.0848 4.7132 19.5432 0.8093 3.5631 0.6383 -0.4498 -4.9942 +#> -12.1719 7.7549 5.5829 7.3103 12.2272 -5.0218 -6.4758 11.2600 +#> 0.0171 14.9763 3.5854 -10.8155 1.2998 3.4217 -16.8142 -6.0228 +#> -17.4808 -11.8566 -6.4099 0.0224 -7.2964 2.7588 2.4149 9.9422 +#> 7.4637 8.0378 1.2751 10.2065 -6.8412 4.1106 6.3912 0.1307 +#> -4.0417 -9.4326 7.1181 3.9033 11.8671 -12.0753 7.2711 -3.8711 +#> 7.9301 -1.9583 13.8083 -3.4949 -0.8190 2.3821 -0.2717 9.6731 +#> 5.5368 16.6715 -9.6847 13.4369 2.0342 7.8106 6.7349 -13.5362 +#> -6.7949 4.3114 11.3014 -2.9501 -5.0100 12.0658 -10.7322 1.7625 +#> 11.7377 -7.5089 -12.6801 3.9749 7.1767 2.2325 -4.2643 -7.6218 +#> 14.0514 -6.3722 4.2894 10.4985 -9.8281 -2.8144 2.6320 -10.3078 +#> 8.9473 -0.4528 8.6707 -6.9895 -9.9284 -2.9170 5.7764 -5.2314 +#> -1.4368 -6.4265 -9.5321 -8.5625 -4.5725 -4.2898 -1.7135 -6.9696 +#> -3.4091 -9.2516 4.9343 -0.8970 5.5795 4.8368 -1.3720 -7.7676 +#> 5.4554 9.7498 -21.7355 22.8640 7.1671 -13.7954 7.6228 13.5363 +#> -14.2094 14.6133 -7.4670 2.5511 11.7978 -4.5729 0.9206 -1.9211 +#> -9.5800 7.0509 2.6303 2.9259 -1.3507 -11.3461 10.9748 12.0294 +#> -17.2297 9.3269 16.5957 -5.4184 2.2671 0.6134 7.4270 8.6542 +#> +#> Columns 17 to 24 -6.2700 21.5020 0.3924 -11.1093 2.9142 4.9612 -1.9417 11.1971 +#> -2.1305 7.9089 -3.3748 3.5597 -2.6662 8.1207 -16.9857 -2.4668 +#> -3.0714 10.3198 -8.7881 -3.9100 0.5989 -2.7594 4.0918 5.2642 +#> 4.3615 12.3107 4.8404 14.6435 11.4549 -7.5473 7.1962 1.8677 +#> 13.2755 -7.0422 1.5451 -1.3846 4.1460 -4.2754 7.5327 -17.9277 +#> -2.3761 11.8222 11.8904 -11.7886 7.5151 4.9451 6.3646 2.9077 +#> 2.4230 -1.8104 9.5953 1.7579 3.0649 4.9760 5.5875 11.8555 +#> 2.5610 1.1922 -11.3001 -4.3650 -19.3118 -5.0179 -13.9261 10.7239 +#> -3.0733 -1.0531 5.3610 10.0634 -3.2905 1.4873 -4.7693 4.9162 +#> 4.8763 3.7815 -0.9533 -13.4261 6.0837 -3.2680 -7.9161 -2.6066 +#> 14.9359 -6.0152 -2.2771 10.9814 -14.7333 2.2851 
#> ... [large tensor print output elided: raw numeric values for slices (11,.,.) and (12,.,.), columns 1 to 54] ...
1.2901e+01 -1.0149e+00 1.1488e-01 -7.5773e+00 +#> 1.3891e+01 1.2985e-01 9.3304e+00 1.1353e+01 6.6067e+00 -2.0041e+00 +#> -5.6352e+00 1.5733e+01 -8.8031e+00 5.5721e+00 3.4226e+00 -9.7906e+00 +#> -1.1781e+00 -2.5947e+00 3.6996e+00 5.0159e+00 1.3265e+01 -7.8744e+00 +#> -6.6246e+00 -2.3119e+01 5.5166e+00 -1.0686e+01 1.0688e+01 -7.0927e+00 +#> 1.9604e+01 3.1592e+00 -6.8978e+00 -4.5115e+00 -1.4070e+01 1.8832e+00 +#> 1.3349e+01 3.5876e+00 8.9876e+00 -1.3067e+01 7.4223e+00 -1.6353e+01 +#> -8.8548e+00 -7.7655e+00 5.2639e+00 -1.5279e+01 1.3111e+01 -8.7192e+00 +#> 9.2301e+00 1.2825e+01 3.8590e+00 -8.9350e-02 -3.3228e+00 -6.6612e+00 +#> 1.1719e+01 8.8251e+00 1.0758e+01 1.1020e+01 1.7316e+01 1.0903e+01 +#> 1.0597e+01 3.0815e+00 5.4455e+00 -1.1521e+01 3.5875e+00 1.0045e+01 +#> -4.6327e+00 2.6468e+00 9.0459e+00 3.5549e+00 -1.7233e+00 -3.7580e+00 +#> 5.5239e-01 -5.8831e+00 -6.4494e+00 6.9075e+00 -7.5687e+00 -4.6467e-01 +#> 5.6701e+00 1.5749e+01 -7.3728e+00 3.2362e+00 -7.7297e+00 -9.7480e+00 +#> 5.6887e+00 -1.1115e+01 -1.2254e+00 -5.2029e+00 -2.1490e+00 5.6609e+00 +#> 7.8467e+00 -7.5810e-02 7.7566e+00 -1.0227e+01 -3.7912e+00 -3.1270e+00 +#> -4.7080e+00 -4.5689e+00 -7.6721e-02 -5.8001e+00 -9.5479e+00 8.3101e+00 +#> -1.5013e+00 -3.6316e-01 -3.1087e+00 6.1050e+00 -1.5130e+01 1.5111e+01 +#> 3.4386e+00 9.8310e-01 -2.0312e+00 1.0758e+01 -3.3430e+00 5.0932e+00 +#> -1.3168e+01 -6.7728e+00 1.6069e+00 -4.6869e+00 4.6065e+00 1.3057e+00 +#> -1.6464e+01 5.7022e-01 1.0934e+01 8.7831e-01 -2.6596e+00 2.1941e+00 +#> 1.2239e+00 2.0631e+01 3.1795e-01 -6.3166e+00 -7.4960e-01 1.2326e+01 +#> -1.1780e+00 1.4964e+01 1.4871e+01 -6.6004e+00 -2.4907e+00 -4.5097e+00 +#> -3.2585e+00 1.6227e+01 1.4389e+01 -1.4555e+01 1.3821e+00 -1.6833e+01 +#> 2.2738e+00 -1.8023e+01 7.3146e+00 -1.3987e+01 3.7863e+00 -1.3842e+01 +#> 1.3159e+00 -6.0347e+00 2.4591e+00 -6.5787e+00 9.6240e+00 5.3632e+00 +#> -3.8836e+00 6.9408e+00 1.7871e+01 2.8488e+00 1.1042e+01 2.4041e+01 +#> 4.5042e+00 -3.3415e+00 4.6526e+00 -1.9363e+00 -5.7645e+00 4.8845e+00 +#> +#> Columns 19 to 24 2.1816e+00 1.1398e+01 -2.0100e+01 6.5745e+00 -7.2871e+00 -1.7311e+01 +#> -1.1820e+01 1.2387e+01 -4.6485e+00 1.2276e+01 4.6670e+00 2.7533e+00 +#> 4.0599e+00 -1.0616e+00 1.3343e+00 5.2283e+00 1.6967e+00 1.7240e+00 +#> 2.0656e+00 -1.1546e+00 3.8879e+00 -6.8895e+00 7.4366e+00 5.5816e-01 +#> 5.9550e+00 -1.4099e+01 -7.4364e+00 -1.8394e+01 1.4459e+00 -4.3082e+00 +#> 6.3243e+00 -1.3173e+00 -1.5185e+01 -3.2522e+00 -1.0327e+01 8.6914e+00 +#> -5.9197e+00 7.5110e+00 -9.7248e-01 5.1201e+00 -2.8549e+00 -1.2483e+01 +#> 8.3908e+00 -1.6846e+00 3.8770e+00 -1.9234e+00 1.0995e+01 -2.1494e+00 +#> -3.8551e+00 -3.2826e-01 1.6225e+01 -2.6102e+00 -3.8071e+00 1.4121e-01 +#> 1.0434e+01 2.7217e+00 3.9908e+00 2.0863e+00 1.8488e+00 -1.4448e-01 +#> -9.7046e+00 6.4612e+00 -4.5994e+00 -3.9989e+00 3.6108e+00 -1.7516e+01 +#> -2.2860e+00 -1.9504e+01 2.7930e+00 5.0934e+00 9.8681e-01 5.5149e+00 +#> 1.9064e+01 -2.3494e+00 1.3243e+01 1.6121e+00 1.2582e+01 -5.4996e+00 +#> -7.0795e+00 2.1689e+00 9.7785e+00 2.1558e+00 8.3160e+00 -2.0315e+00 +#> 1.9758e+00 -1.0558e+01 -3.7069e+00 3.8015e+00 -9.3428e-01 6.8966e-01 +#> -1.9669e+00 1.7340e+01 -5.4626e+00 2.6395e+00 -6.7900e-01 4.5230e+00 +#> 5.6454e-01 3.9365e+00 3.7507e-01 -3.5966e+00 7.3264e+00 -1.4597e+01 +#> 3.9740e+00 6.0941e+00 -1.2176e+01 -1.9403e+01 -4.5312e+00 6.1493e+00 +#> -3.5115e+00 6.1865e+00 1.2620e+01 6.3291e+00 7.9115e+00 1.8524e+01 +#> -3.5108e+00 2.5150e+00 3.5326e+00 -1.1806e+01 -4.4001e-01 -1.7239e+01 +#> 3.7839e+00 6.4758e+00 -1.4811e+00 1.1773e+01 6.4469e+00 
-7.4049e+00 +#> -1.1164e+00 9.4232e+00 3.1854e+00 1.1059e+00 9.7922e+00 4.8349e+00 +#> -6.3895e+00 8.3978e+00 -2.9053e+00 -1.1393e+01 -7.3833e+00 -1.0466e+01 +#> -4.5210e+00 -9.0873e+00 -3.9491e+00 -2.0665e+00 5.5214e+00 -1.6368e+01 +#> 5.9251e+00 -4.5887e+00 1.7094e+01 8.2391e+00 -1.6594e+00 1.8229e+01 +#> 1.5021e+01 -9.5044e+00 9.0490e+00 -4.7405e+00 -9.9340e+00 4.3366e+00 +#> 1.3484e+01 -9.3071e+00 -1.6405e+00 1.1124e+01 2.9578e+00 -5.1350e+00 +#> -1.6481e+01 4.3615e+00 -9.7366e+00 -9.5827e+00 3.0091e+00 2.2236e+00 +#> -1.8121e+00 6.1377e+00 2.6013e+00 8.3860e+00 6.7466e+00 8.8319e+00 +#> 1.1963e+01 -6.6936e+00 -9.5687e+00 -9.3228e-01 -1.7769e+01 -2.6604e+00 +#> 1.3484e+01 3.2324e+00 -5.0558e+00 -8.7698e+00 1.2637e+01 -6.5493e+00 +#> -6.0411e+00 6.4152e+00 -1.2675e+01 -9.6146e+00 8.2074e-01 4.9775e+00 +#> -1.3057e+00 5.5402e+00 -1.4782e+01 8.4229e+00 3.9776e+00 -1.2717e+01 +#> +#> Columns 25 to 30 4.2712e+00 -1.8313e+01 1.2869e+01 1.0771e+01 9.3267e+00 -8.9804e+00 +#> 3.1789e+00 -5.3914e-01 4.8706e+00 -7.7735e+00 -1.0663e+00 3.6529e+00 +#> -3.1381e+00 -5.4898e+00 -5.0572e+00 -1.5204e+01 6.8879e+00 1.5140e+00 +#> -5.6012e+00 4.8856e+00 -1.1568e+00 -1.0985e+01 -1.6185e+00 2.6259e+00 +#> 3.9710e+00 -2.6853e+00 9.4064e+00 -1.5029e+01 2.2575e+01 -5.8623e+00 +#> -5.5364e+00 -8.5947e+00 3.2784e-01 1.4177e+00 1.3231e+01 -8.0045e+00 +#> 1.5313e+00 -5.1651e+00 -1.4980e+00 -3.6880e+00 -3.4109e+00 8.9855e+00 +#> -1.1175e+01 -1.3714e+00 -1.0365e+01 4.7220e+00 5.3410e-01 1.7925e+00 +#> -4.5655e+00 -6.0778e-01 -3.1458e+00 -6.3064e+00 -6.5464e+00 -1.8196e+00 +#> 1.4990e+01 8.8926e+00 4.1020e+00 -2.1035e+00 6.3438e+00 7.0223e+00 +#> 2.5534e+00 -5.8687e+00 2.8717e+00 -7.4900e+00 6.2523e+00 8.9355e+00 +#> 1.4022e+01 1.0725e+01 1.5612e+01 -8.7809e+00 3.9462e+00 -1.3987e+01 +#> 1.0360e+01 2.7338e+00 4.0706e+00 -1.2100e+01 1.0978e+01 5.0141e+00 +#> -7.5337e+00 6.6931e+00 8.3354e+00 -6.1995e+00 2.4049e+00 -1.0162e+01 +#> 2.2830e-01 5.2956e+00 1.6554e+01 1.5506e+01 -5.9415e+00 -9.6459e+00 +#> -5.4945e+00 -4.1556e+00 7.5814e+00 -1.0024e+01 -3.3956e+00 5.4715e+00 +#> -1.8974e+00 4.0738e+00 4.3747e+00 5.3339e+00 1.0370e+01 3.5695e+00 +#> -6.3953e+00 8.1155e+00 2.6668e+00 -5.7607e+00 9.3227e-01 9.6839e+00 +#> -6.8767e+00 -3.6269e+00 -8.4079e+00 3.5210e+00 -1.5193e+01 -2.7687e+00 +#> -1.1708e+00 -1.3841e+01 4.0027e+00 1.2555e+00 -4.2463e+00 -2.2155e+00 +#> -1.0225e+01 -1.7509e+01 -1.6429e+00 -1.6112e+01 -5.7260e+00 -1.9660e+01 +#> -5.5449e+00 1.9966e+00 1.0666e+00 3.4531e+00 -1.5475e+01 3.3974e+00 +#> -1.3588e+00 7.5988e+00 -1.8388e-01 7.8243e+00 -1.5999e+01 1.0581e+01 +#> -8.8002e+00 8.7883e+00 1.1669e+01 4.5411e+00 -9.6372e+00 4.4150e+00 +#> 4.1970e+00 3.1673e+00 9.9109e+00 -2.0725e+00 1.4861e+00 8.6069e+00 +#> -8.2932e+00 7.1537e-02 1.0104e+01 1.3536e+01 -1.4057e+01 4.9435e+00 +#> -3.1966e+00 1.6351e+01 -3.0836e+00 5.8257e+00 7.8308e+00 -1.1259e+01 +#> -2.9138e+00 -1.9039e+01 -3.7361e-01 -1.1853e+01 2.7653e+00 -6.2049e+00 +#> -3.4330e+00 1.8411e+00 -6.2346e+00 -4.5798e+00 6.0478e+00 1.1831e+00 +#> 4.3443e+00 -2.8794e+01 5.0782e+00 -2.0471e+00 2.9354e+00 1.5715e+00 +#> -9.7509e-02 -1.1681e+01 6.5869e+00 -3.6830e+00 9.8735e+00 -7.4123e+00 +#> 2.3951e+00 1.6265e+01 -4.8599e-01 9.5051e+00 3.5874e+00 1.2326e+01 +#> -1.1905e+00 -5.8175e+00 -3.5236e+00 -1.7360e+01 -1.2221e+00 -1.0987e+00 +#> +#> Columns 31 to 36 -5.6949e+00 1.3361e+01 2.3993e+01 6.7339e+00 -3.0410e+00 -1.9543e+01 +#> -1.9175e-01 1.9476e+01 -8.1250e+00 -1.0638e+01 3.8086e+00 -1.4532e+00 +#> -1.7334e-01 2.9818e+00 1.5608e+00 1.4213e+01 -1.3916e+00 
-6.3702e+00 +#> -2.5140e+00 -4.6497e+00 1.2057e+01 7.0821e+00 6.0922e+00 -2.0688e+01 +#> 1.5012e+00 -1.3077e+01 1.5612e+01 1.1582e+01 9.8053e-01 8.3894e+00 +#> 4.0044e+00 -9.4430e+00 1.9241e+01 1.6960e+00 2.6499e+01 1.2580e+01 +#> -2.1191e-03 -2.8668e+00 -8.1291e+00 6.1644e+00 2.1598e+01 1.7666e+01 +#> 4.6148e+00 9.9687e-01 7.2539e+00 -6.8223e-01 4.6920e+00 -3.5754e+00 +#> 1.6859e+00 -7.5914e+00 -5.8969e+00 4.6902e+00 -5.8453e+00 -3.9254e+00 +#> -2.5888e+00 5.3150e+00 -5.4238e+00 2.0027e+01 -2.3618e+00 1.2522e+01 +#> -5.9867e+00 -6.4037e+00 1.3977e+01 -4.9397e+00 7.3722e-01 3.8638e+00 +#> -1.1407e+01 -4.7524e+00 1.8301e+00 -4.3101e+00 -1.5760e+01 1.5808e+01 +#> 1.1545e+00 -4.5749e+00 3.9353e+00 8.2907e+00 -6.0958e+00 1.8457e+01 +#> 1.7913e+00 1.6733e+00 3.3430e+00 -5.1444e+00 1.1334e+01 -4.8110e+00 +#> -7.3777e+00 1.2817e+01 -1.1085e+01 1.9805e-01 -4.1106e+00 4.4191e+00 +#> -1.6404e+01 -9.1713e+00 -1.0913e+01 1.3962e+00 8.4605e+00 8.7429e+00 +#> 4.8666e-01 -1.1638e+00 -1.3549e+01 4.1671e+00 9.1208e+00 1.2404e+01 +#> -1.3434e+01 -2.3100e+01 -4.2072e+00 7.5665e+00 1.7500e+01 -3.3207e+00 +#> 1.1719e+01 7.1473e+00 -1.1867e+01 -8.7646e+00 -1.1132e+01 4.0229e+00 +#> 4.4853e+00 -3.3253e+00 4.9775e+00 9.0879e+00 5.9107e+00 7.4434e+00 +#> 1.8142e+00 1.6967e-01 1.7707e+00 -7.8519e+00 -1.2571e+01 -2.9638e+00 +#> -7.3384e+00 -1.7540e+01 -4.1018e+00 -7.7791e-01 1.2584e+01 -4.1257e+00 +#> -1.2738e+00 -9.7462e+00 5.6063e+00 7.9758e+00 3.4011e-01 -7.2899e+00 +#> -3.6470e+00 4.9329e-01 1.0851e+00 -8.8301e+00 -5.7992e-01 9.0049e+00 +#> -9.5606e-01 2.8600e+00 -2.0506e+00 7.6949e+00 5.8242e+00 -4.7942e+00 +#> -7.5989e+00 -6.2703e+00 -1.0120e+01 1.4883e+01 -8.2820e-01 -7.1206e+00 +#> 3.7361e+00 -2.6730e-02 1.1486e-01 -9.8292e+00 7.5682e+00 9.1253e+00 +#> -9.8936e+00 -1.1247e+01 1.4983e+01 -7.0797e+00 3.0183e+00 -1.3355e+01 +#> -2.5079e+00 9.1406e+00 1.0627e+01 2.1261e+00 -4.9782e+00 7.8488e+00 +#> 4.2701e+00 1.0740e+01 5.4955e-01 -3.6912e+00 -4.8871e+00 -6.4244e+00 +#> 3.0608e+00 1.6989e+00 1.6671e+01 6.5070e+00 -9.4148e+00 1.5586e+00 +#> -2.0385e+01 3.9488e+01 -4.0000e+00 1.5101e+01 -3.2709e+00 -9.0798e+00 +#> 1.2203e+01 7.4935e+00 -7.6433e+00 -5.0494e+00 5.9084e+00 1.4025e+01 +#> +#> Columns 37 to 42 -3.5567e+00 4.9884e+00 -2.0162e+00 -9.8983e-01 -8.1743e+00 1.8105e+00 +#> -1.0130e+01 1.0737e+01 -1.3030e+01 -2.4097e+00 -7.1381e+00 -2.2804e+00 +#> -1.0712e+01 -4.6281e+00 8.6404e+00 5.0877e+00 -1.2724e+00 -1.3629e+01 +#> -1.4130e+01 -6.7262e+00 -6.5459e+00 -5.0702e+00 5.8389e+00 -6.6099e+00 +#> -1.0146e+01 -2.3493e+00 -5.7388e+00 2.3630e+00 7.7276e+00 1.9727e-01 +#> 9.4642e+00 -1.6979e+00 -1.6908e+00 4.0453e+00 -9.3064e+00 -4.9294e+00 +#> -1.7900e+01 -1.0781e+01 -2.2128e+00 3.4972e+00 4.5195e+00 2.1352e+01 +#> 1.4080e+01 -1.7566e+01 1.7405e+01 7.3100e+00 5.6941e+00 6.8552e+00 +#> 2.8797e-01 4.2590e+00 9.6440e+00 3.7909e+00 6.8174e-01 -1.1576e+01 +#> -1.3965e+01 2.2105e+00 -3.2947e-01 2.6924e+00 2.5126e-01 8.7139e+00 +#> -1.2245e+01 -1.3169e+01 2.3058e+01 4.0956e+00 1.4707e+01 -4.5470e+00 +#> 1.0996e+01 -5.0388e+00 6.5529e+00 7.1346e+00 -1.0733e+01 -1.8054e+01 +#> -4.9208e+00 -6.2861e+00 -1.5915e+00 -1.7812e+01 -6.9850e+00 1.1769e+01 +#> -1.0391e+01 -2.5315e+00 -2.4661e+00 -1.6226e+01 1.2565e+01 1.1249e+01 +#> 4.8863e+00 7.6682e+00 -4.4582e+00 1.3917e+01 2.2450e+00 4.1614e+00 +#> -9.7247e+00 -5.3544e+00 2.3091e+00 -5.2290e+00 6.1582e+00 -1.7322e+00 +#> 4.6380e+00 7.5338e+00 -2.4575e+01 5.9214e+00 7.2310e+00 1.1455e+01 +#> -3.4109e+01 -1.2936e+01 7.2160e+00 9.2824e-01 6.4974e+00 8.5199e+00 +#> 6.4172e+00 
-1.0787e+00 -9.8345e+00 7.6054e-01 -6.6593e+00 -8.7434e+00 +#> 6.0668e+00 -3.8605e+00 7.7222e+00 1.3185e+01 -8.8122e-01 2.1869e+00 +#> 8.5262e-01 1.2873e+01 -9.3571e+00 -4.0499e+00 2.7709e+00 7.4133e+00 +#> 3.8747e-01 -1.2859e+01 -3.6468e+01 -1.4145e+00 -5.2340e+00 1.3269e+01 +#> -6.7104e+00 -6.0494e+00 9.6427e+00 2.2719e+01 -1.8434e+00 -1.1373e+00 +#> 8.3560e+00 2.1794e+00 -8.9665e+00 4.2148e+00 4.8866e+00 9.1118e+00 +#> -1.9341e+01 4.2699e+00 3.9515e+00 -1.1069e+01 -2.4024e+01 -5.1794e+00 +#> -4.9691e+00 8.9193e+00 -5.9994e+00 -1.3280e+00 -1.0818e+01 -3.2965e+00 +#> 1.9666e+00 -3.0454e+00 9.0428e+00 1.3853e-01 4.1911e-01 9.6931e+00 +#> -7.9676e+00 2.1590e+00 -1.2932e+01 1.2422e+01 8.0138e+00 -5.9471e-02 +#> 8.2083e+00 7.7886e+00 -1.2701e+01 1.6966e-01 1.0069e+01 7.4440e+00 +#> -8.3398e-02 7.0096e+00 -8.7646e+00 2.5291e+01 -1.1063e+00 -1.0413e+01 +#> 1.8087e+00 1.3406e+01 -6.4398e-01 -6.0747e+00 6.3313e+00 -3.4190e+00 +#> -6.2972e+00 3.5243e+01 6.8191e+00 1.2734e+01 -6.4595e-01 -8.1191e+00 +#> -6.4373e+00 9.3643e+00 -1.7571e+01 -8.0169e+00 9.8722e+00 1.5727e+01 +#> +#> Columns 43 to 48 7.9088e+00 -5.6556e+00 4.8938e+00 1.0608e+01 -6.9977e+00 5.4173e+00 +#> -1.1238e+01 -1.3247e+01 9.6207e-01 3.6348e+00 1.4571e+01 1.0120e+01 +#> 8.0333e+00 2.7531e+00 -4.3690e+00 -6.9672e+00 -9.6015e+00 -3.1335e+00 +#> -1.1907e+01 -4.2905e+00 6.9964e+00 4.0701e+00 5.0200e+00 1.0520e+01 +#> -1.7194e+01 8.4560e+00 4.9195e+00 -1.8132e+00 3.3464e+00 9.0690e+00 +#> 1.0605e+01 3.6800e+00 1.9141e+00 -7.7366e+00 8.8173e+00 1.6478e+01 +#> 7.6670e+00 -4.4036e+00 -2.0118e+01 3.7706e+00 4.1627e+00 2.3883e+01 +#> 5.7098e+00 7.1396e+00 -6.4154e+00 -7.3870e+00 7.1860e+00 2.8694e+00 +#> 7.8667e+00 1.2995e+00 1.5219e+00 -1.5403e+01 1.4408e+01 -8.8732e-01 +#> -8.8178e+00 4.1856e+00 3.4311e+00 -3.3330e+00 1.1866e+01 2.7426e+00 +#> -3.2672e+00 -2.0059e+00 1.0937e+01 1.8572e+01 1.3395e+01 -8.7461e+00 +#> -4.6177e+00 2.8059e+00 1.3937e+01 -4.1939e+00 -7.0773e+00 -1.0014e+01 +#> 1.0897e+01 1.2618e+00 -5.3603e+00 1.6801e+00 -4.7967e+00 1.2367e+00 +#> -1.0665e+01 -1.6151e+01 -2.8564e+00 6.4615e+00 1.5634e+01 8.7862e+00 +#> -8.4990e+00 -1.8268e-01 1.0268e+00 4.8410e-01 3.3697e+00 6.8629e+00 +#> 2.1992e+01 9.2984e+00 2.2908e-01 -7.1116e+00 -5.9029e+00 2.3546e+00 +#> -6.6385e+00 6.8041e+00 5.2392e+00 3.6962e+00 5.2319e+00 -4.3501e+00 +#> -1.4431e+00 -1.4766e+01 -3.2638e+00 -9.8882e+00 3.5751e+00 -1.4642e+00 +#> 1.9757e+01 6.0210e-01 1.8813e+01 -1.7528e+01 1.3589e+01 -7.2073e+00 +#> 2.2319e+01 1.0252e+01 -8.5589e-01 2.3263e+00 -2.1076e+00 4.7572e+00 +#> 2.5049e+01 4.3311e+00 3.0760e+00 -2.1143e+01 -7.4189e+00 -5.7686e+00 +#> -6.0083e+00 7.5146e-01 -2.0329e+00 -2.1183e+01 -1.4873e+00 4.5826e+00 +#> 9.2355e+00 6.3142e+00 -6.5020e+00 6.2657e-01 -4.6874e+00 -4.8295e+00 +#> -9.6173e-01 6.2268e+00 -7.0243e+00 3.9041e+00 6.7295e+00 -1.3892e+01 +#> -3.6028e+00 1.2075e+01 3.5572e+00 -5.3261e+00 -2.6636e+00 1.1129e+01 +#> 7.4841e+00 -9.2742e-01 8.9835e+00 -8.4415e+00 -7.2958e+00 7.8062e+00 +#> 2.2400e+01 2.1623e+00 -3.0189e+00 7.1862e+00 -6.3412e+00 1.0901e+01 +#> 3.6117e+00 -7.0971e+00 2.6814e+00 5.4192e+00 3.5838e-01 1.1047e+01 +#> -9.8853e-01 -1.7515e+01 2.4676e+01 1.4085e+01 8.2997e+00 -4.8444e+00 +#> -4.5072e+00 5.5580e+00 2.3702e+01 4.8946e-01 7.3788e+00 -1.6394e+01 +#> 2.5949e+01 2.4009e+01 -8.7447e+00 -7.0793e-01 4.5913e+00 -4.3254e+00 +#> -9.8891e+00 8.4424e+00 -4.3977e-01 1.0079e+01 7.7925e+00 2.1888e+00 +#> 1.4065e+01 -1.0794e+01 -7.0287e-01 2.4411e+00 -9.2024e+00 -4.9945e+00 +#> +#> Columns 49 to 54 1.2240e+00 -1.2818e+01 9.5647e+00 
6.5460e+00 -1.5940e+01 1.1664e+01 +#> 4.5038e+00 8.0489e+00 4.2433e+00 6.2366e+00 4.6840e+00 4.4786e+00 +#> -1.6363e+01 -1.4942e+01 7.2150e+00 5.6045e+00 4.0751e+00 -2.1345e+00 +#> -1.0558e+01 5.4887e+00 -1.2569e+01 -3.2999e+00 8.2827e+00 1.8991e+00 +#> -3.8149e+00 6.1766e+00 -2.0876e+01 -6.9356e+00 8.3863e+00 6.9102e-01 +#> -2.7710e+00 -4.1936e+00 -1.1201e+00 -5.6233e+00 -1.3735e+01 8.6153e+00 +#> 1.5911e+01 1.3823e+00 3.4872e+00 -2.3034e+00 -3.7417e+00 -4.8286e+00 +#> 1.5463e+01 -2.4522e+00 -4.8940e+00 1.0528e+01 6.3326e+00 -4.1093e+00 +#> -1.3318e+01 -6.1879e+00 1.4634e+01 -6.2361e-01 -5.1670e+00 -2.5203e+00 +#> -1.5569e+00 -9.1091e+00 -2.2504e+00 -2.4537e+00 5.0198e+00 -1.3754e+00 +#> 1.1113e+01 2.0553e+00 -1.1050e+01 -5.2365e+00 2.9043e+00 -9.1591e-01 +#> -1.8847e+01 -8.8923e+00 1.0530e+01 -2.0581e+01 -6.0758e-01 9.6417e-01 +#> 9.6848e+00 -9.3837e+00 1.3975e+01 -5.5193e+00 3.6848e+00 1.5615e+00 +#> -1.2492e+01 5.5814e+00 6.1893e+00 -7.8973e+00 2.9681e+00 6.9512e-01 +#> 1.1504e+01 7.6018e+00 6.9501e+00 7.5726e+00 6.8567e-01 -2.4113e+00 +#> 6.7309e+00 5.1267e+00 -9.7423e+00 3.6266e-01 -1.6027e+00 -1.7865e+00 +#> 1.6913e+01 1.1763e+01 -6.8572e+00 -1.0648e+01 -8.2093e+00 -3.2516e+00 +#> 4.6734e+00 -6.2526e-01 -1.5552e+01 1.4470e+00 -3.3042e+00 2.4781e-01 +#> 1.1611e+01 -6.9366e-02 4.6982e+00 -6.0526e-01 -1.9356e+00 -4.0384e+00 +#> 1.7692e-01 -1.7620e+01 8.2326e+00 -3.1783e+00 -3.9814e+00 4.4209e+00 +#> -1.0654e+01 8.7792e+00 -2.7505e+00 -1.5891e+01 1.6422e+01 -1.3983e+01 +#> 1.7992e+00 8.1260e-01 -8.6144e+00 -2.9489e+00 7.1418e+00 1.2316e+00 +#> -1.4337e+01 -6.8194e+00 -1.7666e+01 2.5777e-01 -1.2038e+01 -3.0056e+00 +#> 7.4552e+00 9.3896e+00 -1.7496e+01 1.4413e+00 4.9794e+00 -8.6028e+00 +#> -6.5202e+00 9.1406e+00 1.1102e+01 -7.9955e-01 1.4321e+01 3.1646e+00 +#> 6.7650e+00 -3.1582e+00 -2.6166e+00 -3.2835e+00 8.4187e+00 2.4054e-01 +#> 1.5329e+01 -5.0739e+00 7.8081e+00 -2.6967e+00 -5.3574e+00 1.0191e+00 +#> -2.0436e+01 8.6585e+00 1.1495e+01 1.8867e+00 -3.0256e+00 2.2732e+00 +#> 1.8309e+00 8.7694e+00 7.3270e+00 -7.8188e+00 1.1073e+01 3.1894e+00 +#> -1.1917e+01 6.3724e+00 1.4758e+00 9.3766e-01 -2.3483e+00 9.6741e+00 +#> -1.7902e+00 6.6935e+00 -5.0005e-01 1.1740e+01 -5.9915e+00 3.6669e-01 +#> 2.5202e+00 2.1117e+00 4.2934e+00 9.3296e+00 4.1314e-02 1.2129e+00 +#> 6.5548e+00 -3.7900e+00 -9.4372e+00 -9.1410e+00 1.8987e+00 2.0038e+00 #> #> (13,.,.) 
= -#> Columns 1 to 8 0.6225 -1.5633 -1.0825 5.1088 5.7446 -3.3708 3.6607 11.8080 -#> -5.9414 4.0864 -1.2822 13.1553 -15.7463 6.3882 1.5634 -11.4881 -#> 3.8365 -8.4567 4.2103 13.8929 -3.8816 -6.1134 -0.6663 -8.0852 -#> -3.0222 6.8643 -2.3826 -0.7312 -5.0444 10.4230 3.6401 -5.1320 -#> 2.9885 0.6388 -1.7581 -0.5176 8.0534 9.4546 17.4677 2.8854 -#> -0.0614 2.0208 -14.5500 0.9292 16.2407 1.4846 5.7644 0.9823 -#> -5.0461 4.9234 2.7604 1.9248 -0.2402 -1.4744 6.1263 -18.5446 -#> 1.5126 -6.5236 7.4569 -6.4902 0.9539 -34.3821 4.7376 -6.4473 -#> -1.8858 -1.7096 -1.9603 0.5967 -2.9680 -0.6489 -4.0181 -0.8944 -#> -0.4119 -1.9274 1.7104 6.9449 2.2967 14.1728 4.3677 5.9133 -#> 3.1806 0.2433 -6.9227 -6.6097 -3.4068 -1.5102 1.1768 -7.3246 -#> 5.1976 3.0144 2.7979 -7.2828 7.7405 4.1381 7.9900 10.3155 -#> -1.6468 0.6064 -5.1478 9.8157 2.6589 6.8723 -2.0592 11.5436 -#> -0.2124 -2.1939 -5.1560 13.0934 -6.1557 5.6263 -11.1988 0.6105 -#> -0.7770 5.0216 -9.4610 3.9703 5.4409 11.2737 12.1204 -2.3762 -#> -4.3581 6.0825 2.3620 -0.5420 -0.5396 -2.1976 -2.9201 -17.2563 -#> 1.2618 5.1261 -7.6242 3.7173 9.8331 17.3887 2.6580 -6.2977 -#> -1.8163 -2.6399 9.1186 -5.1573 2.7076 10.3487 -5.5950 6.4227 -#> -2.2549 2.9415 -1.3807 -0.5117 -13.7537 -9.9439 -7.9778 -11.1709 -#> -0.0283 -3.4669 0.2005 4.9303 1.9195 -14.3671 -4.5576 6.9302 -#> 1.2863 6.2312 5.7416 -1.8370 -6.7823 11.3672 5.8564 1.9807 -#> 1.8187 2.6548 4.8856 -0.5090 3.4795 -2.6129 -6.3327 3.1360 -#> 0.8995 7.3851 -0.9977 -1.0382 -0.0063 7.6577 -0.6447 -18.8161 -#> 0.1038 -3.5217 -9.7995 3.8576 -10.7382 -3.8209 -21.4523 -5.3858 -#> 0.1988 -0.2407 2.9182 9.4107 -7.8575 8.6792 11.2470 1.5597 -#> 1.0691 -7.2791 10.4935 -11.5572 2.8200 -11.1891 0.6554 4.3099 -#> 3.9875 -1.9605 1.6879 8.9195 6.2382 12.0618 -4.7136 -1.3624 -#> 1.0449 -2.2370 -5.4446 3.2154 -7.8951 -3.5772 -16.4724 -0.0223 -#> -4.2487 -0.0201 -5.0634 15.8200 3.4258 2.2750 -6.9639 -1.5070 -#> 7.8249 -2.3258 -11.5096 6.5237 25.4575 12.2537 -2.4187 9.4461 -#> -4.5060 3.7490 -4.8310 -6.0221 -7.5922 4.1831 -8.2478 1.6914 -#> 4.5197 -6.3969 6.6634 -7.9376 11.1415 -1.7244 3.6967 -5.8130 -#> 1.6796 10.2047 2.9963 0.4611 15.4367 8.6640 12.3652 9.3268 -#> -#> Columns 9 to 16 -1.2855 0.0185 -9.0798 23.0098 1.5778 7.3526 15.9490 -2.5839 -#> 12.9727 -1.6580 6.0191 -1.8907 3.2326 -4.4916 -12.2917 9.3610 -#> -9.4650 -3.3754 9.2407 -20.8298 -4.0272 -4.0452 -8.5894 -13.6104 -#> 4.6112 -15.6477 8.9300 6.9903 -3.0968 0.7586 -4.6015 -13.4000 -#> 8.4996 -4.4138 -5.4577 -7.4059 -19.6451 -5.3132 -17.6285 -8.1284 -#> 17.0900 -12.0884 7.6606 -5.2643 -3.9708 8.9101 -12.8181 1.2724 -#> 11.7912 3.3413 1.8484 -10.1811 -3.7404 5.9754 12.8199 3.5822 -#> 9.0085 0.0326 -8.7861 4.5516 -3.4405 -11.4276 -1.4082 -14.7301 -#> 14.3183 -13.1309 -5.4163 -2.6748 -15.5476 -1.9758 -9.0136 7.4097 -#> -2.9459 1.3235 14.2310 -4.8895 1.0256 18.3548 3.3083 8.2479 -#> -10.8172 -6.8999 -12.8204 -11.6486 -5.6255 2.9987 7.2506 -16.9556 -#> 8.6517 2.0668 -5.2789 2.6444 16.9798 5.7512 -6.2904 9.3246 -#> 8.5083 -4.5114 2.7383 -3.9136 2.9081 11.4299 -2.0102 4.7976 -#> -14.5124 2.3619 11.0900 13.8907 5.7813 -8.9663 6.9377 15.0322 -#> 9.4700 -5.7718 21.7840 1.7330 -13.4566 9.4243 11.2065 3.2765 -#> 7.1058 -3.1070 -12.9466 -0.9314 6.2793 -5.7981 -1.6923 4.2464 -#> 9.8869 2.1072 0.9882 -8.5688 -6.7740 5.2935 -1.1322 -6.0721 -#> 13.6170 10.1805 -0.6006 5.0214 24.5619 17.5630 12.9840 8.9247 -#> 0.7140 1.3764 5.9720 0.2036 2.1168 20.9245 2.4649 -10.0140 -#> -5.4863 -4.5560 6.1671 2.2992 -9.4693 3.2962 8.1224 -7.1518 -#> 4.0313 -3.1694 -3.7682 8.7248 6.9283 -6.7974 
1.0717 3.5391 -#> -12.6850 -11.1544 -3.5832 -17.0292 -10.2922 10.5102 -8.5805 -16.6701 -#> 1.6295 -9.6518 -9.2382 -21.4327 -4.1098 14.8623 6.2514 -18.4324 -#> -4.7173 -13.7172 -15.1778 -13.1720 -12.6725 4.9590 -3.7975 -8.5846 -#> 4.4336 -0.8218 7.3717 -5.4434 -4.8480 -20.0243 -2.8340 7.2669 -#> -3.9407 -5.8612 -9.2493 2.7658 -6.3236 7.3682 0.8614 -2.2647 -#> -5.1057 15.3606 17.8084 0.8658 -4.1440 11.4043 27.9573 -0.7385 -#> -14.7314 4.1165 11.2977 -13.5234 -15.3198 -13.3219 -12.4830 -9.5864 -#> 12.5962 -2.0848 20.9790 6.7316 -7.9422 -7.3325 -11.7881 2.1356 -#> -2.2800 5.7363 11.4628 7.3722 10.9769 -12.4952 -6.5846 4.3698 -#> -5.5631 6.5627 -2.8050 -8.0543 13.7225 5.1217 -13.3229 -10.1688 -#> 1.1927 8.0427 -9.9975 1.4130 -20.1412 10.6100 13.6041 -6.3356 -#> -3.2324 13.7741 -1.9013 -4.1233 -2.3054 2.5272 6.2873 4.8383 -#> -#> Columns 17 to 24 18.1636 2.6890 5.8660 -0.9663 -7.8354 13.3825 -5.7938 -3.0844 -#> 14.9101 -8.1264 9.3529 -5.6501 -13.8379 -5.1529 12.0903 6.6171 -#> -12.9787 -4.5172 -3.5492 -0.1676 -0.3024 -20.2456 -17.2140 -3.7271 -#> 15.0217 -17.9985 -2.7245 0.7354 -10.6781 -4.0954 16.4370 -3.9958 -#> 19.8714 -15.6207 3.7955 -11.6631 0.5926 3.2357 -10.5831 -6.8354 -#> 7.3107 10.1958 -12.5282 -15.6849 9.5947 -0.6776 -3.5694 -6.8568 -#> 4.1721 6.1826 -3.5171 -1.6282 -4.5980 -12.7351 2.4134 1.4011 -#> -3.8929 0.8567 -2.3181 10.7230 -0.4437 -1.1194 -2.6472 -1.0988 -#> 0.5700 -4.2833 0.8950 9.9760 0.1999 -7.7351 8.4031 -1.9698 -#> 8.6987 1.8773 5.9658 -6.3321 10.9693 8.2531 -7.0048 -16.8536 -#> -25.2377 -12.1537 14.2061 -8.3470 7.8966 -2.2503 -19.2228 6.2746 -#> -4.2712 -3.8228 -5.6966 -14.1824 -10.1671 -10.6292 -2.9392 -2.9507 -#> -1.0011 -7.9093 8.0571 -11.0527 12.1593 -3.7356 -10.7960 -6.1334 -#> -0.5150 -10.1990 13.7810 -3.9238 -9.3145 12.6934 7.1857 6.6778 -#> 6.2490 -4.6729 -2.4496 -4.4163 1.3302 24.1066 -2.6552 -12.0586 -#> 3.7691 3.7795 -1.8822 9.3887 -7.6862 -0.9493 0.4402 -6.8497 -#> 7.8804 20.2961 4.4132 -0.6763 1.9556 -9.0676 -0.4367 -9.1325 -#> -8.1008 11.3290 19.9934 18.0099 -12.1368 -15.7701 15.0487 -3.7897 -#> 3.9090 -12.1362 11.0174 8.8886 1.6636 4.9241 -14.1295 5.4046 -#> 2.5688 -3.1138 -14.4614 0.4418 -14.4975 8.2753 -17.5903 -9.1665 -#> 9.6189 -2.4555 4.4813 -10.1254 -0.2373 -9.0948 8.7192 4.3740 -#> -16.1791 5.0023 -5.0693 12.8547 -3.6058 11.3588 -5.5056 -5.0991 -#> 5.2873 -8.9815 3.3710 -13.0669 14.1989 -8.1130 -9.1110 9.0331 -#> -23.5598 -30.5964 -14.4885 4.7473 2.7410 -11.7885 -4.4209 -2.4463 -#> -0.0130 -15.6218 -14.0751 1.8073 -3.6559 1.4849 7.6913 4.5255 -#> 4.9514 -0.2254 -3.8016 15.9771 1.8918 12.4013 -6.2868 -1.3038 -#> -19.2015 2.9579 -6.1137 19.9605 3.5282 -10.5849 6.1530 0.7995 -#> -0.4617 -3.9831 -4.4857 -0.1848 1.6324 9.0417 -0.4186 -10.2384 -#> -1.0973 -10.0601 -0.4049 9.9800 4.7209 -10.4516 18.6407 1.3664 -#> 2.4310 13.5651 -10.4308 18.9115 8.8863 17.2386 15.7466 5.9440 -#> -19.0119 17.7831 11.2511 -6.0253 -4.8593 -1.7501 -1.0230 5.3960 -#> 3.3324 -6.8259 -16.2877 11.6091 16.1198 6.0207 -9.4439 6.7558 -#> -8.2590 8.0797 -6.6256 10.1228 -0.1918 -10.1314 1.6924 -16.9398 -#> -#> Columns 25 to 32 9.7602 -7.7804 8.1099 -10.5406 -0.8016 4.2707 1.6632 6.3764 -#> -6.5471 -11.5912 -6.5146 0.6060 -2.6289 6.8809 -8.7830 14.0514 -#> -8.0664 -0.9994 24.0894 -6.0419 11.4410 7.0581 0.7835 15.4865 -#> -12.7290 8.7368 -21.3734 12.8849 -0.0553 3.9926 3.6383 1.3704 -#> -13.4071 3.8879 -24.2288 11.2635 1.5915 7.5209 6.7797 -0.2068 -#> -17.5532 -16.1468 -2.0649 10.6989 -0.0317 -5.5492 -0.6623 -1.1360 -#> -5.5422 15.5088 -10.3694 -7.2312 -0.0041 -6.3378 14.6600 -10.6514 
-#> -7.5865 1.1804 -1.6392 -22.0979 -11.5349 -7.3807 9.2519 7.6128 -#> -10.6179 -12.5250 1.7685 8.5996 14.2387 -8.7196 -6.3942 6.2967 -#> -4.5517 3.1659 1.0517 5.2230 1.6644 7.6189 7.8894 -9.2089 -#> -0.7227 -8.7862 21.3328 -13.5036 9.0519 -3.5375 -10.7146 7.8418 -#> -7.3776 -9.2339 -9.0334 4.7565 9.8160 3.7270 15.0688 7.1635 -#> 11.3902 -10.1863 1.0542 -6.8610 2.8108 3.0329 -15.4997 3.2724 -#> -1.3498 1.5421 4.7036 2.1384 1.6352 -6.2726 8.5540 15.2884 -#> 4.6787 -2.6138 12.6692 5.2700 -5.3515 0.3652 10.9815 -3.3373 -#> -2.6148 11.6006 1.1225 -5.9275 5.0149 -0.7353 2.5467 -10.4715 -#> 3.8180 12.0771 8.0940 0.3466 -15.9737 -17.4497 7.3648 3.8229 -#> -1.6993 19.6717 -6.1632 -9.6537 8.7153 -3.2533 13.1750 5.6769 -#> -8.5416 -11.7091 -9.0161 -15.5721 -10.8837 2.5474 8.6095 1.1659 -#> 8.6536 2.1465 4.0729 -7.8629 -11.2442 10.7150 0.3810 13.0305 -#> -23.0618 1.2090 1.6410 8.9639 2.0674 0.9350 -4.3420 6.6411 -#> -5.8685 3.1908 9.4136 -13.6394 -15.1120 -19.4785 5.9027 11.2434 -#> 9.5898 2.5199 9.4495 4.7774 8.1902 15.0195 -1.8043 7.9227 -#> 11.5086 -11.9990 0.6903 -15.1623 -4.1851 0.9766 6.7075 16.3212 -#> 21.3647 -18.4335 -2.4270 7.1990 4.3983 2.1114 0.8842 -0.2500 -#> 3.3664 -6.8320 -6.3752 8.8395 -9.9285 1.5100 -6.8595 -5.1740 -#> 9.0478 -0.2612 10.8768 -13.0471 -8.4355 -2.3569 -8.9594 -6.3812 -#> 6.6571 -4.9372 4.6812 -3.2220 -4.3514 8.6508 -2.3734 -10.8352 -#> -9.3337 13.9356 -11.9485 16.9562 -3.8194 1.4336 -10.1326 4.5468 -#> 11.5920 -1.3552 0.0446 -6.0608 -11.8932 -7.2355 -17.2701 6.8176 -#> 13.2213 -3.6443 -12.2204 -14.1070 7.7651 0.5954 2.4866 11.5992 -#> -23.3829 12.7079 -3.0813 7.9698 -1.7549 -0.0543 3.8429 -3.8380 -#> -11.4183 11.7327 -9.6460 -18.0117 17.6143 -1.6256 9.8787 -2.0024 -#> -#> Columns 33 to 40 -3.9341 -2.0305 0.6524 -5.1456 0.6766 5.5498 1.0476 -0.1857 -#> -6.8117 2.9403 7.8930 -7.9923 3.1811 -1.0809 1.7309 -3.3199 -#> 2.2975 -11.8925 3.8148 10.5640 -6.8398 25.4739 1.6435 14.8361 -#> -10.5531 -3.5991 -4.7588 -8.2663 7.6058 2.9165 0.2993 -17.7117 -#> -8.8943 -11.8832 10.5742 -11.1564 0.5177 -2.3430 -7.9058 -8.9088 -#> 4.5182 4.0905 -3.4065 -13.8010 -11.5456 -3.7047 -10.3269 11.0212 -#> -11.1254 3.4482 14.1089 -15.2183 2.6742 4.0473 3.2420 -20.2393 -#> 0.8347 2.2787 4.5881 2.3749 8.7257 -4.3932 7.1358 -8.3630 -#> 5.0211 9.5129 -15.4526 -4.9501 10.4043 12.4970 -3.7509 23.3894 -#> -10.2102 3.5111 -12.3805 3.7743 -6.8626 -0.0747 -2.9689 9.1161 -#> 8.2601 3.4696 -12.2933 -0.6306 -1.6229 4.7930 -7.3460 32.7231 -#> 2.5878 10.6950 -19.4801 -2.9348 -1.4626 -0.3699 2.5573 5.6626 -#> -6.8752 0.9755 -4.6640 -17.8662 -0.8453 3.5109 -4.6187 9.9316 -#> 1.6028 -0.1389 -1.2994 -6.2128 10.0624 -2.3837 2.1566 8.8314 -#> -14.0722 1.5607 -20.0004 22.8786 -0.1401 2.0176 -14.8383 -13.5920 -#> -18.6510 -0.4177 -2.9582 4.9505 12.8327 7.7842 1.7678 -8.5907 -#> 7.1897 -2.1071 1.5479 6.3302 1.3973 -2.8158 -8.7517 1.4023 -#> -6.8289 3.7599 1.2994 -1.9434 0.1248 5.1686 1.8590 8.0154 -#> -1.5625 2.7974 14.1518 -10.1855 -17.9860 -7.5408 8.3709 10.3564 -#> 0.7383 -5.3753 11.1572 9.0830 -4.7921 5.3966 -17.6511 0.3852 -#> 1.2908 -10.6373 9.6046 0.0826 -3.8387 0.5976 2.8609 -9.5399 -#> -2.6528 -1.6465 -1.9558 13.9648 7.2380 -0.6112 -14.5066 2.6307 -#> -2.3397 -0.2024 0.8239 -6.0347 4.0913 11.3913 14.4397 4.5889 -#> 11.2896 10.6101 -10.7042 -15.5441 -9.2189 -9.8412 5.1248 32.5710 -#> 11.0244 5.5595 -7.1076 16.0597 6.2505 6.3358 -0.2796 -2.1579 -#> -9.9230 0.2472 -9.2166 5.0312 3.5981 -7.8504 -7.2184 -1.3071 -#> -2.4879 -11.3545 -12.4110 24.2905 -8.7519 -17.0857 -10.2919 -3.4404 -#> 4.1187 -2.6143 1.0428 
10.6671 -13.0915 -3.5546 0.5629 6.4200 -#> -3.3104 -12.0760 -0.7076 -3.0841 24.2404 -3.1691 -6.9119 1.7941 -#> 3.1939 -8.4331 -17.0943 10.9742 6.5465 8.0851 0.8856 -8.9553 -#> -14.2499 13.0122 -2.6418 -2.2588 -0.4159 14.4637 -3.2607 8.2385 -#> 18.2165 -3.9107 0.1991 -8.0249 0.6290 -2.4179 11.3165 6.8118 -#> 3.9876 -3.3598 14.0990 -5.3588 -12.7347 -11.6581 -0.1995 11.9736 -#> -#> Columns 41 to 48 2.6384 12.6728 -3.7734 -20.3231 -0.2065 -1.2895 5.8673 -0.0072 -#> -0.5856 -7.6191 9.3754 -7.0757 -1.0990 -0.6045 -2.5725 0.2833 -#> -11.6955 1.5578 -5.1756 -6.8691 -13.8330 1.5411 -2.2008 13.6485 -#> 5.0361 -17.7714 10.7466 -6.2763 13.0006 -3.0142 2.9823 1.0376 -#> 8.8774 -10.9829 0.9405 -9.5145 2.8328 -4.1716 0.8532 2.6348 -#> -10.9172 3.7905 8.3874 7.9075 -8.7231 7.7682 -9.8785 -1.4428 -#> 4.1869 -4.6672 -1.5456 -1.7447 -8.1386 3.2080 0.5161 8.1082 -#> -0.7679 5.4786 5.5345 8.9032 5.1916 1.9881 -1.1946 8.5584 -#> 2.3608 22.4209 -18.1195 -9.3485 -0.4505 0.1381 -21.7665 -0.1648 -#> -8.8235 4.9310 -6.2566 -16.0138 3.0802 -1.5473 1.1070 1.7021 -#> 1.9942 9.4574 4.0311 11.4089 -5.2419 4.3092 -11.9660 12.5830 -#> -2.6881 3.7668 2.4198 13.9039 5.7139 14.8638 12.3989 -1.7952 -#> -22.4898 -14.8158 -6.6019 -5.1961 5.4858 -0.3881 -13.2620 -0.4163 -#> -4.5293 8.1611 5.8838 -9.0136 -5.5433 -14.5050 -8.3331 9.2668 -#> 7.0383 0.8013 0.0123 7.6101 25.3196 -5.0556 -8.8034 6.1687 -#> -13.9259 -4.7097 -6.8706 4.4354 4.7897 29.0184 -1.8586 -9.1406 -#> -8.2563 5.5888 -0.1159 8.7976 -2.7876 12.0876 -13.8446 21.1724 -#> -4.6292 2.1923 -8.3140 1.8247 4.4025 0.4105 -1.3992 6.8583 -#> 7.0735 2.5204 0.8192 -3.7221 -21.1281 -4.5568 -1.9114 -10.8041 -#> 0.4631 -5.1953 3.0787 2.2137 9.7525 -2.0934 -5.1761 10.7299 -#> 3.2992 2.7955 6.4101 6.6697 -15.3544 -2.3311 10.5766 -5.4545 -#> -4.7215 12.9134 5.2489 11.9368 -10.3907 0.9388 -7.1882 20.6407 -#> 11.2167 -1.1713 -4.6229 -26.7602 8.4682 6.1601 7.4882 -2.4842 -#> 11.0823 13.9282 -1.9220 -9.2037 -4.9446 -20.9689 -21.6739 -3.2960 -#> 22.2801 11.3753 4.0951 17.2048 6.1801 -12.2547 10.9081 -2.8286 -#> 6.4324 -4.8378 -0.7057 -3.4801 0.3702 -13.0156 3.2830 -12.0592 -#> -4.2670 7.1882 -2.7934 2.9694 10.5591 -1.4846 6.7703 20.9930 -#> 3.9284 -4.1591 -8.8767 -2.8856 -2.4679 -2.1686 12.8365 9.9197 -#> 1.9951 -20.0595 -4.7230 -15.3246 16.8989 2.5943 -2.6376 3.5854 -#> 0.8159 7.6094 -14.7591 -1.7713 14.2379 1.4623 -9.3418 0.7935 -#> -5.7499 8.6248 12.7241 -6.9128 -4.7761 -6.7299 -4.8388 0.9684 -#> 24.4516 -7.3553 -1.6329 0.4585 -2.1951 -10.1090 9.4520 -1.1079 -#> 2.9964 -5.9975 10.7465 8.8183 -7.0952 -0.7106 4.2629 1.4456 -#> -#> Columns 49 to 54 8.2625 0.9033 1.5123 3.2150 -4.9727 -0.7974 -#> -9.1411 -6.7768 -3.3312 -7.6552 -5.8629 1.5708 -#> 19.4400 -2.8266 0.6595 2.9002 2.2003 -3.4884 -#> 10.0413 0.8295 9.7387 -5.7218 2.9886 1.8164 -#> 0.7244 -4.3088 15.7654 -5.2667 6.0944 -3.1314 -#> -3.9781 -0.5492 3.8910 -8.0746 9.1950 0.4971 -#> -4.2548 -0.8059 1.0549 7.2977 -5.5230 -1.4808 -#> 6.5204 -2.4275 3.5413 -2.1586 -8.4760 1.7143 -#> -17.1326 3.4917 3.2997 2.7240 0.5655 3.8225 -#> -7.7256 -0.1063 4.6172 -5.3888 7.1422 -8.5851 -#> 7.6347 -2.0811 -0.6710 -2.1723 -7.2139 1.1386 -#> -1.6509 1.1366 16.1597 -1.2614 -3.2961 1.4994 -#> 4.6051 22.8175 1.6403 -9.0837 7.6439 -2.1571 -#> -3.5945 -6.0262 -16.3896 0.2016 0.9746 1.5790 -#> -7.6154 -7.2821 4.4517 5.6029 -6.9573 -4.2717 -#> -10.4473 -0.8336 -9.6946 9.4865 -7.1611 1.8194 -#> 11.3759 -6.8616 -11.5624 3.5180 -1.9766 -3.6941 -#> 2.0064 -1.6333 4.0976 0.6205 -2.6042 -4.9661 -#> -12.8823 -7.8360 1.0782 5.6972 5.6660 2.1111 -#> 1.9252 -9.6661 
8.4916 -9.2061 -4.0192 -8.4211 -#> 6.6718 -3.4963 -4.0314 -7.0760 5.7587 3.5493 -#> 12.1766 11.8530 -6.2429 -4.2156 -5.7470 1.1385 -#> -3.2129 3.4765 6.8705 2.2789 -1.6691 -9.8592 -#> -3.0113 9.5897 -7.1651 3.0243 4.2135 -0.0837 -#> -7.6214 2.8420 1.4394 -0.0282 -3.7063 6.0629 -#> 8.3831 -2.2977 16.5964 -4.5028 2.6921 -2.9995 -#> 7.3325 -0.6589 -8.6266 0.7577 -7.1119 -4.5866 -#> -5.9435 -12.6229 2.5341 8.3034 5.0543 -2.7943 -#> 1.8347 6.9768 7.7201 0.5520 6.6773 1.3603 -#> 11.4995 9.5357 -4.6271 4.5563 -4.1167 -1.1788 -#> 1.0763 0.1444 -15.0228 -0.1753 2.3464 -6.2756 -#> 11.7925 -10.6398 18.1778 6.3685 -4.0435 -2.9618 -#> 6.0329 12.3226 -8.7054 2.7213 1.1492 1.0349 +#> Columns 1 to 8 0.1502 -0.6327 -5.1165 11.6773 14.3463 -16.5857 -23.2968 1.7444 +#> 0.0141 -4.0258 4.1974 8.9528 -9.9707 -8.5931 -3.2126 -1.9768 +#> 5.8446 -6.9787 8.9577 -0.7572 3.3967 4.2266 -5.3755 12.2286 +#> 0.7716 6.1096 -0.6805 2.2177 2.3422 -6.7453 -0.7093 -3.9760 +#> -3.5904 1.2967 3.7849 1.1136 12.1699 1.5712 3.1402 -4.8506 +#> -4.9829 2.7822 -8.2093 2.0288 22.9273 -13.8559 7.9110 -0.1632 +#> 0.9712 4.1440 4.5902 3.1206 8.4393 8.0681 2.4422 -6.5444 +#> -4.1766 6.5025 -8.0135 -0.7057 -4.7804 10.9730 -14.6142 -6.0363 +#> -1.2651 7.5690 0.0950 -4.7592 1.2051 -8.0752 8.2937 6.4398 +#> -2.5072 2.2357 -6.0020 -17.4529 6.3990 7.8647 11.7904 2.6119 +#> 1.3361 -10.6175 -0.2354 2.7564 7.6634 -1.0736 -14.1549 -8.6648 +#> 0.3114 -1.5055 -0.3231 1.0974 -6.9884 -9.7584 0.9568 10.1247 +#> 1.6453 -5.7996 3.5452 6.3986 9.3010 -0.6876 1.1725 5.0440 +#> -5.3501 1.5010 3.2083 -0.1423 6.0821 -4.6799 10.5806 14.2973 +#> 5.5053 -3.4022 -9.2779 3.2031 -17.1135 -14.0270 -7.7946 8.4620 +#> -1.0949 -0.3882 13.5959 13.5594 -6.5282 3.5082 12.8191 -11.8238 +#> -5.4553 4.9206 5.0066 0.6998 2.7967 -3.7996 -3.5733 -5.6437 +#> -2.7895 -7.2860 4.7593 2.8820 6.8320 0.6149 -2.1145 -10.6138 +#> 1.4033 6.6825 14.9566 2.1453 2.2461 -3.6996 12.8808 -5.8650 +#> 0.7737 -0.9963 4.8971 -1.6165 2.1081 3.5645 -0.0486 -3.1669 +#> 2.4272 -6.0975 -8.8798 -4.1698 13.6304 -7.3528 15.2820 8.6944 +#> -1.3885 7.3461 -5.3565 -2.9205 4.6033 5.2413 -4.7957 -7.1355 +#> -2.7855 0.4383 -2.0094 5.3199 11.6295 13.1496 -7.0949 -3.9079 +#> 7.6482 4.3722 -1.2289 -9.8542 -14.4585 -0.2983 3.6377 -1.7866 +#> -2.0140 -5.7528 -2.2515 -6.1826 -3.5021 -4.1251 5.6230 6.0546 +#> 2.3866 -2.8660 -2.6130 2.1253 -5.8539 -7.4563 14.0099 9.4419 +#> -1.9124 1.7875 4.3873 8.6576 8.1988 1.7990 -20.1503 -0.2226 +#> -1.3105 0.7783 -2.3225 10.2201 3.3992 -8.8959 0.0339 -13.3772 +#> 1.2475 4.8226 -4.0829 -10.4948 0.0785 -14.0801 20.2100 2.5078 +#> -0.8625 1.8876 -9.3857 -1.3849 0.8895 -29.8070 6.9208 -1.5414 +#> -5.8760 0.3630 -10.4666 -6.4514 5.2589 3.1505 8.0784 -13.7259 +#> -6.8187 -0.2258 -0.3130 -0.8500 -3.7169 -8.6896 16.5658 -0.4019 +#> 2.9267 -2.9513 14.0778 -5.9623 9.5521 -5.5139 5.5557 1.7858 +#> +#> Columns 9 to 16 1.7864 -5.0491 -0.5680 -13.1897 -3.9846 -3.8399 2.7190 11.2911 +#> -7.3188 -12.3025 -4.9221 1.7435 8.5387 5.5209 0.6618 10.5780 +#> -5.5940 9.3218 10.8462 -15.2501 2.6757 -7.6190 -5.6595 5.6197 +#> -2.5642 -7.0475 -17.7531 -6.4610 -0.2528 -2.6088 -5.9995 6.3092 +#> -18.1159 -9.3682 -5.4989 -12.7824 7.5788 -3.4397 -8.2629 30.7666 +#> -2.1718 -0.3497 -3.1785 -6.8824 -4.1225 -13.7171 14.8214 6.4650 +#> -7.4951 -5.4636 -5.1662 -3.3794 -0.5324 8.7031 -7.3074 0.7377 +#> 10.7573 1.2803 -2.8465 -7.6641 16.8036 -17.2760 17.4993 -3.7881 +#> 6.5791 4.9164 4.6700 -19.1050 -2.1860 -3.5184 19.9508 10.7379 +#> -0.1242 -4.1409 17.6556 1.3901 3.6041 7.7495 -15.2813 -8.3537 +#> 8.5827 6.2256 
-5.9061 -0.5912 -8.7770 -15.7100 -5.5663 -4.7435 +#> -3.7580 15.1016 17.3715 -15.2703 0.3931 5.8730 -0.7814 -8.5623 +#> 6.3721 -10.4422 -2.1435 -14.6633 -8.7295 -2.0260 3.0931 -6.2568 +#> -2.6818 4.1069 -21.5864 -6.9363 18.0050 -11.0571 -5.2293 0.4206 +#> -2.5457 -8.8578 -5.3770 4.6376 -3.6898 -9.7821 -14.0432 -5.2195 +#> 4.6188 3.7077 1.7767 -21.9502 -7.9970 -0.6138 -9.0604 -8.8355 +#> 13.8772 1.5815 -3.6044 -4.8382 -6.5071 -2.6235 -5.2686 -4.5968 +#> -1.5794 -22.0361 -2.2059 1.1024 8.4288 4.4157 -5.9486 -2.7891 +#> 8.1590 3.8084 -5.2762 -8.1521 10.3248 -8.1053 5.0296 -20.4825 +#> -8.1600 3.9923 3.1615 3.3658 -12.1478 -0.4582 4.4161 -7.0431 +#> 14.1191 -2.9299 -8.3054 -5.0831 7.5364 4.5573 -12.9784 6.0358 +#> -2.2307 -8.5232 -1.5594 7.7415 -4.2493 9.5669 6.8826 11.5447 +#> 2.0640 -1.1619 8.9586 12.2880 -8.5156 -0.8169 3.9353 -2.8133 +#> -5.9796 -1.3531 -5.1153 5.4403 6.6031 1.6265 -9.1132 -2.4750 +#> -12.1595 14.5719 7.3589 -0.8528 -12.0797 6.5165 -6.5158 9.1897 +#> -7.7593 -7.5360 -4.2608 0.6677 5.6086 7.3084 -1.1450 -0.6650 +#> 2.1285 5.0666 3.8205 -19.9812 -5.3825 -6.2388 -5.0001 5.1073 +#> -11.4498 7.9439 2.5264 1.7372 12.4589 4.7601 0.8722 11.8413 +#> 4.5614 0.2426 -1.1967 0.3214 8.3549 -3.9374 -1.9127 -30.0761 +#> -10.6920 -17.6461 19.2189 18.9718 -17.5786 -13.3243 3.8246 -20.3998 +#> 1.9363 2.6901 11.4226 -0.8000 -8.4201 -10.7214 -15.2685 -1.2141 +#> -6.4414 5.5103 -14.8857 16.8497 -6.3816 -15.7200 -11.5210 3.0433 +#> 12.2955 -23.6837 -9.0876 2.5366 -11.2098 -4.3643 -6.7683 -6.3079 +#> +#> Columns 17 to 24 8.8265 18.9406 -9.4133 -2.5348 7.2665 6.5188 -4.3930 2.1048 +#> 2.8716 -16.0539 -11.8397 5.5599 3.8261 -27.1507 7.7110 -5.4979 +#> 2.3130 -0.5389 -9.2514 -8.8838 -0.5748 0.2937 7.8625 12.3091 +#> 13.4952 5.9597 -5.5660 -3.7617 -3.2340 8.2575 -6.2173 12.0081 +#> -4.9171 8.4606 7.0278 13.6968 8.3683 -7.3618 6.8984 -11.2440 +#> -3.2943 8.3736 3.7466 9.5787 -8.9279 6.5024 -18.1383 0.4755 +#> -0.4949 5.5721 6.9379 -7.8568 2.8956 11.2934 1.4106 -1.6120 +#> 11.9518 11.8384 2.5023 -11.5562 9.9104 1.2761 12.6963 -14.4882 +#> 5.2451 4.2083 9.0829 0.8515 -6.3686 2.6889 0.3345 10.9705 +#> -12.5338 -10.2935 -1.3780 -4.7442 -2.3097 7.1551 -0.1443 9.5783 +#> 25.3115 -3.0398 -14.3625 -1.2636 12.7686 -3.3883 13.4861 -11.1937 +#> -4.5211 2.1346 12.5857 -8.1697 -14.3383 -12.2419 8.2375 -5.4028 +#> -2.0818 -7.1800 0.7283 2.0070 1.7882 -2.7738 -3.0338 9.9644 +#> -6.3256 9.6141 -8.1130 12.2936 8.5317 -11.5170 5.0715 7.5243 +#> -2.7383 -9.7668 -16.7810 -10.9428 3.4754 -8.3081 -5.1900 -4.5701 +#> -10.2102 11.2480 6.5073 -6.5978 -13.8941 5.6511 4.0472 -10.9076 +#> -5.5926 -1.5593 5.7522 4.2259 -1.7510 -4.6279 -4.5535 5.6030 +#> 14.6310 -3.2063 -10.0208 16.2688 -2.3531 3.7047 -11.1398 3.4741 +#> 1.2885 -12.0961 4.9428 -12.3416 -1.2494 14.2553 -13.7385 5.5886 +#> -3.4728 4.1505 12.0654 -12.3847 -6.6229 -0.7823 -3.0840 12.5855 +#> 14.5684 -0.3764 -7.1810 -6.3587 14.2933 9.1784 -11.0523 -3.9788 +#> -0.3546 11.6056 1.1543 -4.5159 -8.4306 -9.4144 -5.4696 -8.1684 +#> -15.3279 3.0093 4.7932 13.0679 -9.1388 -5.4328 22.8265 0.3704 +#> 7.6645 2.6893 -14.4119 -2.8792 4.4938 8.0528 3.1974 -8.6381 +#> 3.3738 -7.1812 -5.0788 2.8829 5.2612 1.0766 1.6137 -0.3375 +#> 5.5916 -8.0586 6.7475 -4.2617 2.6975 4.4982 -3.8766 9.6640 +#> -5.5459 18.6531 1.8227 10.8455 -4.3877 2.5642 -12.4524 -15.8890 +#> -7.4719 23.2260 4.1229 4.9025 6.2354 -9.6361 0.0786 -8.6084 +#> 11.6038 -4.1914 3.5742 -8.2894 -1.1181 9.8882 -3.5115 5.7064 +#> 9.0505 -17.6689 11.3823 -12.1330 -14.1131 8.9340 -5.1862 4.9549 +#> -12.8777 -9.1465 1.0852 6.5645 
1.3628 11.2346 -2.7197 3.3082 +#> -15.9775 -3.1818 -4.5302 10.2368 -2.1682 2.2111 11.4631 1.7134 +#> 8.4578 10.4538 -14.2874 11.6500 -3.2734 5.6058 -14.4480 12.7504 +#> +#> Columns 25 to 32 4.5362 -6.3846 -9.1947 -1.4785 -4.4318 2.7225 -3.3800 8.8245 +#> 3.6867 -12.9515 -7.3310 6.4607 23.0355 8.0931 20.4505 -3.2334 +#> 12.1664 -2.9485 -6.0665 0.1145 -10.9513 0.0830 -4.1382 -14.1843 +#> -3.6729 -10.5543 -6.1293 -10.1133 -10.5977 7.9843 -13.1441 -1.9370 +#> 5.4258 3.2411 -6.3558 -8.8474 4.2419 -4.6076 2.1246 18.6591 +#> 12.4402 5.9000 -9.1547 -3.6564 -2.2394 17.8233 -26.5256 8.5273 +#> -0.2354 3.5966 -4.7936 -3.3391 -1.6590 -0.3001 4.9479 -1.6429 +#> -2.5284 12.9843 -1.1777 13.4040 7.5414 7.3708 6.3774 5.7664 +#> -11.7246 -7.4048 5.6794 -6.5819 -9.6858 -6.8145 -26.7890 15.7343 +#> -6.4861 -3.1409 6.0822 2.7258 7.0966 1.6550 2.2907 1.6123 +#> 12.6386 7.5574 -17.8627 -4.7396 2.8532 4.6215 -4.0242 0.3481 +#> -13.9927 1.0784 8.9757 -2.3967 3.4257 -14.6185 -16.4024 -3.1132 +#> -6.5631 3.2515 1.3259 1.5545 -5.2817 6.8179 2.4699 7.5062 +#> 7.6563 -18.2317 -5.9616 14.4318 2.4898 -1.0191 -2.6948 13.5260 +#> 5.0151 -18.5543 -0.4373 1.6251 3.7774 4.4423 3.4176 -18.3328 +#> 0.2585 3.5288 -7.7036 8.1640 -0.5656 1.0587 -3.1771 -10.6755 +#> 1.1604 10.2628 -1.4101 -1.9162 0.8279 -9.7483 -5.9036 -0.6826 +#> 2.0231 -14.9928 -17.9465 12.3793 -7.7265 27.5180 -8.1285 -20.4272 +#> -7.4535 1.1490 1.7212 -11.0781 -4.4916 10.2222 -4.3039 -14.2790 +#> -8.5627 1.9616 4.6415 -16.0075 -1.2049 5.0215 -10.4812 7.2736 +#> 7.8055 15.1741 -3.3337 0.9341 -5.7580 -12.2163 -14.0667 12.3840 +#> -10.8541 13.6793 12.8612 1.0421 9.4164 -5.6323 -7.4601 -10.8486 +#> 0.4551 -0.9507 -14.4768 -4.5887 -16.6857 -2.4758 0.4379 -4.1190 +#> -1.1506 -5.2307 -1.4388 15.7405 2.6871 -11.9327 9.2181 -12.6076 +#> -4.4578 -11.9439 2.8058 5.7662 26.6840 9.0248 2.8032 4.2246 +#> -10.3300 -9.6205 4.1342 -2.7574 9.2160 3.0372 1.0326 -8.0832 +#> 6.9992 6.7391 8.4815 8.7365 -2.1005 -2.3835 1.9618 -13.2855 +#> -3.9171 15.1513 -0.8424 -1.0606 3.2967 -4.1415 -5.7364 -4.2694 +#> -14.0996 4.1496 9.1888 2.7240 -2.9387 -2.3929 -4.0292 6.5702 +#> 0.5942 -13.2248 1.4505 -9.6488 -5.8043 10.6196 -11.6304 24.4467 +#> -2.7895 -4.9634 -5.1119 12.7252 7.1553 10.3754 -19.7647 0.2771 +#> 8.7760 -8.9328 -12.1186 0.7542 6.9072 -5.5977 19.3347 2.9616 +#> 10.7522 7.7890 -17.6029 -10.4164 -21.0359 -8.3593 4.2845 -0.8286 +#> +#> Columns 33 to 40 8.2293 -5.1600 2.4583 2.5163 -1.0784 9.5208 -15.3089 -1.7521 +#> 9.2848 -9.5671 -14.9465 2.1182 4.4059 7.3621 5.9493 1.2706 +#> 17.0957 1.0428 5.7024 1.9221 8.5981 -15.9624 -5.3606 -6.3537 +#> 14.9423 3.4573 6.8398 8.1765 4.7029 -1.8634 -17.5590 -0.9544 +#> -12.7008 -4.0907 -4.9392 -3.9029 -19.0786 6.2376 -3.0325 22.7123 +#> -15.3415 -9.7980 5.7301 10.5588 -1.5494 14.9005 -1.5633 5.6834 +#> 4.9847 -2.1010 -13.1506 -7.0318 -16.9139 -1.7904 5.2275 6.5568 +#> -6.2782 -4.1310 5.4280 7.1735 4.1964 -8.1535 2.6125 23.0726 +#> 0.6093 4.6743 12.5202 4.8873 -7.3422 -10.9752 -0.0823 10.2310 +#> 15.8328 12.4338 -1.4879 -12.5920 1.4926 2.2287 12.7423 8.4751 +#> 6.4443 8.5776 1.8122 8.8571 -5.5670 1.4204 -0.4372 9.1904 +#> -8.4314 -3.8573 2.6211 -5.6659 5.3228 3.5362 0.8988 7.5738 +#> 2.0035 7.1615 -10.6302 2.9194 6.9806 9.4983 -5.4700 11.1988 +#> -5.3793 -5.6574 6.8903 2.4261 -2.8877 -7.1431 6.0547 0.1253 +#> -10.6209 6.6820 7.0431 0.9343 5.0204 -16.7050 -8.7897 -11.5912 +#> -6.4801 -0.3452 4.4141 0.4342 9.4311 8.6459 -16.5092 2.4078 +#> -9.5491 -11.3154 -8.8797 -5.5353 6.2301 -3.0708 9.0026 6.3432 +#> 17.7493 13.4238 -9.5344 -21.4205 0.7244 
-7.3182 -7.6547 0.4352 +#> -4.1389 -22.7554 -4.8486 4.0074 9.3635 -2.8265 4.1809 1.0639 +#> 4.8367 -0.1893 -1.2777 14.8569 5.5085 -7.2500 1.0789 2.7517 +#> 13.1695 11.4243 10.3081 -0.4784 -1.4638 12.9745 -13.0287 -1.4219 +#> -6.5016 10.9037 21.7328 -3.0877 -1.4793 10.7955 -20.6368 5.9169 +#> 14.2078 3.3812 -13.7932 -8.9849 -8.9467 14.5796 24.6045 0.8933 +#> -7.5996 3.5048 22.0829 2.5851 8.0192 -13.6655 -4.3487 -3.7454 +#> 14.5181 4.7206 11.7895 -1.5944 -1.3652 -6.5049 8.1058 -0.1272 +#> 0.8222 -2.5204 1.9191 -2.2974 5.8231 3.0175 -9.3610 -13.6032 +#> 3.6852 -5.0083 0.2621 -0.3519 1.8717 -0.2546 9.9030 3.9011 +#> -0.2744 -7.0254 -3.2710 2.9479 2.9215 4.8423 -17.4727 6.9660 +#> -13.3550 0.9482 22.9237 16.7080 20.9580 5.3614 -2.4341 -7.4789 +#> -13.3255 -17.2470 12.6000 0.2714 2.5818 4.9169 -4.2032 -15.0249 +#> 6.1829 -1.3314 0.1468 18.2705 -2.9847 5.8745 4.7609 0.4977 +#> -0.4490 -24.9648 -3.1186 -8.9949 13.4103 -8.0018 27.0184 -7.6923 +#> 7.8353 -5.8453 2.2504 17.2211 5.8414 5.0291 -5.5030 1.7877 +#> +#> Columns 41 to 48 4.0916 3.3849 4.7426 -1.3405 10.1076 16.2660 10.6860 -6.1657 +#> -19.7972 -5.1898 -4.8162 -10.3623 10.6441 -3.9971 -8.4521 -7.2454 +#> -9.4200 -1.9175 5.8213 12.5241 18.8085 -5.1161 5.2490 8.6126 +#> 2.1210 -0.8823 -5.0897 2.6917 1.2090 15.9057 2.4324 5.3099 +#> 20.1725 -10.2449 -8.4428 -5.3989 2.6726 -1.9335 1.5664 -0.2841 +#> -7.7274 -8.1707 -19.7667 3.5507 -12.9730 23.8874 3.7928 9.6482 +#> 3.4079 8.7545 -2.4725 -7.2952 -7.7775 -15.5888 1.6426 -10.1780 +#> -12.2906 0.5766 -5.7266 -9.7333 -24.4787 -8.5168 5.8254 -14.4201 +#> -7.4206 -2.5292 6.2337 -5.6087 -7.9186 -18.1385 2.1090 -9.9210 +#> 3.5527 7.2321 18.0147 2.9648 0.2699 -18.7313 5.7041 2.9222 +#> -3.7048 1.2978 -12.4707 -7.6922 7.0586 5.9125 6.4581 -1.0633 +#> -3.6145 -22.2058 2.8666 -4.8607 2.6456 -10.1019 17.1695 18.6168 +#> 4.2739 -3.9789 20.4105 4.6241 8.9154 -10.1849 6.2545 5.0430 +#> -0.2601 2.5646 -1.2959 -7.4923 -14.5485 7.5918 8.7384 -17.6047 +#> 21.9885 0.5478 2.0581 -6.5970 2.2651 3.1050 -13.9449 -19.2407 +#> 3.6517 -8.8977 12.0841 18.7272 -1.2955 -8.1800 4.6427 5.9706 +#> 9.1178 7.7942 5.9473 -18.4547 11.2522 2.0277 -3.8946 2.1382 +#> 9.6460 2.5854 -11.6359 7.5342 -10.0983 -8.3343 -13.1944 -0.1597 +#> 2.2920 -12.5184 6.8451 14.7949 -3.9423 -9.1176 4.3645 4.6963 +#> -1.5796 -12.6671 17.2917 -7.0164 7.4424 -10.2480 12.6685 5.7798 +#> -5.4726 5.6697 -10.1118 -15.8220 0.1771 -2.1173 16.5993 4.6345 +#> 2.0185 -1.7008 0.6645 3.0304 -1.5405 12.6602 -13.5373 9.3011 +#> 3.5688 2.3965 -11.0686 16.3498 -6.6246 3.4093 -4.8364 5.5354 +#> 6.6491 21.9286 -3.4636 -2.0928 10.7779 6.7079 -6.7564 6.8138 +#> -5.0027 0.2820 2.3162 7.4655 -7.6358 -8.0254 -10.0874 -1.0490 +#> 16.2587 0.4248 -13.0793 16.2570 2.3401 3.0679 -10.8025 15.4457 +#> 2.9828 0.1336 10.0219 -11.0347 -15.0825 -4.9162 8.7773 -10.9194 +#> -7.1183 -11.7349 -6.9990 -5.8554 -22.7783 4.5044 -0.2356 -11.5437 +#> 5.6971 1.9848 3.5486 -1.4376 -11.2005 15.1607 16.3215 3.7568 +#> 1.9127 -3.7770 -11.8825 5.1776 10.8349 9.3161 -13.8649 9.8430 +#> -0.8122 6.2886 12.2971 0.0506 -0.5405 -1.1465 8.9510 -4.6449 +#> 19.2128 -13.2209 0.0055 1.4520 -4.1927 -1.8904 -5.9837 -12.5014 +#> -11.3788 12.8628 10.0100 -23.3328 12.1723 13.6150 0.2646 2.8340 +#> +#> Columns 49 to 54 -7.4826 0.3962 3.7641 -1.1592 -3.7543 -3.7121 +#> 7.9611 2.5701 -0.4473 -9.0542 -11.3221 3.8200 +#> -25.5656 -4.4084 -2.6764 -2.7857 8.7528 5.1498 +#> -10.9501 -9.3092 -13.1660 -1.3856 3.2337 -2.7899 +#> 0.5280 4.3192 4.8902 10.1795 2.4985 -3.8265 +#> 8.3818 -7.1733 6.0724 -0.6022 -1.3414 0.6556 +#> 9.2775 
-#> [ ... old printed tensor values omitted ... ]
#> 
#> (14,.,.) = 
-#> [ old printed values, Columns 1 to 54, omitted ]
+#> [ new printed values, Columns 1 to 54, omitted ]
#> 
#> (15,.,.) = 
-#> [ old printed values, Columns 1 to 54, omitted ]
+#> [ new printed values, Columns 1 to 54, omitted ]
#> 
#> (16,.,.) = 
-#> [ old printed values, Columns 1 to 54, omitted ]
-7.7944 2.0300 -8.8346 -#> 11.5074 6.9423 6.5628 -7.9312 -0.3811 2.8890 3.3496 -13.8868 -#> -9.8659 -9.3607 -5.8141 -3.7807 -1.1790 7.4838 8.1174 15.1448 -#> -4.9651 9.1740 14.4638 3.6277 9.6858 8.8757 4.3408 -11.4244 -#> 8.6441 -1.5692 8.1847 0.1239 8.9716 -2.4765 -10.7623 7.3977 -#> -11.0943 -16.6178 2.2971 -21.6193 -2.5390 -10.3741 9.8438 -5.6642 -#> -9.2277 14.6166 -0.9204 14.9913 10.7844 13.5524 2.8325 -8.9513 -#> -2.6473 -5.8741 4.6406 -5.3104 4.9784 -7.5659 7.1071 -9.0259 -#> -1.1382 0.5198 -2.9474 5.7955 6.4414 -5.7807 3.7976 -12.1651 -#> 6.7978 -6.5910 1.2208 6.1146 7.8964 16.9533 8.0562 -5.8046 -#> 4.1896 14.8452 -4.5841 -7.3017 -0.0823 4.4017 4.8627 -7.9858 -#> 7.3247 18.5870 7.4510 -3.6479 -25.6386 -0.6795 -4.1227 -13.5621 -#> -10.5895 -10.6946 8.3915 -3.0894 -2.1166 -8.1934 3.9229 11.6619 -#> 3.3745 3.2772 -12.0464 0.5004 -2.8965 -5.3103 -15.6017 10.8435 -#> 10.7255 3.9382 5.0526 6.5678 -8.1970 -12.1692 -4.3047 2.8854 -#> -5.4066 0.8166 -0.8165 -12.6045 -2.5846 7.5421 19.8140 -4.3701 -#> 7.6552 7.8124 14.6502 -2.3297 -11.4268 -10.0371 1.9409 12.4794 -#> 1.4078 9.8272 7.0404 -1.8239 10.8952 11.4932 1.9102 -10.4790 -#> 9.4703 -6.6227 12.2637 -6.4715 -15.9110 -14.3271 -7.6908 14.2522 -#> -0.8424 -4.3798 9.7508 -1.0919 -3.3694 -15.9257 12.9108 2.7339 -#> 1.0147 -4.9658 -6.1954 5.5696 -5.3329 -1.1068 4.5829 -5.8869 -#> 9.7037 -5.5950 -16.5860 7.1283 -6.5315 -2.6857 2.3856 8.0047 -#> 14.0389 2.8653 0.3209 4.5985 11.4294 -2.1647 10.7592 -13.9907 -#> 14.3837 19.9179 -1.5585 -13.3698 -8.6395 -1.7041 3.9278 2.5171 -#> -13.9244 -0.2977 -3.2740 4.1151 -4.9771 11.6901 -6.4768 6.6685 -#> 5.1153 -0.3700 -17.0048 8.4165 -6.7035 -6.2654 -1.5954 9.6956 -#> -#> Columns 41 to 48 -22.1942 -7.2350 -2.4917 -11.1559 -3.0166 -15.0017 -1.1812 -3.4713 -#> 0.2388 8.1962 -9.7739 9.0305 8.3463 2.1590 -5.6769 5.0629 -#> 16.7023 -3.2802 0.8704 2.1747 14.1589 -8.2684 -3.2724 7.1870 -#> 4.6840 -7.5805 9.2810 2.9401 -2.8086 6.2953 -9.3358 -1.4327 -#> -0.5781 10.5564 19.4567 -8.2992 1.6499 1.0847 2.9731 10.7564 -#> 9.4773 -0.5292 16.9066 -6.3917 -11.6539 15.5057 4.0474 7.7430 -#> 15.5874 1.3924 -16.0389 4.0388 5.7586 4.8510 -7.7798 -2.4974 -#> 21.3204 -14.2573 3.3693 5.9481 -10.7813 -0.9151 -5.0470 -10.1517 -#> 5.6014 -5.1494 19.4992 6.3122 -8.5826 -4.0738 2.6674 7.9877 -#> 0.5577 6.1221 16.2732 -1.8582 -3.3096 -1.8466 8.7034 20.5187 -#> 3.1797 10.5174 -15.2399 -2.6417 6.3028 1.2196 18.4807 -4.0511 -#> 3.0401 -8.5193 -1.1578 0.3379 -1.0156 -7.5664 -18.8893 -6.9766 -#> -8.8674 11.4856 -8.3224 -7.0834 7.2036 6.2827 18.5061 9.4553 -#> -0.7081 3.2447 -13.5620 -3.5952 5.2871 -7.9405 -12.0735 1.1528 -#> 15.1621 -0.6657 17.3880 -4.1401 -5.4776 1.6688 -5.8632 9.7832 -#> -4.7534 14.7511 -7.6977 -1.0605 6.7663 8.0379 -1.9791 2.8543 -#> 11.4958 0.2522 -19.5168 -14.8940 -3.2251 9.1275 9.5365 5.7808 -#> 16.8602 -8.5958 1.7279 0.3534 -15.7748 1.2065 -5.9608 -1.8861 -#> -11.9294 23.8705 10.0658 -9.9190 11.2422 13.1915 2.9278 -8.3528 -#> 20.0953 -0.7472 17.5934 -3.3260 18.8082 -8.5177 0.2490 -6.8630 -#> -12.7300 -2.4564 -1.6036 -1.8352 -7.7443 -7.1272 -13.3813 -3.6956 -#> 8.1264 -13.6377 10.9397 -3.0560 6.5067 2.8503 -4.2379 -10.5363 -#> 0.5117 11.3553 -9.0868 -2.7140 1.1550 5.6160 11.9133 14.7809 -#> -3.9240 5.4078 -13.7294 -4.3688 6.5080 -3.5070 -1.7219 -4.4902 -#> 12.5034 -12.8467 2.1763 5.1926 3.1938 -0.7747 -20.0967 -9.0441 -#> -4.2370 4.1992 7.6872 -5.2593 -1.7053 -11.8188 -0.1682 -4.0788 -#> -5.0200 -27.1574 12.7201 20.7038 0.9286 -12.9077 0.3077 10.2574 -#> -6.0969 0.4821 11.8856 5.4467 0.9379 3.4836 -5.4542 6.9657 -#> 
-18.5719 -0.9974 -5.4086 15.3446 -2.6354 -2.4166 -15.3805 5.1253 -#> 6.2883 -29.3754 -1.2569 -1.7976 -6.8801 -0.9421 3.6779 3.4648 -#> 14.4022 7.4876 -4.4341 -0.7004 5.4160 -1.7521 -5.0956 -13.7839 -#> 7.8083 2.2676 -7.3100 7.3198 -1.0796 -7.9814 2.6069 -0.7932 -#> -2.4366 -11.5112 3.4781 12.4499 -3.4112 -10.5143 -14.6968 5.0345 -#> -#> Columns 49 to 54 -8.9649 15.0480 -3.5127 -1.3708 -6.2487 0.3050 -#> 14.9347 5.8413 1.5031 -1.6481 -4.4734 -2.3108 -#> -6.8708 -4.6939 13.0038 -4.7202 2.7074 -7.4918 -#> -1.4283 11.7403 9.1098 -4.2097 5.4451 -4.1161 -#> 7.9117 21.6279 -2.7317 -0.8931 -9.2567 -3.0612 -#> 17.0366 -33.7473 4.8260 0.1720 -1.2427 -4.2173 -#> 9.5700 -15.3509 12.6820 -0.3305 0.5376 -6.5525 -#> -13.2608 -18.3225 18.4981 14.1568 -3.1979 -10.7866 -#> 8.7527 -11.6705 -10.1354 -10.2437 4.1236 5.4367 -#> 7.5527 -4.3760 9.8673 -0.5702 4.7787 -5.6126 -#> 1.2647 -21.6524 -21.0066 7.6477 2.8037 3.0143 -#> -8.7994 -14.5644 -19.7484 6.1574 -0.9049 -1.3891 -#> -4.8802 0.4612 -3.7767 2.4743 1.4301 0.2956 -#> 7.3854 10.7632 8.3053 1.8451 6.9022 -4.2529 -#> -3.2762 -6.7379 8.6647 5.6959 -1.1013 -0.0037 -#> 0.3988 -6.0855 12.6957 1.2420 -4.7745 3.7277 -#> 12.0424 -22.9109 0.8896 5.6065 4.0492 -0.3889 -#> -12.7498 -0.4841 11.0614 -5.0944 14.2974 -8.2520 -#> 3.4900 2.0673 -2.8354 1.1162 -1.4959 -4.3088 -#> -7.9860 7.1940 -1.3905 5.9913 -1.7375 -5.5433 -#> 1.8201 5.1878 9.1053 -6.9162 -5.2749 -1.3472 -#> -13.8737 -16.1515 4.4713 11.9625 -2.8959 -2.3175 -#> 7.7018 -2.5956 0.8244 3.0606 5.3660 -4.8304 -#> -20.7461 -7.6867 12.4397 -6.9317 -7.0717 6.5692 -#> -3.1832 -9.8930 -9.0189 2.7933 -0.3160 -0.1822 -#> -2.9797 16.1053 -1.1040 4.7778 -8.6608 3.9642 -#> -7.8118 1.3081 0.3643 -13.3631 -4.6473 1.9124 -#> 16.6642 3.7141 -11.1340 -17.0547 4.8123 2.1086 -#> 1.4257 8.5336 7.0028 -12.1989 -3.7266 4.4054 -#> -13.9857 -10.3642 0.7977 5.1235 10.0793 -1.8005 -#> -11.1381 18.8368 9.4772 8.2808 1.2748 -3.9059 -#> 5.8320 -3.8253 -8.4454 -7.3230 2.7275 3.6790 -#> -0.2858 -1.3838 -1.1742 -20.6285 5.9709 7.5458 +#> Columns 1 to 8 5.2756 1.7547 6.6615 -4.6317 -11.0351 4.6979 -0.3336 12.0969 +#> -8.4552 -2.7863 -2.5687 7.5584 14.7468 -10.8910 18.6608 6.5969 +#> -5.1925 7.5049 -3.4704 7.0544 -8.9914 -8.3004 15.1310 -12.3357 +#> -1.1411 -6.0144 -2.0602 -4.1445 -3.4500 -4.4563 16.2231 12.0519 +#> 7.7178 -3.0893 -2.8235 -6.4274 -8.1859 26.6533 1.2059 0.3104 +#> 1.5923 -10.1808 -2.9224 -7.9397 1.6827 16.1166 -9.2551 12.5632 +#> -2.6303 -7.3634 7.1459 -10.0291 3.6116 4.1022 3.6122 8.8857 +#> 3.1622 0.5190 15.0729 -5.9795 -2.1665 16.2795 13.3101 2.7286 +#> 5.5378 5.0749 2.1329 -3.5275 2.5768 2.9050 2.9379 2.5825 +#> 3.8436 -1.4010 3.5313 11.2961 -13.4705 -18.4719 0.8731 -8.6201 +#> -5.1079 0.5902 10.3610 -0.1259 -0.6092 -0.3629 16.7746 -2.3444 +#> 0.0413 -6.9749 3.9347 -9.8101 -8.9197 -8.8230 -2.9649 -9.1024 +#> 3.7957 -8.6419 0.0118 -12.8990 -3.1335 8.5876 -0.2939 2.3774 +#> -2.1789 5.7850 6.0346 -2.0728 6.2211 0.4319 -8.2255 18.9112 +#> -7.7170 -6.1099 -1.3654 13.0622 6.8478 -3.4010 -5.4624 3.1738 +#> -7.1405 -4.9243 4.6307 13.6795 -9.1733 -2.8841 7.6890 4.3012 +#> -2.4381 -6.4208 -1.1564 -0.8427 -14.2324 0.8812 2.6017 -1.5269 +#> -5.5222 -6.3645 -1.7536 22.5823 -13.4884 -12.7068 -7.2487 22.9923 +#> -10.0144 -6.4853 -15.9476 4.4563 1.2097 -0.5564 9.2309 1.4572 +#> 4.0753 0.6627 8.7961 -6.3962 1.5498 5.9692 -9.9231 1.2858 +#> 1.2143 1.5773 6.6916 -3.1724 -8.4606 -8.0125 25.8460 -12.1257 +#> 4.2672 -17.0935 2.0135 19.9261 9.4984 -7.5729 5.1595 -4.6295 +#> 6.5616 5.6412 -13.1349 3.5440 0.4855 -3.9691 -14.9047 -5.1175 +#> 
-1.6898 5.4509 19.7283 6.9923 -5.5815 -10.8268 15.0298 -6.0301 +#> -0.5550 8.4818 0.2669 -8.1374 -2.4088 -5.0113 8.1721 1.9246 +#> 4.0182 6.4205 -0.3107 8.1392 -11.3114 -8.3182 18.0963 -9.7825 +#> 0.2734 7.0354 -0.6524 -13.5626 14.5923 11.9680 1.2434 9.3070 +#> -3.8056 -5.2942 6.0883 7.5276 4.8486 -1.1728 4.0150 6.6492 +#> 1.4906 0.7824 -7.3681 11.2375 -5.6878 -14.5941 12.9500 -0.1145 +#> 1.5652 -1.1036 -2.0194 7.8666 -11.4598 -5.0640 -2.6630 7.8504 +#> 0.9509 -3.8416 2.4783 -0.5952 -20.7763 5.2880 -14.7230 5.2904 +#> -2.1371 13.4726 -14.8625 -3.5884 -6.7279 12.7785 -5.6060 5.2959 +#> -1.6139 3.2251 -13.8866 -13.3582 13.3213 -3.4479 11.4082 4.1389 +#> +#> Columns 9 to 16 -5.7304 1.0103 5.8575 -0.0605 5.0028 -4.5229 -5.3523 -1.9468 +#> 6.3223 3.2229 4.6141 5.6089 -6.0990 -6.5460 -16.9111 -4.8647 +#> 3.0358 1.6391 9.6789 -3.6553 11.9386 -0.4901 5.4186 0.9924 +#> -7.8927 4.0743 7.0767 18.4033 8.8516 -8.0430 -11.5070 3.5952 +#> 1.8210 -4.5351 12.3444 7.1112 13.6045 -13.3215 -2.5869 2.1400 +#> -15.2553 3.0540 1.0591 -4.6158 -1.8773 6.0072 5.9049 -11.5138 +#> 8.6981 9.6180 1.8021 11.7182 -0.8657 16.7797 -19.3308 -10.6778 +#> 5.8178 2.5498 -13.3592 -15.0191 -5.4614 -15.2058 -4.8563 -0.1500 +#> -6.3362 0.2004 1.9199 -4.4868 0.3165 -13.1603 4.2994 -2.3995 +#> -0.8427 -2.1965 -2.4171 -8.7800 12.6301 3.9171 1.3621 2.9480 +#> 2.1233 1.0991 -14.8571 0.4214 -14.3135 12.0101 -3.3511 -24.4492 +#> -7.3505 -13.1537 11.8116 -5.8900 6.6713 -5.9084 25.6507 4.0086 +#> 2.4995 12.2153 -4.9295 -1.6695 9.2545 -8.2995 -3.3051 -11.0737 +#> 1.9268 2.9528 -4.8150 1.7671 32.9636 -21.0868 -5.9075 -0.3272 +#> 9.2840 5.8526 8.2257 -4.3659 1.3838 -5.5580 10.4893 -6.5285 +#> 9.4716 1.0466 -2.3091 2.0697 -0.4807 11.8881 -7.1691 3.6942 +#> -2.6656 9.8087 -8.4430 -15.8668 2.8323 -3.1499 3.0797 -0.2514 +#> -10.9524 4.5483 -4.3438 -11.9182 18.9662 5.8849 -7.9422 -18.0211 +#> 17.4253 18.2292 5.2624 -7.4562 6.4601 -11.2950 0.7170 -8.4622 +#> -9.6193 0.6903 1.9112 -0.8364 -1.7324 -2.6348 0.6072 -18.0377 +#> -11.3179 -4.8949 -9.0830 -4.9424 8.9319 12.5477 9.1676 -0.6539 +#> -0.5279 -7.8971 1.8857 -6.4948 -12.6447 13.8687 -5.3077 0.3727 +#> -10.4233 -0.6892 -2.8241 7.6385 9.2530 7.2591 7.7877 2.2405 +#> -9.8340 -2.5524 6.6764 2.0074 -6.1819 13.9545 16.0812 8.3872 +#> -3.1945 3.7308 0.2755 -2.7634 -2.0122 9.0720 -13.1779 -2.0101 +#> -15.7492 -9.8427 -1.9868 7.1928 8.1297 18.2289 2.5699 9.7198 +#> 9.1292 5.4583 0.4262 -3.8870 -0.3989 -25.9822 -1.8671 0.1439 +#> -4.2841 12.0578 7.4115 -1.3739 0.9352 -7.4396 -9.2268 0.9252 +#> -7.1374 3.7755 2.6228 6.6387 7.2836 -5.1350 -5.6117 -6.1560 +#> -6.7697 -5.5184 1.8841 4.6353 -14.6008 3.6154 8.1505 -17.1586 +#> -4.9002 1.8613 -4.3523 -5.4893 10.2204 -6.3039 14.3135 4.9045 +#> -0.8154 10.8824 0.1450 7.3946 7.5411 -2.2142 -4.3591 16.2671 +#> -6.2743 11.4999 -1.3266 7.2196 5.8977 1.2253 2.5984 -3.3130 +#> +#> Columns 17 to 24 0.1109 -8.6602 4.1653 9.9289 -2.8564 16.6534 8.1101 4.4451 +#> 0.9047 3.2154 3.7040 -9.7789 0.2520 -16.1604 -5.5157 -19.8182 +#> 2.4664 11.2283 1.5133 -8.6573 0.4796 -7.3835 -0.9546 -3.5618 +#> -10.9444 -3.3607 -14.2992 -7.1980 -3.0000 0.7098 -0.2666 2.2843 +#> -7.6134 9.6128 -2.1862 1.9244 15.3964 9.4247 3.8489 18.8211 +#> -7.0254 11.2304 6.9857 5.2021 -6.8423 3.9968 -12.0987 -13.4685 +#> -12.9116 -6.9782 -4.7683 -8.5014 10.8463 -1.0870 -4.5421 0.6890 +#> -22.3120 2.9633 4.8133 -0.4045 11.2195 15.4009 0.1240 -2.2965 +#> 6.2753 5.9701 -2.5405 0.9651 2.6084 -7.2175 5.5234 -0.6114 +#> 10.8033 13.4333 7.4037 5.2561 17.5797 -6.7208 -3.8077 10.9466 +#> 0.7625 4.0083 1.6502 0.7978 
2.5485 -2.0261 -3.7659 0.2436 +#> 4.8823 3.7094 13.0514 -6.3162 2.8484 -3.3460 1.4774 -2.0317 +#> 8.1490 4.1746 4.9700 -13.2374 12.3449 9.5299 -1.6022 12.8237 +#> -9.6539 22.7185 2.0802 -17.1319 6.8095 -6.3835 -2.9193 -6.1267 +#> -15.9983 -12.2554 -9.9446 -18.7150 -1.0571 -6.6002 1.9996 -11.0283 +#> -1.0477 0.2512 -12.7843 -9.4633 2.3716 -14.1197 -0.7984 -7.9425 +#> -5.9936 9.1579 10.7186 3.8784 -0.4395 -5.5418 20.8995 -1.0326 +#> -5.0643 10.4046 -24.2997 -26.7654 21.8561 2.0324 -10.0300 -5.5872 +#> 14.1897 -3.7722 -11.7379 -4.4245 -13.1528 -7.1254 0.9065 -9.4944 +#> 9.7989 -22.3986 7.1816 2.1629 -3.1439 8.9054 7.9801 -5.7809 +#> -7.3011 10.6445 15.4557 -2.4307 11.8996 -21.2262 8.4361 -19.3996 +#> 2.7022 -10.6509 2.3958 5.3890 10.9339 1.9430 5.5174 8.5673 +#> -0.7941 16.4490 12.1735 6.4868 0.6859 6.3813 7.3958 -9.0892 +#> 1.8388 5.1728 12.7578 1.3988 8.4592 -7.9833 4.2787 -5.4130 +#> -9.0848 -6.4597 -9.0238 -1.4956 11.2817 9.4407 -2.7686 15.5198 +#> 6.0515 4.1566 -1.0304 3.7152 0.4336 13.3752 -2.2339 1.8877 +#> -22.5849 -1.9260 7.2711 -5.7071 -1.6980 1.2614 8.5001 9.4663 +#> -16.9608 -28.3583 -6.1349 0.0221 -3.0540 -1.9245 0.7631 -12.7320 +#> 15.4369 -19.8765 -7.6738 -2.9603 3.5349 0.6490 -3.5738 -8.5207 +#> 18.5094 -5.8799 -8.7084 13.3065 -12.8286 7.0230 0.1117 -14.1954 +#> -8.4163 4.1708 -8.8344 16.7151 5.7089 -0.7503 7.7101 -9.3620 +#> -12.2627 -2.6774 -0.9829 -1.2696 0.2368 -6.6867 1.6201 -8.1525 +#> -1.5377 7.5412 13.3149 -19.6364 1.2173 -15.9384 6.3028 -10.5030 +#> +#> Columns 25 to 32 7.1455 -1.0906 -2.7605 -0.3507 10.5780 4.1830 4.5996 1.7428 +#> 3.6134 -8.3677 3.0470 -4.5174 27.7372 -19.6549 -3.9806 -21.1256 +#> -0.8752 11.5483 -5.3463 5.2390 7.5473 1.4855 17.2177 -1.7172 +#> 3.9694 9.8566 5.8868 -4.9664 4.0228 8.7470 -14.0383 10.2980 +#> -14.8703 13.0404 -18.9169 10.2354 6.5883 -2.5261 9.8096 8.6856 +#> -8.9858 -4.8288 -29.5893 23.5058 -13.8050 8.3925 5.4768 12.7193 +#> -1.7898 -15.4314 -0.3950 -18.1937 10.4765 3.2355 -15.7613 3.0058 +#> -10.5070 4.1645 -2.1300 1.5021 4.1172 -9.5280 20.0038 -23.7300 +#> 0.8192 3.8243 -1.8633 20.9778 -4.7873 -4.7055 8.2949 -11.0779 +#> -21.6078 5.3028 4.4726 -12.8169 -2.9467 -2.2130 -3.7131 -6.5314 +#> 7.4495 7.1566 -11.4765 -14.4477 17.2949 6.1652 1.4114 5.5598 +#> -0.8195 9.1307 -6.2796 -1.0628 -11.5630 3.4110 13.2530 -10.2064 +#> -15.0065 -1.3195 -0.0967 7.2141 -6.7762 12.2057 -11.2859 -0.8617 +#> -19.3545 16.9331 -8.0291 10.5120 4.1357 -1.9985 9.7107 -33.3601 +#> 23.4512 -0.6348 -8.7729 -15.9981 -6.2032 -8.8835 3.5345 -2.1418 +#> -1.5216 -15.0219 0.0789 -1.7791 -4.5648 -0.2017 -1.6768 3.5868 +#> -1.7076 -5.5204 -8.1446 1.9945 -0.7330 -11.8539 0.3281 -7.9996 +#> 2.7804 9.0645 0.0402 -6.8113 9.6398 9.3978 -11.1483 6.6381 +#> 5.7593 -14.7045 -2.2387 6.7050 -3.6270 -3.3686 -3.1270 -5.6681 +#> 12.5126 -16.0731 6.5076 4.1613 -18.1303 10.4273 15.7300 12.6138 +#> -1.3093 5.4387 -15.1621 -8.3894 7.9490 9.0548 -10.6498 -4.1357 +#> 4.8561 -4.9120 4.1151 -0.9482 5.3171 0.6698 0.4033 -2.0774 +#> 8.8815 -10.4062 6.0685 -13.0138 -6.7070 6.9667 16.0388 4.2778 +#> -0.7046 5.3964 -7.3500 -11.9831 13.2234 -6.0727 4.5220 2.2326 +#> -5.3754 -3.7180 -3.9801 -4.4031 3.3587 2.2696 -9.9712 -2.3492 +#> 6.2067 4.7716 0.7327 6.2060 -11.9394 -6.4664 -9.2583 7.6398 +#> -14.8866 -0.4833 -8.6024 -5.7619 1.6164 -0.5714 2.3140 -14.4776 +#> 8.2720 3.0408 6.0909 13.6438 2.1994 5.6075 6.0553 10.2403 +#> 8.3371 -6.4873 2.7770 7.7559 -21.3662 31.6905 -13.0160 -7.5158 +#> 22.1024 -6.1697 -16.2355 8.0592 -4.7968 10.2041 9.6756 16.9020 +#> 2.1754 -1.6632 -16.7546 -5.1437 -2.9404 
5.8497 8.3028 2.7146 +#> 7.1739 2.7593 -15.4200 5.7264 -19.3383 -20.0260 18.7217 4.6537 +#> -6.3425 0.0260 -1.8281 -9.1379 2.8205 15.9186 -2.1441 13.4606 +#> +#> Columns 33 to 40 6.8411 -4.4396 -9.5836 -1.6676 6.9374 14.1813 11.5965 9.2761 +#> -9.3780 5.6008 2.0125 6.8668 -3.4065 2.7283 -17.4808 -17.3281 +#> -3.5577 -1.7638 -11.5944 -2.3791 1.5909 -0.9679 -0.1730 4.8436 +#> 2.9281 10.0167 -6.1779 -15.2507 5.7254 4.8623 10.4327 6.4091 +#> 16.3262 0.8307 -18.4928 13.7244 -0.3390 2.3932 7.1833 -5.6180 +#> 5.4358 -9.5330 -6.5301 14.0511 -1.7974 -3.0830 -8.5173 -11.6941 +#> -0.8204 11.9848 10.6886 -7.7296 -10.2907 -9.2758 11.0179 -2.0856 +#> 1.9375 -19.4346 -4.5586 3.9863 -12.2739 3.3930 6.9249 -1.0322 +#> 11.1694 4.4905 -1.8620 6.5131 20.0568 10.0477 2.8180 8.0744 +#> -5.2887 12.7394 12.3157 -5.5464 1.6756 -3.8787 5.5408 -14.1235 +#> -0.4279 2.5755 -0.0170 -20.2724 -13.7843 15.1414 3.4993 -14.2596 +#> 2.0926 -6.7771 -4.2083 7.2270 -2.0337 1.8369 -8.8234 0.9679 +#> -11.5875 -11.0893 5.8020 -5.4750 5.8504 -2.8166 4.0331 -4.4042 +#> -3.5533 -5.2732 3.6338 5.3809 -6.3057 6.1845 5.8185 -3.7466 +#> 13.9148 -5.9032 -3.1771 -14.6675 -17.9047 -7.0205 -7.5716 -8.0607 +#> -0.0368 5.3509 0.1135 8.1883 -6.2960 -5.8944 2.2877 5.0143 +#> 7.9204 1.6694 2.4287 3.8599 -10.1610 -6.7682 3.0148 -1.4434 +#> -0.4123 7.4901 8.6897 -21.7944 -13.1354 -0.7698 4.3561 -10.5247 +#> -1.6998 -9.8662 14.5214 18.2987 2.7539 -15.8786 -0.8604 11.9122 +#> 12.1845 -7.1130 4.7102 -5.6439 -4.5554 -10.8677 0.7087 8.4045 +#> -16.4750 1.0094 -8.3482 16.7520 9.9351 7.1005 7.3992 3.0023 +#> -5.8115 26.9794 -3.1757 14.0404 -4.5152 3.0581 7.1578 8.3886 +#> -2.6003 4.8403 1.8387 -13.1631 0.2252 15.2221 10.0423 -0.5418 +#> 1.0098 0.1739 7.2735 10.8776 -6.1047 -1.8527 2.8180 -2.4202 +#> -22.3656 -1.9155 -9.6280 -16.1306 1.2218 9.2338 -4.9751 2.4195 +#> 3.2826 10.7070 -4.0028 9.0551 22.1263 2.1284 -0.9929 7.2130 +#> -6.6908 -8.4050 7.3130 -8.8912 -10.0687 8.0647 5.3002 12.4514 +#> 7.5722 4.7857 -14.6610 3.8883 -13.8927 -6.2390 3.6901 2.1713 +#> -5.4662 0.6774 16.0048 -8.5734 6.1506 -10.2157 -0.5454 1.5779 +#> 1.0933 0.9105 -4.0711 -3.8430 5.7126 -1.4640 -16.7412 -13.0553 +#> -12.5979 -17.3075 0.1075 1.2570 -4.8064 -2.8653 7.6689 -4.2270 +#> 11.8769 -14.2723 -22.5438 -4.0313 -2.4251 -0.0567 -12.0291 -2.0781 +#> -21.3470 0.0947 14.8395 1.5358 -1.8869 -0.2724 -0.1991 -4.5065 +#> +#> Columns 41 to 48 6.9446 -4.2671 3.4144 -1.1094 5.2336 1.9632 -16.7952 -18.1477 +#> 9.6741 -16.3215 -6.5841 -5.6629 3.5887 2.3931 0.7793 3.2301 +#> -5.8765 0.6689 13.0392 -7.3758 4.0077 0.1969 -4.1479 -7.8417 +#> 2.4437 21.6355 -0.6811 4.3060 -1.1801 7.3260 -4.1952 -12.1952 +#> 3.3129 -2.8117 16.2947 -5.0317 -2.8167 10.5863 13.1083 -13.7058 +#> -1.5049 -10.6149 1.1306 -0.9964 -8.6222 11.5668 -11.7854 -5.0032 +#> -5.3137 -3.5947 -10.3030 1.4657 0.1443 -1.2825 7.0504 3.9803 +#> -4.3435 -3.6132 -15.7955 4.1468 -2.7513 -0.3367 -0.1255 -7.0169 +#> 8.5116 6.4778 1.1936 0.5842 -0.2371 -2.0299 -8.6984 8.5404 +#> -12.2470 -2.3939 -1.7271 -5.3625 3.8095 3.1663 2.8408 5.2890 +#> -0.2210 10.5781 2.6809 7.9563 6.2104 1.6067 7.1071 -6.9584 +#> 8.2769 2.9826 8.8886 -7.6564 24.0268 -10.4096 1.1162 4.4026 +#> -2.1162 2.8869 -1.1564 -11.1091 5.8721 -1.7154 8.7926 -13.7035 +#> 4.7409 6.4705 2.7221 2.0685 -3.5677 7.9069 -6.5497 3.9448 +#> 2.6934 0.8747 6.5580 -9.8086 3.8814 -8.3940 -15.2200 -3.0584 +#> -5.0661 -2.5881 3.7141 -9.0621 1.3558 3.8149 -11.0696 -4.8240 +#> -0.1446 -6.4048 -2.8131 9.9504 1.2759 -5.2146 -6.3272 -3.6930 +#> 0.1263 6.3730 3.8088 -1.7857 -3.4236 21.0228 3.1908 
-7.5723 +#> -0.4359 1.1442 5.9393 3.1697 -4.6568 -5.0381 0.3295 0.1785 +#> -5.6850 -7.3113 -5.6905 -8.5304 3.4065 -12.5474 -4.2248 -3.9635 +#> 10.6694 1.2094 -0.2402 -0.6552 1.1061 -7.0099 6.4595 6.0198 +#> 3.4160 -5.6929 -0.2469 -2.6480 -5.4434 -1.6890 3.9987 -13.5543 +#> 6.4329 2.1033 2.5955 6.5692 -11.0004 4.3492 -0.2325 -2.4060 +#> -4.9823 0.9858 -3.5069 3.2410 4.9803 -6.7506 -4.1168 4.7269 +#> 4.4581 5.0462 -5.1053 -5.5269 -3.4229 -0.7907 -0.7029 2.3380 +#> 4.8148 -3.2190 2.4329 1.2624 6.6487 -0.9443 -6.4351 7.2845 +#> -14.3486 3.3816 -1.9723 -3.9340 -2.8140 2.5117 -9.7417 -10.1351 +#> 8.1523 3.6266 -5.9597 -0.3466 14.7332 5.0281 2.0042 -0.7068 +#> 1.5586 3.6073 -7.7473 -25.5572 10.1104 4.5242 -5.0179 4.7243 +#> 8.7372 -20.9357 12.0967 -3.2205 -2.7543 1.6430 -3.6255 -4.6444 +#> 3.1438 1.6858 -7.3364 2.0136 -5.4578 7.2369 -4.5787 -2.8689 +#> -10.7882 -1.9402 7.2337 4.8458 -3.6868 -6.1800 -14.5916 9.2025 +#> 6.0829 0.8040 8.5610 2.7991 -10.8768 0.4372 -3.2278 -5.7635 +#> +#> Columns 49 to 54 -13.7861 4.1895 -2.6849 5.5715 4.6134 -8.8529 +#> 3.9196 -1.0141 -4.3942 -9.2992 -7.6787 -7.5954 +#> 1.9036 4.0315 -6.0885 8.4513 12.0977 -8.4275 +#> 2.5213 2.3490 -2.5723 -10.0099 -11.1267 -0.9692 +#> -1.2945 -0.2695 -4.9368 -10.3326 2.8373 2.8081 +#> -4.4903 -4.6993 -0.7000 -2.2604 -0.7307 3.6957 +#> 7.8396 -9.3960 1.6068 -2.4263 -9.6085 -5.5385 +#> 8.2954 4.4205 3.6761 3.6611 3.0474 4.6097 +#> 22.8393 14.4712 10.6769 3.7294 -5.8846 -7.9642 +#> 12.2628 -18.3305 -15.0729 5.8586 13.6415 -4.5394 +#> 1.3358 -1.9427 -7.2661 3.4302 -11.0954 -0.8750 +#> 26.2816 7.4058 -17.6920 5.9520 -3.1000 -3.3631 +#> -4.4457 7.8442 -13.7301 5.5825 3.2497 -4.7904 +#> 3.0219 -12.1137 5.8340 -4.9455 -5.6589 -2.7322 +#> -1.6002 9.9409 -3.7162 -1.8972 1.5393 -2.9746 +#> -5.6452 3.6341 -6.8602 1.8428 1.0427 -3.9592 +#> -8.7532 2.1036 3.1808 -4.1851 8.4547 -5.4933 +#> -13.1717 8.5755 -15.8391 -11.1131 6.9142 6.7456 +#> -6.1324 8.8025 -9.4276 -5.5035 -3.4948 -2.1023 +#> -9.5285 5.5477 -7.6595 3.9417 2.6315 3.9414 +#> 11.4401 -2.6202 0.4147 13.7419 0.3354 -3.5892 +#> -0.3527 -0.6917 6.2912 -5.7992 17.0025 5.5500 +#> -6.5020 2.7133 -0.0885 3.5808 15.8258 -1.8593 +#> 12.5222 -10.9913 5.9336 -10.6480 -4.6576 -0.4921 +#> 17.6329 -15.9664 -1.0999 9.4738 -2.5659 -2.3957 +#> -2.9495 -1.7847 -2.6418 -8.1787 -0.0291 7.0303 +#> 0.4696 1.8555 2.1434 13.2853 8.1429 -7.6976 +#> 4.8279 0.7118 -1.3883 -1.3308 0.9165 -0.1007 +#> 7.5085 17.3070 -17.9927 1.4501 -15.0000 3.7068 +#> -6.4306 -7.2700 -13.2596 9.3895 -4.2451 2.0024 +#> -11.0030 -14.6823 -3.8624 12.9594 11.3709 -4.8381 +#> -14.5862 -9.1310 7.3428 -3.2828 13.3119 -5.4210 +#> -8.0792 10.3616 1.0423 -1.0622 6.3049 -8.3263 #> #> (17,.,.) 
= -#> Columns 1 to 8 -0.0036 -3.3190 -7.9587 3.2906 -13.0091 4.0434 11.5652 -12.1427 -#> 0.1164 0.8379 4.9234 -1.3137 0.0675 -5.0679 -4.4452 -4.7952 -#> 2.5500 -3.7011 0.9437 1.4541 1.8920 -14.1025 0.3019 14.6240 -#> -1.2880 4.0459 1.9433 3.9558 2.5960 2.6825 -9.3180 14.3707 -#> 1.4861 -3.1195 -1.5166 -4.0141 -7.8198 -5.3168 16.8417 13.6179 -#> -2.4961 -1.8708 -3.0573 -7.6208 14.6806 10.5994 3.2349 -19.1036 -#> 1.1798 0.3461 9.7883 -3.9783 3.7118 -5.1532 -10.9065 17.8243 -#> 3.5643 -8.1262 0.6550 16.2425 7.3746 0.6471 -4.3637 16.9330 -#> 8.2370 0.4671 8.6061 -3.5247 -2.3181 0.9235 -2.5817 -4.7410 -#> 1.2592 0.1341 -8.5860 -10.3599 1.3681 3.0343 -3.2627 -4.4536 -#> 2.4246 -4.6030 -0.9959 1.6058 -1.8115 10.3210 11.3543 -14.6947 -#> 1.3383 -1.8282 -1.9980 -7.7775 0.9999 10.5046 7.7484 -1.0406 -#> 6.5106 -5.1481 -8.9645 2.7458 13.4778 -1.9987 -8.3724 -6.8411 -#> -6.7584 -5.6272 -3.1131 18.0075 -6.0840 -12.9692 4.0645 0.7947 -#> -4.2316 5.9853 7.8798 -3.3417 -11.0788 -4.6102 7.5990 -4.8012 -#> 2.0482 2.4839 2.4645 -0.5146 7.5531 -6.1914 -5.2242 5.2656 -#> 2.3681 -6.2423 -1.4921 -1.4588 13.6384 11.2711 6.0663 0.3827 -#> 1.7283 0.8490 -4.8233 -8.1522 1.8019 -3.3521 1.7968 13.4382 -#> 1.9979 13.1230 7.3525 -13.3938 -4.5078 -1.8459 -14.6914 -14.1974 -#> -1.2166 -5.0405 1.4720 3.4205 -11.9439 -16.7012 5.8988 9.3834 -#> 2.9653 -4.7849 11.0924 -3.6218 6.1173 -2.4609 -12.5736 11.4327 -#> -3.2606 12.5382 9.5843 -9.3089 4.5541 2.0092 -20.0066 -9.3231 -#> 0.5372 6.2045 -6.6611 -15.5596 -8.4444 7.8671 -0.7634 -4.9848 -#> 1.0054 0.0214 -4.3680 3.8120 -2.7707 -7.4050 -8.1512 -9.1003 -#> -7.9268 -6.0573 9.4077 1.9014 0.7067 1.0413 14.1705 -11.5789 -#> -3.5318 8.0966 9.8679 -7.0404 -7.6394 3.9163 -2.4689 0.5888 -#> -11.6240 2.5042 14.4123 2.6386 -19.1348 4.2073 11.3132 4.3702 -#> -4.9533 1.3656 2.4042 -2.3242 -2.4680 -8.1332 13.3980 -0.8918 -#> -7.9514 6.3341 5.5485 2.2275 1.4123 -1.7811 3.2591 3.3837 -#> -6.7042 -2.2874 -10.2507 20.4313 9.5276 8.1279 -1.0739 12.8549 -#> 2.1646 6.3275 -8.4244 7.2597 -2.9179 -12.6174 12.2693 -3.4996 -#> -2.9582 1.8840 -1.1082 4.9597 -8.9087 5.5940 -13.2627 15.1192 -#> 3.8620 15.6053 -14.6896 8.3630 5.0803 -12.6752 -7.6925 2.4458 -#> -#> Columns 9 to 16 18.2032 -19.6725 5.0508 -16.0836 9.1070 -3.1893 -5.8347 -3.0840 -#> -5.6912 -0.4025 4.0813 -1.5855 -9.3055 -12.0094 -11.9593 -5.9454 -#> 6.7649 18.8722 -8.4775 10.3950 -4.3235 -0.8341 13.1874 3.2388 -#> -8.8735 16.5639 -1.0106 5.3047 10.7215 -8.9410 15.9507 2.6645 -#> -0.4064 15.6966 1.1777 7.8093 18.6075 0.4738 1.0298 -12.1063 -#> -2.6083 22.5461 -18.7206 15.4192 -32.0458 7.3132 -8.8752 -11.7511 -#> -13.7117 -11.5101 -0.8774 1.9782 -4.6365 11.1330 -1.2761 7.9712 -#> 0.2302 13.4904 1.5463 -1.8252 -9.0678 9.8361 -8.8496 1.8389 -#> 5.2399 -2.1986 -10.1448 -6.6668 -12.4374 9.5085 -8.5832 3.4156 -#> 1.6309 11.2047 5.7111 5.6381 -10.8640 -0.8878 3.1294 -7.3463 -#> 0.7705 9.0933 0.9354 -3.5888 -14.0555 -11.2154 15.2315 -3.7600 -#> -1.7876 8.2053 1.9111 -0.1420 7.0392 -7.9765 4.7696 2.9816 -#> -0.0192 -12.5377 -9.9169 2.0928 12.4499 2.4241 -1.6729 -4.4325 -#> 8.1069 11.8041 0.5192 7.5075 -3.9413 -1.4742 8.3041 6.3053 -#> -13.3357 2.6357 6.9307 9.0956 -4.7934 -2.9089 8.3363 -13.9510 -#> 7.2659 -13.8078 -1.1089 -15.7769 9.4236 -5.1112 -6.0908 2.2405 -#> 7.9741 -7.2104 -7.6181 20.0668 -10.3272 -1.1638 11.3504 -12.6043 -#> -9.1118 9.9215 -0.0853 15.8825 -8.3135 -1.1377 3.6858 0.1344 -#> 8.6729 1.6374 1.3439 -9.6331 1.6315 -8.2283 -9.1082 4.5495 -#> -5.4776 6.7521 -3.4657 10.1412 11.8968 14.7285 -1.7380 -11.2804 -#> -2.5355 -4.1345 2.2051 
-18.1531 5.0060 -6.3627 4.9419 4.7983 -#> 9.2168 -8.3501 1.9289 11.9154 -9.8096 11.2362 9.2350 -8.4550 -#> 4.9740 12.1595 -3.6848 10.5441 7.1024 -6.7502 10.8825 -5.2812 -#> 21.5238 -15.0916 12.0990 -2.5791 12.4567 1.5288 -7.3818 16.1056 -#> -0.0058 2.3107 13.5580 7.0735 4.1138 10.0373 -8.5884 9.2899 -#> -1.8384 -0.6716 -4.3395 4.3135 13.5270 2.1079 2.9250 -11.1521 -#> -0.6385 -10.9262 -4.9343 5.1412 -14.9500 -13.1119 13.7721 2.5448 -#> 2.3601 5.2188 0.3091 0.2517 -6.2330 -9.7139 7.5875 -0.9963 -#> 1.1930 7.7077 -11.2370 3.9249 7.5846 -2.9404 1.1477 -1.9224 -#> -2.4618 3.0866 -5.5732 13.2863 -11.5555 8.1338 -1.0781 -8.0933 -#> 4.2506 -6.1249 -2.8451 6.1572 13.4412 4.0567 -1.8001 -1.1474 -#> -16.7791 13.2047 4.3998 5.3425 -3.4872 4.9933 11.7567 -7.1929 -#> -4.3003 -4.2831 10.9765 5.8159 -5.9722 -1.2655 -5.2305 18.9444 -#> -#> Columns 17 to 24 -12.1367 1.4567 -1.6269 8.3249 -1.6047 3.3990 -11.6582 -5.8878 -#> -2.3760 0.4142 9.0234 -13.6288 -8.3573 -6.6818 7.0287 -2.7305 -#> -9.1737 0.7451 20.8649 17.2731 -19.0187 9.8993 7.9144 1.6249 -#> 9.1767 -5.3467 -4.6508 -9.8663 1.7865 -8.4075 5.7143 -0.6771 -#> 10.6411 -19.1595 0.1464 -0.1775 -1.1914 -10.3426 -1.2584 -7.9563 -#> -16.1971 -13.8179 -9.4703 20.5316 -9.9628 -20.8515 9.6431 -10.2868 -#> -9.6250 10.1539 14.2932 -11.5516 -4.8499 -2.0416 12.4956 8.9299 -#> -12.3655 0.1540 9.0695 -9.6254 -7.8712 -19.9817 20.3502 4.4879 -#> 1.8061 -8.6229 -6.4227 17.4155 4.0062 4.4123 -12.7788 -12.0072 -#> -4.9458 -5.6366 -4.4784 0.7285 -4.5531 -13.3600 -4.7004 -15.0519 -#> -0.2497 -0.2428 -8.2563 3.6055 -14.8665 -2.6090 -15.8119 5.3822 -#> 4.3693 -6.9149 -12.2015 4.5604 -0.3401 5.8007 -17.0388 0.4493 -#> 15.0418 -0.5247 5.2963 3.6107 -8.5075 -11.3623 -12.6868 1.0413 -#> -4.0378 8.6373 2.0527 -0.9846 -0.3666 13.2075 6.5006 9.9071 -#> -0.7308 7.5805 -3.0739 -0.0865 -14.7430 -19.0587 12.9800 -11.4091 -#> -6.2874 16.6758 7.1346 -7.2085 -4.7202 7.4124 3.7254 5.7929 -#> -11.5627 6.5587 -4.1144 19.9498 -6.9683 -10.0468 -7.7720 15.8065 -#> -12.7454 8.7868 12.2793 -10.7621 -4.4990 2.5450 -0.2208 -17.3568 -#> 1.5817 1.2042 8.9885 -2.1994 4.8149 15.1747 2.5702 -2.2010 -#> 3.3618 3.3082 -4.8614 -7.8732 -11.8926 -7.8178 -5.5999 -2.6310 -#> -7.0714 -4.9754 6.3195 -3.3233 5.6727 17.1982 2.4234 7.0407 -#> -0.0079 4.2773 -2.5152 -5.1012 -6.6415 7.7634 -1.7209 15.0072 -#> 0.7598 -0.0868 11.9345 15.8469 4.9130 -2.6162 11.1826 -5.4076 -#> 3.9734 2.6924 -6.5429 4.9290 -8.8703 13.0501 1.0853 -1.9093 -#> 0.0759 -10.3995 -20.7333 -6.7558 -3.4664 -5.0556 -3.4500 -2.6451 -#> 11.9588 -0.9773 -5.5220 -9.4147 9.2083 2.0006 10.9240 -26.0863 -#> -8.0222 -7.1364 -6.2283 -4.9439 -18.3233 -12.8074 0.2920 3.7361 -#> -14.7121 1.6619 -5.7169 9.3542 3.5858 1.4770 -0.7576 -9.8484 -#> 1.3114 -0.9283 18.1555 -3.2016 4.1839 -9.9645 2.9604 -3.2908 -#> -5.9759 -8.9707 -0.6552 9.0356 -7.6296 -30.9177 -8.4567 9.6805 -#> -7.4921 -3.0522 -4.5990 -1.9557 0.6606 3.0873 -0.2393 -6.4692 -#> -1.3348 -6.8173 -5.2836 -9.4862 13.9301 -0.0991 6.0113 -13.2417 -#> 4.5449 5.1664 -15.3847 1.6871 7.4429 -1.8639 -16.2640 4.9284 -#> -#> Columns 25 to 32 -14.1407 14.2955 3.8695 -6.0790 -5.2760 6.6175 3.1503 -5.2982 -#> 2.2688 -11.4986 -2.2534 2.4166 13.8815 -4.4936 -12.1703 -4.5053 -#> -1.4144 -5.8853 19.2525 14.6247 19.1070 11.7330 5.7779 -4.3484 -#> -3.9388 -1.7344 -10.0980 9.1879 -8.3052 11.6017 -14.8255 3.6860 -#> -10.8255 7.7143 13.6104 -3.6169 -5.8611 -6.1461 10.0281 -2.9510 -#> 3.6030 -19.8067 4.5132 -1.7495 -11.6521 -12.9786 19.7649 3.0201 -#> 5.1040 -18.6112 10.5612 5.6103 -2.6788 0.9387 1.4030 1.6843 -#> -1.2084 
-16.5771 15.1098 9.0082 0.5725 0.1737 -1.0725 -11.1100 -#> 8.9186 -5.9483 -3.8864 -6.2679 17.5105 7.6420 1.4482 -30.5893 -#> -5.3877 -5.3120 1.5567 -4.8999 -12.9138 5.5266 -4.2428 12.0746 -#> 4.3041 -1.7397 -6.7371 0.3417 -0.7745 -5.9680 -4.7485 -9.4796 -#> -1.8727 -14.7426 -10.1967 8.3783 -5.9343 -5.3843 10.4344 -4.4727 -#> 1.0782 4.0079 0.5302 -19.9129 13.8336 -5.9290 2.6223 -10.6001 -#> 3.2687 -4.3255 -7.6249 -5.0333 3.5369 2.8440 1.9130 13.2635 -#> 0.9298 -3.1962 4.1605 -0.2121 -19.9007 -2.3541 -9.4520 1.5965 -#> -9.8351 13.3726 -3.0548 1.2052 -6.0823 8.8489 -14.4292 4.4013 -#> 13.7231 23.7698 -9.7120 5.3621 -10.5620 -14.4767 -2.1569 3.0922 -#> 17.7819 -11.9477 8.1932 -11.3599 5.3066 3.3444 5.0477 1.9190 -#> -11.2310 -17.4655 -21.1093 -16.8335 11.1529 14.9433 -1.9176 0.8984 -#> -8.4986 1.0614 5.4820 -7.7777 -7.8456 -2.3261 6.5574 0.4245 -#> -5.7616 -5.6309 18.0811 10.8869 4.5236 -8.1555 -11.5226 -2.2178 -#> -6.5431 0.4128 -2.1879 8.4302 -3.1771 -6.8854 -6.7822 -16.3649 -#> 3.8306 9.3733 2.6196 13.6446 6.9131 2.6113 -12.4660 7.1945 -#> -20.6794 -3.3141 -15.1161 -15.1085 6.8808 24.0385 13.2143 -3.2077 -#> 9.8282 -19.6173 -10.8193 -0.2638 8.3347 14.2335 -5.1957 0.3102 -#> -3.0323 9.2271 1.8307 0.7336 -7.8705 -2.3976 -3.2060 3.4770 -#> 3.7052 3.8880 4.7929 -8.8234 -3.5462 5.8435 11.7265 5.0594 -#> 11.1393 -4.2118 3.1103 -1.0103 5.0755 5.7154 5.7432 5.4446 -#> -2.5627 11.9907 5.3111 3.7991 6.3624 26.5475 -4.0819 5.1281 -#> 4.9443 1.2031 -10.6381 1.1163 -11.6207 -7.0394 7.4165 15.7235 -#> -14.2614 3.5230 -7.0514 4.8836 11.7337 -6.1559 -7.5084 -3.4756 -#> 0.3203 9.2310 4.0731 2.0453 -3.5192 7.8414 -11.4932 -3.4787 -#> -1.9585 11.0028 -7.8681 -5.2722 -4.7085 -0.3444 -8.8240 2.5753 -#> -#> Columns 33 to 40 -16.2086 3.6009 4.1534 -4.0031 0.7665 6.8490 7.3541 8.2326 -#> -11.6117 0.8177 2.1269 3.1290 4.0927 12.8487 -2.8535 9.8357 -#> -20.3408 2.3152 4.2274 8.3990 -4.7943 -18.4804 21.1509 -7.6883 -#> 0.1038 12.0059 -14.0866 7.1420 -2.2766 2.9557 -9.2178 -4.7261 -#> 4.3252 7.8233 -10.1021 0.0185 6.5683 -8.0528 -3.2560 0.2315 -#> -8.5887 -11.0928 -14.6416 3.7763 4.4483 -10.3446 1.0966 1.6011 -#> 11.9414 -6.7054 -2.7639 -4.9038 -6.6649 3.8405 5.4326 0.9827 -#> -19.0930 -8.3548 13.6712 -4.4705 -7.1463 8.9296 -1.0585 20.3146 -#> 16.2528 -3.6523 0.9896 -21.1536 2.3888 4.4423 -6.8638 6.4089 -#> -1.6519 6.3646 3.1551 9.5345 -4.8651 5.2906 18.3544 -2.8488 -#> 2.6591 4.9864 -21.2580 -10.1801 6.1868 3.7901 9.3343 -12.3642 -#> -1.5676 -9.3029 -11.8483 -2.9516 -1.4575 -6.7623 6.5269 -3.6121 -#> -2.2296 -7.6398 6.7909 -5.8588 6.1594 12.8406 -4.1241 9.5473 -#> 11.0382 10.7851 -4.9545 11.5578 -4.3485 -11.3629 -4.7165 -5.0466 -#> 2.5669 -4.2435 8.9381 0.1753 8.1956 -0.5499 2.4431 21.6045 -#> 4.7957 -14.3638 -1.0203 5.1330 -5.1298 -4.5777 4.2896 -5.9634 -#> 24.2212 -21.3843 -6.0477 10.0261 4.5586 2.9284 4.3612 -9.6834 -#> 3.6108 2.4465 4.1788 -12.8152 -10.0255 19.3243 9.6198 7.5142 -#> -8.3931 -8.3866 -14.1537 -3.7318 -8.6589 -18.7726 8.6055 17.5604 -#> 0.3024 -15.7799 -0.8323 5.2878 1.8991 -9.5312 3.1859 2.5497 -#> -1.8618 12.1423 -3.1689 -2.1018 6.6899 -7.0607 -4.2293 0.3692 -#> 6.0903 -19.3583 0.6555 -4.0463 1.7142 1.0252 5.1535 -2.7416 -#> 2.6232 16.2810 -4.7329 12.3208 -6.1677 5.3871 1.9002 -0.7168 -#> -10.6743 -21.4390 1.2288 1.0133 1.3441 -7.4375 -8.6747 -3.8495 -#> -2.5638 5.9309 4.8542 -4.5298 23.0909 -7.1586 -0.3635 1.0118 -#> -7.4935 9.6163 -7.2168 7.3669 -16.4921 3.3259 -2.4198 10.2347 -#> 4.2367 -0.5784 16.5164 4.3086 -7.5247 1.8854 3.8563 -7.8103 -#> -9.3610 -8.2013 -2.5560 12.9635 -3.8509 -10.7876 
1.4986 -1.3885 -#> -14.0475 -2.9250 -2.8980 14.5107 -5.2680 -4.4482 -5.0714 14.8558 -#> -6.7825 -16.8933 9.7776 1.8856 4.4857 17.3968 -0.1486 13.6344 -#> -4.0811 1.1356 -6.8292 -15.4074 11.5754 -3.7018 8.3617 -1.3083 -#> -11.2235 4.8559 3.5214 23.2757 -6.6173 3.2491 -0.4854 9.0660 -#> -13.1656 -2.8782 9.3092 -10.9921 10.8435 1.7970 18.7206 -7.0084 -#> -#> Columns 41 to 48 -0.4794 -4.7200 2.1935 -2.2066 4.2906 -7.6101 10.5657 6.5193 -#> -10.3014 -12.9307 1.8835 2.5713 2.2012 2.0539 -13.8011 7.5363 -#> -17.2547 -12.5349 -20.9318 -4.4617 -0.9685 -0.3390 5.2662 -6.5757 -#> -3.1898 5.6737 -7.1999 14.6965 -5.5819 -2.1300 0.7077 -3.1309 -#> -18.0966 1.1532 -11.4858 7.5103 -4.0690 8.0907 -7.2170 -7.7530 -#> 7.6633 1.8808 5.7849 7.3424 -3.0240 -9.6336 -4.3158 -4.6648 -#> -8.0560 1.5696 3.1842 -11.2185 7.8985 8.3951 -3.0128 -21.7698 -#> -10.0482 14.3059 5.0131 -2.6909 9.0579 1.8615 20.8154 9.7676 -#> -1.6295 4.7432 -4.8663 -3.7719 1.1519 -3.5064 15.8951 2.9470 -#> -5.0773 -6.4014 0.0620 10.7953 -1.3257 3.5800 -8.8108 3.2778 -#> -7.8998 4.0090 13.3705 -15.3735 -3.4188 3.6468 -1.2002 8.7656 -#> -2.2991 7.6728 -7.0008 19.0875 -10.4308 -5.2981 -6.5476 -10.0805 -#> -7.6098 -5.1771 5.7833 4.8127 -7.2318 -4.9819 -16.0469 1.4642 -#> -1.1119 2.1865 5.0103 0.7753 -13.7961 -3.5765 11.4363 11.7848 -#> 5.3285 0.6544 -4.3014 -3.7732 -6.6757 -4.6680 9.2389 -3.0111 -#> 14.9243 -18.9852 1.5592 5.8168 3.2338 7.5046 -9.2199 -1.9998 -#> 5.8316 -16.0814 24.0642 1.9585 -3.5264 -6.1538 -9.1210 -2.4992 -#> -13.5651 -5.3863 -7.1816 17.6731 -13.0657 7.3824 1.5581 -5.1607 -#> 13.1493 4.8888 -5.5545 0.1581 -0.9223 11.4728 1.0332 -0.4469 -#> 1.6263 3.3358 0.6984 -6.3469 14.6469 -9.9027 -5.6818 1.4462 -#> 7.5497 7.6437 -17.7139 3.6806 -12.6042 4.5378 7.5226 -16.3628 -#> 15.6895 -10.2874 11.3661 -2.3743 -5.0022 2.3989 6.9662 1.3720 -#> -14.7766 -5.7658 -9.8210 -3.8484 9.2184 -5.0199 -10.5909 4.3750 -#> -3.3873 3.1920 10.2297 8.2557 -5.2131 -17.5189 8.5251 12.7148 -#> -4.4102 13.5598 -20.2984 -13.7089 -0.8403 3.0563 10.2671 -2.3559 -#> 15.3842 5.0043 14.6081 9.0073 4.0625 -3.2320 -2.1856 -2.8255 -#> -6.7683 7.2567 4.1482 -2.9104 -3.1228 -1.1391 10.5654 6.2337 -#> 4.1170 8.3056 0.6752 6.1380 -1.4899 6.1811 7.6111 -5.8355 -#> 10.8976 -17.5127 -13.1384 4.9627 -13.8710 14.4422 -2.8103 3.5432 -#> 7.4061 7.1341 -0.5488 2.8806 0.9175 -2.6211 14.1834 31.8526 -#> 12.3116 0.2204 1.3383 -13.2747 -2.8086 10.5885 -3.7188 6.7941 -#> 0.7298 13.8047 -3.9216 -1.4572 7.6106 1.1530 8.8140 -11.3650 -#> -5.5953 -14.6633 2.4963 2.2599 -9.8951 4.3039 -9.2876 -5.9474 -#> -#> Columns 49 to 54 -7.3718 0.1724 -6.6997 9.3962 -6.5077 -3.1607 -#> 23.4490 -5.6180 1.4565 -7.3384 -1.8619 4.1763 -#> -0.4912 2.9085 -4.5126 -6.7124 5.1127 -3.9138 -#> -6.0853 3.5843 3.1338 -1.4118 0.0586 -5.0726 -#> -0.1282 6.5454 -0.7282 3.4190 2.0427 5.7622 -#> 4.9669 -6.5563 -12.0750 -7.4261 1.5339 1.5126 -#> 8.9537 5.2251 -4.8157 -0.5032 3.1482 -3.5577 -#> 0.7154 0.0666 -7.2519 2.2708 5.7407 -4.2322 -#> 0.7558 -4.7962 7.9415 11.3416 -3.6383 4.6629 -#> -3.2890 -0.2584 6.8779 -1.2153 3.1597 -6.3844 -#> -8.5711 10.4096 9.5683 -2.4923 -11.0658 -11.7025 -#> 2.7361 6.0141 1.2901 -1.3441 6.5208 -3.1641 -#> -6.9538 -8.9816 1.3693 2.0008 -4.1646 2.1112 -#> 0.4658 10.2723 5.8848 5.8259 13.0186 -2.1762 -#> -11.7348 10.4812 -4.3369 7.0428 11.4794 -2.2028 -#> 7.4112 -19.0299 7.3612 1.7996 -5.1023 3.9962 -#> 4.9686 9.4521 8.7250 -0.0540 1.7540 11.1828 -#> -8.2419 -10.3812 8.1248 3.6823 12.9724 -3.7185 -#> 2.8024 -12.8766 3.1866 10.2768 -4.1207 5.6507 -#> -1.8880 14.1398 4.2122 -11.5026 11.6045 
-3.2251 -#> 17.1062 -8.0658 -5.3075 0.5834 -0.4006 -2.4423 -#> 8.6359 3.7877 3.2643 3.3035 -3.9406 0.0275 -#> -6.1192 17.6782 7.8204 -6.9942 0.3734 -7.6414 -#> 11.0851 6.6772 3.3842 12.7506 5.1887 7.7000 -#> 14.8332 12.8735 2.1239 3.1732 5.5896 2.0718 -#> 7.2465 -5.4545 3.8073 -0.9183 7.1958 -0.9900 -#> -5.3976 8.9492 1.4537 1.1187 2.3930 -5.6091 -#> -4.3078 4.0652 -8.3012 -19.9772 -1.4070 -4.8790 -#> 1.6937 -16.0303 0.0988 -2.5211 -4.9906 9.1394 -#> 0.4467 17.8078 17.1774 -6.6270 12.9747 0.3961 -#> -11.5709 12.6492 13.1732 3.6568 -3.5198 0.5396 -#> 12.2177 7.3196 -6.3694 -1.8782 3.0797 -0.6957 -#> -2.2324 14.6047 -0.8001 13.8972 -4.3510 5.3483 +#> Columns 1 to 8 5.2062 6.4065 8.5312 9.0955 -0.5235 0.3085 -7.8041 -10.1075 +#> -1.5281 2.3107 6.2066 7.8368 -7.5168 7.6761 -6.8371 5.9547 +#> 3.3820 1.8783 12.0787 -10.0834 -7.5565 -8.6900 5.9631 -20.5098 +#> 2.3642 8.4941 1.8612 17.1973 7.5288 -4.4782 -3.4536 -9.3324 +#> -2.5016 -5.9783 4.4389 -3.1082 1.1414 4.3949 2.8199 -6.6375 +#> 0.9962 -0.3967 10.1941 2.3201 2.8284 2.5462 -3.6083 0.8569 +#> 2.3564 0.7161 -7.4071 3.0146 2.6186 17.3825 -4.5904 -5.4011 +#> 0.5117 1.0361 -1.5090 1.4770 -4.2030 4.0998 -7.5226 5.9214 +#> -1.7923 5.7535 -4.9701 -10.4625 -14.1787 -2.8550 3.1688 -9.5902 +#> 2.8345 -1.5235 -9.4966 11.9819 -6.2584 -7.3193 -5.0261 -11.1585 +#> 2.2643 5.8170 5.2043 9.2719 9.3628 6.2426 -9.1843 1.3245 +#> -7.1762 -12.2680 5.5735 -11.8904 -7.1599 14.8536 17.6881 -9.8516 +#> 5.5518 3.2217 -4.4778 3.8995 -1.8664 -6.9647 -3.9305 -12.9324 +#> -6.4909 -4.9552 5.2168 -12.6810 7.6878 5.8827 8.7034 7.9903 +#> -5.6575 -5.1057 -1.0492 -3.9127 -0.9752 -0.0680 1.8378 -1.0330 +#> 2.4254 2.5319 -1.5565 -3.2438 9.6880 -0.3314 -3.3405 -4.7011 +#> 3.2410 2.1115 -14.9651 7.7893 4.1532 6.7871 18.3936 -3.1358 +#> 2.9924 -2.4656 -8.2656 -0.5360 9.2659 4.5416 0.6314 -2.9858 +#> 0.6287 6.7512 1.7734 18.0947 2.9011 0.4293 0.8328 3.5282 +#> -0.0651 7.3371 5.6683 8.2378 -11.9909 -7.3594 0.4883 -8.6583 +#> 1.9222 -1.0131 -1.8941 11.5151 9.9498 6.7965 6.2610 0.0717 +#> -3.1166 -0.3244 -13.7111 -1.4455 12.2203 -5.0392 -15.4664 3.8544 +#> -3.6694 6.8850 -3.1157 -6.1977 9.5380 2.6029 -9.8562 1.5450 +#> -2.2759 -3.8652 -6.8997 -5.9262 -5.3554 11.0629 2.9481 -0.5143 +#> 3.2514 8.2135 3.4933 -14.9007 0.9373 4.1942 -6.1680 -3.1610 +#> -3.7257 -1.1718 -6.9096 -6.4412 9.2312 14.1220 -3.1979 1.1137 +#> -1.5789 -0.4715 13.0554 -13.8019 2.7488 -2.5198 -0.7968 -0.7388 +#> 0.2977 -2.6954 2.8029 -0.7476 6.0299 8.5365 6.1323 19.9033 +#> 0.5072 0.5749 -0.9289 6.4329 7.7126 -2.6064 0.6026 -7.7032 +#> 1.9559 4.3685 -2.1914 15.9204 -20.1938 -1.2114 14.5099 -1.6412 +#> 5.4583 6.1685 -2.4450 16.8150 -9.8069 0.1982 11.3293 3.0131 +#> -5.3711 5.6225 3.8660 -3.3717 16.2821 -8.9134 -2.9562 9.2144 +#> 5.1504 9.4042 0.5806 12.4228 2.5611 -9.9295 3.8668 -5.2253 +#> +#> Columns 9 to 16 -0.9677 -4.0097 -12.4059 -2.3359 -14.1406 -6.4539 -12.1545 -15.3925 +#> 7.0265 10.1461 -3.4154 -9.9663 6.8054 -1.3017 2.3787 12.3173 +#> 1.8448 3.4694 6.8664 5.6583 -6.4041 -9.7351 -1.2019 -2.7786 +#> -9.0938 -4.1331 4.8760 0.4087 -12.8925 -12.6656 -16.5059 -14.6703 +#> 15.2378 -2.5167 0.2577 11.3278 3.3438 4.8032 -1.1242 -11.0190 +#> -11.8968 -4.8267 -0.4278 8.6420 -6.6961 -0.6619 -5.6707 -10.5583 +#> -4.3568 10.2016 3.6984 6.0754 -7.8074 6.3275 4.1293 6.0634 +#> 8.5500 -6.6387 11.6132 -6.8210 9.6192 8.8854 16.5629 -7.0220 +#> -0.0838 -3.3344 5.2469 1.4617 -1.5493 4.2835 -8.1487 1.0005 +#> 10.5035 0.0507 -12.5861 -1.0949 -9.5833 2.2038 7.5619 2.7563 +#> 2.5321 10.7052 6.9507 -21.0274 11.1643 9.0992 -6.5149 
-4.4616 +#> 6.3098 1.4958 0.0077 -0.0203 3.2615 5.5996 1.9291 2.2002 +#> 5.2948 -3.5760 -7.8087 -1.9050 -4.6672 8.0447 -4.9779 2.2689 +#> 1.2328 -11.3895 -7.5358 -2.3623 -6.8337 13.2612 -8.3084 -13.1817 +#> -4.9721 -5.0257 1.6596 2.4616 -8.9933 -4.1106 -5.1344 -2.3345 +#> -14.0951 -6.7244 -9.7946 4.3367 1.0985 3.3463 2.5564 -2.8044 +#> -2.6770 -12.4474 -6.2746 6.7057 -1.1132 7.2519 3.3884 6.2520 +#> -3.4401 22.3057 -7.1662 -2.1130 -12.6090 8.8895 -1.2511 1.5363 +#> 7.6137 -19.1470 -4.9358 -15.8546 3.5287 10.2997 17.6449 6.7492 +#> -9.6267 2.9538 -1.4326 2.1347 -6.3685 -2.0564 -6.6023 -2.6144 +#> 3.4578 0.2958 0.3596 13.4159 -5.9611 11.7165 -2.2672 -0.9468 +#> -11.2246 4.9543 -9.7973 7.4378 3.5873 6.2133 4.7434 -7.4909 +#> -8.2853 3.8656 0.3439 2.6212 -8.3324 -2.7554 -2.5187 0.6864 +#> -5.3623 13.4094 14.6258 -6.0378 -8.6302 -2.5292 -5.9406 3.4581 +#> 6.6980 0.5924 -3.1794 -9.1969 -3.8435 -0.3109 -2.3617 8.1715 +#> 4.3023 8.2797 8.9166 -0.1207 1.8869 -7.4730 3.3733 -0.0388 +#> 6.3221 -30.4470 -9.1873 9.8814 4.1910 8.1789 5.3645 -4.7537 +#> 7.9247 0.0846 1.1110 8.2288 5.7250 3.3094 5.2440 6.2563 +#> 5.4151 -7.0976 -5.6453 -10.9289 4.8744 -7.9637 -11.4142 -5.0917 +#> 2.4489 4.9953 10.6033 -11.6046 1.4299 -5.6220 -11.4937 11.7032 +#> 0.3344 -7.6370 0.5923 1.4171 -16.2069 2.5125 3.3490 -2.7550 +#> -1.4463 -7.6959 -3.7580 11.3231 0.8858 -12.9351 -2.0833 2.8690 +#> -13.3702 -2.0348 -1.6581 17.3827 -12.9511 0.6218 -17.0761 8.6773 +#> +#> Columns 17 to 24 -3.4340 -1.4332 9.4683 1.1172 -7.7836 6.6448 -3.4124 2.6377 +#> 4.0018 -5.1031 5.5315 -6.1011 -10.1176 -11.9185 -11.5298 -1.6543 +#> 0.7412 2.4470 4.4561 -5.6521 12.8072 5.4284 0.3308 -2.1896 +#> -10.1075 19.2759 5.2872 -0.6025 0.0830 -18.6056 0.5928 -2.7637 +#> 19.0132 10.5841 6.0043 5.0118 -13.0968 21.0139 -14.1146 0.3770 +#> 8.0410 -6.4152 2.6467 -11.1163 6.9361 18.3679 1.2554 -7.2941 +#> -4.6484 0.5479 1.3951 4.6272 7.0520 0.1075 2.0469 -1.8648 +#> 2.6453 -0.5733 -10.7167 5.9196 6.8765 -3.3049 1.6349 2.3652 +#> 12.6852 0.6612 1.1141 12.5043 10.3867 -2.4188 -5.2136 -1.9053 +#> -4.3614 -0.3980 -0.4479 9.9437 -10.4470 5.8754 0.4783 -7.1779 +#> -4.5928 -5.1068 -0.8685 0.9190 -1.8553 5.7822 12.7401 -1.2883 +#> 6.0113 -11.7025 5.0655 1.0502 -7.1784 18.2597 1.6481 16.7653 +#> 3.7718 -5.8672 1.0738 -1.9323 1.6446 -9.6286 -3.3639 -1.8432 +#> 2.5879 -10.5316 6.0032 17.6337 12.6403 -16.1492 -19.9456 -7.3339 +#> -5.1003 -15.7109 -1.1631 -6.8531 12.4065 1.9177 -3.2176 -3.7601 +#> -11.3250 -6.8002 -6.7891 -5.3432 0.1282 8.5800 -10.0017 -6.0924 +#> -21.4015 -10.3819 -3.8519 -2.2009 -0.5347 4.8379 5.5032 5.9228 +#> -2.9474 -11.2088 3.1387 -1.4097 1.0569 -14.3865 12.8763 -8.5439 +#> -4.6242 10.5840 -15.1057 1.7397 17.9535 -20.1512 -2.3121 -14.5795 +#> -0.7881 -6.0030 -3.5365 -4.5937 11.8125 1.2383 2.1917 4.4664 +#> -7.8666 1.7594 10.4907 -0.8218 5.3324 -3.6404 5.5090 -4.6110 +#> -6.9657 -14.1624 4.2566 -8.2208 -4.1070 -1.1306 -8.9193 12.0872 +#> -12.3010 0.8195 10.7051 -3.4484 5.1781 1.5721 4.4617 -15.5562 +#> -5.9939 -7.1084 -3.1787 6.5885 -3.8283 0.1379 -0.8186 6.0172 +#> -3.6418 3.7688 0.7896 0.0369 -0.9160 5.1858 -9.1146 -13.5730 +#> -12.7688 2.0074 3.9355 2.5086 -8.9581 6.8594 3.2289 -11.0553 +#> -7.4703 9.3158 -1.5761 -5.1712 -5.0044 -3.6980 2.8445 11.5953 +#> 0.5107 4.8479 12.5108 -3.4621 -5.8938 12.0158 -11.9714 18.2226 +#> -6.4609 1.8136 12.2727 13.1547 4.4119 -31.4823 0.9838 -1.9326 +#> -4.1312 5.7892 -9.2721 -10.4745 -6.1841 19.9672 3.9965 -16.4372 +#> -11.2938 8.0320 -6.3455 2.7606 -2.5874 12.2807 -13.0615 -9.6145 +#> -14.3744 20.2807 0.1696 
-18.3391 8.4987 19.6241 -14.5685 -22.7530 +#> -6.0744 3.9201 3.4267 -4.2923 9.8226 -13.1160 14.3907 -14.1232 +#> +#> Columns 25 to 32 -25.6559 -10.9156 -9.3639 -2.1209 14.7189 -5.0703 12.8279 -8.1154 +#> 3.0320 11.0460 9.8138 -9.1326 8.1151 -10.1611 9.1126 -6.8986 +#> -9.6741 12.1914 -7.0153 6.3812 -8.3031 1.5720 -7.1641 -15.9364 +#> -9.0767 -12.0473 -16.0389 -10.9423 -11.2371 -14.7245 -1.0813 -10.2350 +#> -15.6762 -9.1777 8.5985 7.7974 0.0979 -6.0800 11.8735 -0.9774 +#> -6.1632 3.1626 2.9883 7.2371 2.0970 2.5618 10.7349 -5.8495 +#> 2.8342 4.3450 1.4687 -5.3153 7.0235 -4.8326 4.1890 3.2969 +#> 5.7836 2.5935 13.0898 11.6800 13.7342 15.2812 13.2999 3.4133 +#> 10.2866 8.7331 -4.8855 -9.4812 -10.5664 5.2894 -9.9995 -9.0635 +#> 1.4083 -7.5989 -16.7569 17.7992 6.5029 10.4945 7.0004 2.5671 +#> -2.1015 -11.2026 15.4402 -3.0975 11.5574 -12.2456 3.7944 -0.9630 +#> 9.6743 8.1705 -12.7869 0.0080 -0.9624 1.2023 -6.8988 -14.4147 +#> -8.7400 0.4795 2.4549 -0.0471 -7.8906 4.6204 -8.4670 12.1793 +#> -4.4415 4.8317 -5.7580 6.7529 -3.4655 -9.1483 -10.0194 -16.1485 +#> -17.8224 -9.6450 -8.6455 -11.4746 -15.5416 -10.2722 -4.1666 -2.5482 +#> 10.6338 0.0640 2.8336 -15.3082 -13.1138 -13.8507 0.9046 -1.2515 +#> -2.7058 -8.5468 3.7900 -8.5088 -1.6693 -5.6441 7.3983 -3.3417 +#> 19.7399 -14.5031 2.0907 -3.6008 -9.4239 1.6132 -9.7886 17.4081 +#> 20.2215 -0.2101 -7.6222 -2.9692 -25.8327 -13.0773 -13.4568 -3.6684 +#> -12.5797 -1.8838 3.4676 -10.5341 17.4419 10.1659 4.0989 1.4775 +#> 13.3531 -6.1383 2.3657 12.2830 5.1661 -0.0675 4.1757 1.4087 +#> -3.7723 -17.3552 2.3568 -7.6880 -5.5097 -1.6227 10.8565 16.7836 +#> -7.2957 -8.9424 9.9576 -11.6376 13.0173 4.0345 -13.3331 5.6424 +#> 8.6461 -11.3035 5.7422 0.3513 8.9917 5.0631 11.9125 3.8531 +#> -8.9531 9.6955 -5.8416 11.5748 11.6782 8.6377 0.4979 -1.2785 +#> -1.1237 -10.1375 -11.2824 7.5102 -1.2664 9.0790 -0.0468 6.8004 +#> -13.3994 10.5269 -5.1302 -1.3119 -4.4761 -1.2386 -1.7679 -12.8146 +#> 8.5100 8.3615 16.6133 -5.8301 10.6464 -11.0602 13.0278 -10.1821 +#> 20.3466 5.8746 -17.3077 14.5867 1.2178 6.6536 -1.0953 1.4276 +#> -1.5457 -5.3937 9.0299 -1.6478 12.1792 -3.2151 2.7575 -11.6832 +#> 4.5568 -10.8305 11.1900 3.3307 9.5425 -0.0017 14.8007 5.9503 +#> -28.9684 -1.7698 5.5603 6.9113 -1.7502 4.8231 -7.1587 -3.1486 +#> -11.5267 -0.1977 -2.8475 -8.7871 -2.1866 -9.6414 -5.4352 -4.4665 +#> +#> Columns 33 to 40 7.9492 -5.0804 -5.7993 -7.9770 3.7569 -8.4412 -3.8155 12.0096 +#> -5.3788 18.9755 -11.2479 -1.8086 -0.8534 -7.1297 -4.9246 -8.0938 +#> -0.1581 18.0097 -13.9512 -6.0909 -5.0746 -1.2351 11.2417 -5.4303 +#> -2.9308 -2.5272 2.9170 -2.8434 2.2266 1.8755 4.3224 10.6988 +#> 13.6307 -5.3490 5.2155 16.0001 -7.0303 -6.2270 8.0629 -10.3844 +#> 5.8387 -12.4334 17.5634 -11.5684 0.4949 -16.0601 -7.4916 17.3772 +#> -16.8662 0.9686 -8.7218 5.2073 2.6688 13.1830 3.0184 -4.8660 +#> 2.1112 -5.1226 -0.2730 7.3787 -16.1218 -7.2231 9.9759 11.2552 +#> -5.4150 0.5366 5.5534 2.7030 19.7924 10.9628 8.9917 17.9985 +#> -2.5858 8.1095 -7.0435 -9.2342 0.7987 1.6674 5.6875 -7.1116 +#> 0.3922 5.8502 10.8145 4.1114 -18.7439 6.8103 -2.5674 1.7185 +#> 1.7563 10.4029 -1.4483 4.8017 -6.0081 8.6435 6.5205 -0.8656 +#> -3.9743 4.2120 -10.8495 -0.8868 -0.4211 -8.7428 12.1068 0.6119 +#> 2.7847 -2.8586 -6.3869 -3.9169 -22.2621 13.9679 3.6501 11.3068 +#> -18.6067 4.6314 -7.7752 -5.1656 -12.7367 -13.8206 3.7056 -25.1201 +#> -8.5392 -11.8004 -6.7347 6.9775 -0.3826 0.9470 8.6821 0.4177 +#> -6.1266 8.8447 -6.9791 2.5319 -16.2844 -5.4277 14.4982 5.5883 +#> -15.7056 10.6798 -4.1240 -4.1178 -0.2579 5.5221 -6.0502 -0.3089 
+#> -15.6076 -11.4628 -6.4459 4.3462 -4.9908 1.3151 2.8178 20.8673 +#> -6.6252 0.4098 -5.4148 5.6463 4.0706 -4.8986 -15.1973 4.6258 +#> -5.6373 10.1663 -14.6556 6.9960 10.2837 20.8380 17.3981 5.2066 +#> -14.3733 -5.5633 -1.6637 3.5819 8.4398 -19.9616 19.8552 8.0458 +#> 1.3051 4.6582 -9.1313 -3.9248 16.2773 9.9309 0.4038 -9.2242 +#> -10.8030 12.6749 -0.5136 2.2521 -10.2717 10.3002 0.7334 -20.7239 +#> 16.6504 5.3920 -22.9436 -14.0093 3.5895 11.1150 -3.3734 -10.8219 +#> 6.8710 13.4822 -23.3474 -12.1038 22.5145 -6.8989 -0.1700 -9.2741 +#> -3.1169 -18.7922 -2.6409 0.4745 -8.8956 13.7294 -0.0943 7.7534 +#> 2.9541 -5.6867 2.5941 15.8580 9.7454 -12.2776 -9.5374 2.3370 +#> 1.5138 -10.5095 4.6716 -8.5460 11.0888 -14.5055 7.9629 5.4367 +#> 1.9638 14.9499 1.5870 -20.9975 0.0808 2.4397 -7.2864 -20.8043 +#> 8.3696 -10.7314 -3.8182 -3.0129 -12.0980 2.4990 -11.1520 9.2976 +#> 13.3530 0.1603 -17.3399 1.4224 -17.2003 -7.8135 -24.7462 -4.6054 +#> -10.4999 6.7724 -1.3415 1.7718 3.6337 5.9713 4.8873 4.6049 +#> +#> Columns 41 to 48 -10.7290 1.2266 -1.1279 -6.7309 4.8564 -19.1697 -10.8422 -9.4680 +#> 15.3706 -12.0921 -15.1571 3.3368 3.0903 17.8897 7.6483 9.4141 +#> 8.6412 5.1572 -0.8060 10.5385 4.0186 5.9347 -0.0330 -7.5679 +#> 7.0214 -10.1595 -11.4584 1.3118 5.1355 -1.6815 -1.5747 -12.3343 +#> -19.1103 -7.9682 8.5581 0.0851 1.8505 -6.6875 -21.2344 -12.5088 +#> -18.5591 -2.7133 9.7430 1.5973 10.2906 0.3215 -1.6015 7.8144 +#> -1.5293 -4.7240 -3.8865 -15.0194 -12.8862 4.5046 12.2952 5.5256 +#> -1.4769 -0.9524 -2.0169 1.6460 4.8768 -9.9220 3.0602 12.2695 +#> -3.2681 1.0740 -11.6155 -11.1632 2.6573 0.5347 0.8439 11.7267 +#> -14.7730 3.6324 4.2335 8.8489 23.4191 18.0735 13.1651 -2.1606 +#> 13.6308 -3.6316 -8.3127 -4.5406 -12.0217 4.9650 6.1042 -4.3197 +#> -9.1802 1.6441 1.9902 -10.0347 -5.0455 -5.2543 8.4804 -1.5737 +#> 2.3652 3.5311 1.4964 15.6320 11.5806 -2.2735 14.3417 2.5839 +#> -3.7575 -2.8811 -0.9925 -4.3629 17.2919 -0.0197 -4.9680 -3.5100 +#> 7.2565 -0.0398 -2.7936 -6.1423 -14.8843 -9.1030 9.9438 10.3331 +#> 3.2569 4.8552 -1.1905 -7.5572 -14.9308 0.2999 7.4230 -1.0064 +#> -8.8221 -1.0738 -0.8882 -17.1510 -11.1800 0.2144 -0.6876 -5.8947 +#> 12.2692 -13.4749 -3.1823 5.0263 4.2603 -15.4658 2.4992 -10.1891 +#> 0.7860 -2.0814 -2.9499 -13.4518 -6.8299 2.7675 -3.3247 -3.0892 +#> -6.5321 -5.4376 6.9406 -2.3071 0.5853 -5.4339 -2.2470 1.5908 +#> 1.0329 10.0856 -4.5039 -5.2718 -0.3540 -1.6527 0.3171 2.4836 +#> -11.9147 6.7385 5.7380 -5.0723 -3.0284 -9.4464 4.8818 3.0832 +#> 12.7580 -2.9179 1.7291 7.3266 0.2459 -13.2164 0.9082 3.2840 +#> -7.1649 -1.0791 -4.0308 -7.5664 0.1214 -0.6316 0.7790 -6.8957 +#> 20.6936 12.1571 -3.3921 17.5928 7.0366 11.0281 6.9955 4.5702 +#> -1.1660 -1.6073 -16.1980 11.4725 6.6002 -6.8282 -9.1363 -12.4136 +#> -5.3623 3.8805 6.7446 -13.4904 -12.3133 -15.7244 -1.2280 4.4612 +#> -6.5582 9.1700 12.1526 -1.7667 -10.2811 -24.1948 -10.7069 -2.4161 +#> -14.2601 0.8724 6.2914 17.0546 9.1718 6.6296 2.0580 3.9653 +#> -13.2472 6.2988 6.0759 2.9227 -6.9672 -9.0773 -12.7429 -18.0649 +#> 0.5279 -0.3957 27.3752 10.9115 9.9392 -4.9250 -3.0218 -8.9675 +#> 3.2111 -2.3804 19.1284 11.3197 -4.8310 6.6147 -9.8923 4.6418 +#> 0.2088 2.7633 1.1299 5.4723 -7.2777 -4.1593 0.9491 -1.3457 +#> +#> Columns 49 to 54 2.2773 -7.0427 -7.2601 5.2576 10.1473 -1.4825 +#> 2.0728 5.8440 -11.9129 8.8585 -3.8773 -7.8997 +#> 13.9054 -20.6030 5.8407 3.9196 -3.7404 6.9391 +#> 0.7607 4.4037 -0.6187 -4.4399 4.3271 3.9021 +#> -6.7017 -2.3270 1.4333 -7.1526 5.4069 -2.5627 +#> -5.7247 -13.7080 -10.0205 4.2689 14.5773 -13.5699 +#> -17.6388 3.7523 
-#> [ ... large printed tensor output elided: slices (18,.,.) through (20,.,.), columns 1 to 54, values from the previous documentation build ... ]
+#> [ ... large printed tensor output elided: slices (18,.,.) through (20,.,.), columns 1 to 54, values regenerated by the current documentation build ... ]
-1.6177e+00 2.1996e-01 1.5266e+00 -#> 1.9788e+00 -8.5207e+00 -3.9199e-01 -1.0973e+01 -1.4129e+01 1.7517e+00 -#> 9.3051e+00 -2.2612e+00 -7.2882e-01 -1.8655e+01 -1.1064e+01 -5.8109e+00 -#> -1.1440e+01 -3.3624e+00 -3.3130e+00 9.9564e+00 1.2247e+01 7.4611e+00 -#> 1.4041e+01 8.0288e+00 4.4719e+00 6.5836e+00 5.6681e+00 4.5749e+00 -#> -1.0175e+01 -2.4834e+00 1.7094e+00 6.6462e+00 9.4967e+00 5.3757e+00 -#> 4.1961e+00 -2.3064e+01 -1.0312e+01 5.6536e+00 -2.5044e+00 -4.9450e+00 -#> -2.5085e+00 -4.3374e+00 -8.5296e-01 2.7185e+00 -5.6022e+00 6.4684e+00 -#> 1.0241e+01 9.5646e+00 6.8356e+00 -1.2182e+01 4.2921e+00 5.7431e+00 -#> 1.3466e+01 1.3720e-01 1.0346e+01 2.1120e+01 -5.1694e-01 -5.4295e+00 -#> 3.2274e+00 2.0233e+01 -1.4584e+00 -4.0477e-01 6.6957e+00 -4.2500e+00 -#> 4.9743e+00 -2.1034e-01 -1.1772e+01 -5.7535e+00 8.7506e+00 -7.1696e+00 -#> 9.0055e+00 8.2756e+00 5.8166e+00 4.2791e+00 -2.4567e+00 -6.4340e+00 -#> 3.1091e+00 2.3341e+00 -1.7254e+00 2.9272e-01 -1.0347e+01 6.9994e+00 -#> 7.0724e+00 -1.0171e+01 1.3105e+01 1.9351e+00 -1.1987e+01 -6.6013e+00 -#> -1.6740e+01 1.5330e+00 1.0160e+00 4.8406e+00 -1.2461e+01 -3.5074e+00 -#> -9.7528e+00 1.1982e-01 1.5609e+00 1.0209e+01 5.4214e+00 -5.4774e+00 -#> -3.3251e+00 -4.8822e+00 4.5186e+00 -1.5024e+01 -1.3310e+01 -6.2242e+00 -#> -2.7370e+00 -4.1817e+00 -3.3404e+00 -7.9251e+00 -7.3348e+00 1.0770e+01 -#> -5.2601e+00 -7.2051e+00 5.9309e-01 1.5419e+01 -1.4580e+01 -6.6892e+00 -#> -1.4980e+01 -7.9538e+00 1.0906e+01 -3.5722e+00 1.7001e+00 1.7724e+01 -#> -7.8612e-01 -7.4339e+00 -1.1043e+00 2.8421e+00 -7.2769e-01 2.5934e+00 -#> 1.2884e+00 1.1799e+01 2.6829e+00 -1.6889e+00 1.7691e+01 -9.7053e+00 -#> 1.0168e+01 4.0578e+00 1.3561e+01 -4.0530e+00 -4.0119e+00 -9.7104e+00 -#> -#> Columns 37 to 42 -8.2735e+00 3.1403e+00 -1.1959e+01 1.1864e+00 -8.7998e+00 -2.9534e+00 -#> 9.1179e+00 -6.3446e+00 2.2182e+00 -1.5197e+00 -1.2004e+01 -1.0454e+01 -#> -8.7442e+00 -3.2043e-02 1.6638e+00 5.4491e+00 1.7899e+00 -1.4846e+00 -#> -1.8708e-01 -7.6960e+00 -5.4669e+00 -7.6985e+00 -7.7320e+00 1.1031e+01 -#> -2.1632e+00 -7.0626e+00 1.4560e+00 -6.1482e+00 4.3042e-01 1.4981e+01 -#> 5.2970e+00 -1.5019e+01 2.8836e+00 -6.4169e+00 1.1324e+01 -1.0338e+01 -#> 9.4684e+00 -1.3241e+01 4.9726e-02 -2.9331e-01 1.3627e+00 2.9866e-01 -#> -1.2529e+00 -3.2748e+00 1.9241e+00 9.2193e+00 -1.0986e+01 -2.7363e+01 -#> -4.8738e+00 -7.7993e+00 -4.7592e+00 -2.0297e+01 -8.3224e+00 -2.2505e+01 -#> -1.0972e+01 -2.9503e+00 1.1717e+01 -1.6971e+00 -5.3677e+00 -2.8579e+00 -#> -8.9193e+00 -6.8548e+00 -1.2901e+01 -1.0337e+01 6.7866e+00 3.4333e-01 -#> -1.5797e+01 -5.8071e+00 -1.9483e+01 -8.7037e+00 5.0029e-01 -1.0541e+01 -#> 3.2845e+00 4.1856e+00 -3.9064e+00 -3.7897e+00 2.1681e+00 7.0478e+00 -#> 1.4918e+00 6.1159e+00 4.3892e+00 -7.7357e+00 -8.0483e+00 -6.6913e+00 -#> -2.9716e+00 -1.3224e+01 3.6103e+00 5.5090e+00 7.1380e-01 7.3106e+00 -#> -3.1293e-01 6.9401e+00 7.3920e+00 -2.7220e+00 9.2464e-01 5.9456e+00 -#> 1.5518e+01 1.1428e+01 7.0763e+00 -2.1403e+00 9.3160e+00 -3.3511e+00 -#> -1.1750e+01 -5.4353e+00 7.5489e-01 4.7460e+00 1.0737e+01 -6.3573e+00 -#> 9.4763e-01 2.6830e-01 -3.0907e+00 -1.5246e+01 -1.6409e+01 -2.3621e+01 -#> -3.2955e+00 2.3546e+00 1.1154e+00 -2.8251e+00 -8.3575e+00 5.5872e+00 -#> -2.2356e+00 1.0417e+01 6.6118e+00 1.0563e+01 -4.5935e+00 9.5886e+00 -#> 2.5937e+00 -9.0757e+00 -1.9578e+00 -6.8779e-02 -1.0149e+01 -1.8089e+01 -#> 5.6172e+00 2.3983e+01 5.6968e+00 -3.7683e+00 1.6258e+01 1.9859e+01 -#> -6.4705e+00 -2.5923e+01 -2.7608e+01 -1.9793e+01 -2.2804e+01 -1.3724e+01 -#> -3.4172e+00 -1.4107e+01 -9.9054e+00 4.4123e+00 
-1.1000e+01 5.2239e+00 -#> 2.5845e+00 2.8300e-01 -3.3345e+00 -1.4225e+00 -9.2023e+00 1.4570e+01 -#> -5.9657e+00 -2.1567e+01 -2.3819e+01 1.1180e+00 -1.8332e+00 -1.8851e+00 -#> 5.3897e+00 3.4841e+00 3.8876e+00 3.3017e+00 5.1111e+00 3.0648e+00 -#> 1.4952e+01 -5.3567e+00 -2.6438e+00 9.7077e+00 8.9029e+00 6.6048e+00 -#> 1.3372e+01 -5.9119e+00 -1.4201e+01 -9.9048e+00 4.3797e+00 -1.7422e+01 -#> -3.7871e-01 2.2780e+00 -5.7702e+00 -2.5621e+00 1.3025e+01 1.0764e+01 -#> 3.2211e+00 4.6307e-01 1.1810e+01 5.7527e+00 -5.7293e+00 6.3445e+00 -#> -2.5170e-01 -1.1096e+01 -1.0406e+01 5.4917e+00 -1.1964e+01 -5.6287e+00 -#> -#> Columns 43 to 48 1.0823e+01 1.0227e+01 5.2452e+00 -7.6024e+00 1.0346e+00 -7.9165e+00 -#> -6.8778e+00 -8.0179e+00 -3.0957e+00 -5.3885e+00 -7.4486e+00 -5.7790e+00 -#> 7.6706e+00 2.3105e+01 7.8097e+00 -1.3647e+00 -1.2281e+01 -1.5098e+01 -#> 2.6246e+00 6.9635e+00 -1.8214e+01 5.7573e+00 -1.0707e+01 7.4098e+00 -#> 2.3787e+00 4.8133e+00 -4.4525e+00 5.3442e+00 -1.4970e+01 -3.1860e+00 -#> -1.7354e+01 1.6283e+01 1.5564e+00 1.3106e+01 -6.1499e+00 -4.8105e+00 -#> 5.7187e+00 -1.2279e+01 7.5568e+00 1.6225e+00 6.1519e+00 2.5067e+00 -#> 5.5771e+00 7.8960e+00 3.8538e-01 1.5720e+01 -6.6727e+00 -1.1263e+01 -#> -1.6229e+00 -3.2439e+00 3.1027e+00 -2.5326e-01 -8.8451e+00 -8.2672e-01 -#> 9.0183e+00 4.2075e+00 -1.4198e+01 6.5227e+00 -1.2879e+01 -8.1087e+00 -#> 6.6603e-01 3.8175e+00 -2.9692e+00 -1.5503e+00 1.2798e+00 -7.5683e+00 -#> 1.8157e+01 1.5004e+01 -2.7700e+00 1.1028e+01 -4.5021e+00 4.6181e+00 -#> 7.3547e+00 -5.4684e+00 3.7659e-01 -1.4494e+01 3.1610e-01 1.0328e-01 -#> -5.1568e+00 1.1580e+01 -1.5136e+01 -1.8530e-01 -7.0306e+00 -5.5930e+00 -#> 7.5433e+00 -1.9105e+01 -6.2412e+00 -5.3192e+00 -3.9712e+00 1.0104e+01 -#> 1.3997e+01 -7.2630e+00 -1.5452e+01 -1.4429e+01 -2.8080e+00 2.6409e+00 -#> 4.5980e+00 -2.1559e+01 -1.2334e+01 -4.2134e-01 7.6498e-01 3.6497e+00 -#> 1.3544e+01 6.7151e+00 -1.9943e+00 8.4802e+00 -1.3441e+01 3.5801e+00 -#> -2.1211e+01 7.9966e+00 5.1467e+00 1.9884e+00 1.4655e+00 -1.7772e+01 -#> -6.7888e+00 9.6563e+00 2.9522e+00 -3.2235e+00 4.3726e+00 -6.4796e+00 -#> 1.1923e+01 1.0458e+01 1.1862e+01 6.3710e-01 4.6522e+00 3.3642e+00 -#> -2.0412e-01 2.3011e+00 7.5140e+00 -6.2739e+00 1.2391e+01 6.0583e-01 -#> 8.2744e-01 4.1622e+00 -1.2777e+01 -6.6249e-01 -2.0946e+01 -5.7807e+00 -#> -4.2178e+00 -9.3935e+00 1.4077e+01 -1.0468e+01 2.5169e+00 -1.2825e+01 -#> -7.4545e+00 -1.7324e+01 1.0080e+01 4.3010e+00 6.2536e+00 5.2832e+00 -#> -1.7112e+01 7.4615e+00 -5.2570e+00 -4.3795e+00 4.2586e+00 1.0613e+01 -#> -4.2153e-01 -1.6174e+01 -3.5876e-02 -2.6960e+00 5.8841e+00 1.0271e+01 -#> -1.0999e+01 9.6090e+00 4.4693e+00 3.4537e+00 -4.9400e+00 -5.7933e+00 -#> -1.1098e+01 4.3504e+00 4.1122e+00 6.6750e+00 -1.3501e+01 5.7151e+00 -#> -1.8164e+01 -7.8379e+00 -1.6745e+01 -2.6554e-01 -1.3939e+01 -4.3230e+00 -#> 4.1970e+00 -1.3924e+01 8.9203e+00 -1.8577e+00 3.2587e+00 3.2568e+00 -#> -9.2823e+00 2.3534e+01 -8.9547e+00 -4.0144e+00 1.0853e+00 -1.2542e+01 -#> 6.3050e+00 -1.6179e+00 3.5026e+00 3.5845e+00 -2.6009e+00 -7.2738e+00 -#> -#> Columns 49 to 54 -1.2046e+01 -7.2920e+00 6.6618e+00 3.8642e+00 1.1839e-01 -1.1737e-02 -#> -1.9295e+00 -1.2324e+01 4.3648e+00 -4.7373e-01 -2.2393e+00 -2.6499e+00 -#> -4.9235e+00 9.2022e+00 8.5990e+00 5.1345e+00 2.7203e+00 1.3626e+00 -#> 1.1801e+01 -3.4244e+00 9.3624e+00 -2.5441e+00 4.0397e+00 -1.0988e-02 -#> -7.1344e+00 -1.4162e+01 -1.0796e+01 -1.5963e+01 4.3136e-01 5.5208e-01 -#> -2.7459e+01 -7.8029e+00 -4.7287e+00 7.1100e+00 8.4058e+00 -8.3266e+00 -#> 1.7347e+01 -2.7476e+00 -5.8147e+00 1.0619e+00 
-2.6772e+00 3.0231e+00 -#> 7.3309e+00 -3.1074e+00 1.9109e+01 6.2488e+00 -6.4734e+00 -4.7125e-01 -#> -4.8922e+00 -7.5932e+00 -2.2855e+00 -1.2168e+00 -1.4721e+00 -4.0499e+00 -#> -1.8002e+01 -4.7983e+00 6.1407e+00 -6.8202e+00 3.7508e+00 -6.9611e-01 -#> 1.5803e+00 2.1600e+00 9.1819e+00 2.6416e-01 1.6545e+00 -6.8457e+00 -#> -2.5037e+00 -3.5136e+00 -1.2750e+01 9.4648e+00 2.0263e+00 -7.7318e-01 -#> -5.8539e+00 7.2161e-01 3.3986e+00 2.6831e-01 8.4629e+00 1.0556e+00 -#> -8.1779e-01 -9.5382e+00 8.8504e+00 -7.6203e+00 1.3994e+00 -2.9230e+00 -#> -3.5740e+00 -1.7280e+00 1.4569e+01 -1.1737e+01 -8.8975e+00 3.7210e+00 -#> 1.1795e+01 3.3146e+00 1.2242e+01 2.9391e+00 -1.8201e+01 -1.2430e+00 -#> -1.8295e+01 -4.6633e+00 5.9964e+00 -4.3451e-01 -6.5197e+00 3.5632e+00 -#> 3.7127e-01 7.4454e+00 1.1682e+01 2.6667e-01 -1.9586e+00 -3.3837e+00 -#> -1.0365e+00 -8.2232e+00 -8.4112e+00 -9.1053e+00 -3.6521e+00 -2.2473e+00 -#> -2.5694e+00 4.3786e-01 -1.0536e+01 -8.8205e+00 -3.6897e+00 1.0798e+01 -#> 8.6545e+00 -5.0410e+00 7.0885e+00 1.8632e+00 6.6410e+00 -1.3708e+00 -#> -5.7480e+00 8.0330e-01 -3.3123e+00 5.2791e+00 -5.4046e+00 4.0533e+00 -#> -3.6363e+00 -8.6738e+00 3.7204e+00 1.1624e-02 1.8064e+01 -1.6268e-01 -#> -2.2105e+01 -1.7262e+01 -1.0033e+01 -9.1466e+00 -5.0783e+00 6.1272e+00 -#> -1.2115e+01 -2.3348e+00 -1.0168e+00 5.4170e+00 -1.1375e+00 -1.1276e+00 -#> -1.1370e+01 9.9577e+00 -1.5178e+01 -3.5892e+00 2.8437e+00 9.4898e-01 -#> 1.3018e+01 1.0340e+01 2.0948e+00 -2.8574e+00 8.2862e-01 3.8906e+00 -#> -1.7084e+00 2.7070e+00 3.6188e+00 -2.3062e+00 8.2864e+00 5.3085e+00 -#> 1.5275e+00 6.4327e+00 1.8934e+01 7.2108e+00 1.6752e+00 1.9821e+00 -#> -1.6915e+00 8.5454e-01 7.1428e+00 1.0604e+01 -7.7169e-01 9.2157e-01 -#> -1.8514e+00 -1.1039e+01 -4.7102e+00 -7.0633e-02 5.1042e+00 5.8931e-01 -#> -7.9077e+00 -5.2208e+00 -4.4968e+00 7.9002e+00 1.5707e+01 6.7454e+00 -#> -4.4708e-01 -2.1211e+00 -2.3933e+00 -2.8860e+00 2.2992e+00 4.1969e+00 +#> Columns 1 to 8 -1.8085 6.8235 11.1442 -4.3852 -15.3604 -8.8875 1.1256 0.1523 +#> 3.2338 11.4704 0.3934 -14.2270 13.4444 2.4024 6.8763 -1.6765 +#> -3.7818 9.8499 -9.2872 5.0741 -0.6256 -12.0439 -9.2977 3.9530 +#> 5.6605 -1.4260 7.0991 6.6795 -2.6111 -13.9174 1.7084 8.8847 +#> -7.9131 2.2159 12.6250 0.0961 13.1972 -11.4249 -0.1778 4.9907 +#> 0.0938 -0.5793 10.3645 1.8925 11.3503 -3.1427 0.4214 -15.2126 +#> 3.3332 -2.4953 -0.5407 9.7266 7.8946 18.1418 -3.0654 18.2605 +#> 0.2715 -3.8937 -1.1274 7.2516 -0.6246 -10.3331 12.9356 -3.2030 +#> 1.6485 6.9216 -1.0674 -1.8517 0.0904 5.8350 1.1440 -3.3267 +#> -5.6427 -11.8021 -4.9037 6.8329 5.0761 6.1332 -8.6573 -5.3008 +#> -0.4524 -7.5455 10.2084 8.8555 -10.6587 -5.6035 17.9837 -9.9650 +#> -7.2268 11.1057 2.6428 -1.7195 13.2818 0.4378 1.4844 2.4036 +#> -6.8513 -7.4731 -2.7462 3.8581 -8.6282 5.4673 3.6392 6.9059 +#> -3.7328 2.4122 -6.1795 4.3776 1.8762 9.3600 -2.5737 3.5618 +#> -3.8572 11.2351 -4.3227 -4.8577 -12.3157 -1.9139 -2.5290 10.5374 +#> -5.5685 -4.6600 -3.2141 -2.2996 20.0426 0.0587 -3.7593 11.4482 +#> -2.3589 -1.4210 -10.3781 11.2984 -2.7009 14.6150 2.2226 -8.7667 +#> -2.6438 -7.9484 -6.7505 19.6090 10.5052 -0.4875 -3.4537 18.9603 +#> 6.3091 6.8971 -4.9796 -4.3744 9.0554 -3.9599 9.5644 -1.5092 +#> -0.9940 3.9283 -4.9377 1.1211 -12.0237 1.5106 -10.0866 0.0542 +#> -0.8940 -5.6882 3.2082 3.7745 0.5697 -1.4983 5.5870 9.2724 +#> -1.9254 -7.3987 -2.3184 2.9481 -1.0781 -18.3138 13.4581 -9.8867 +#> 5.2962 -13.5625 -5.7083 10.7356 0.0219 0.3040 -10.6505 10.0103 +#> -1.3605 6.3681 -6.7055 8.3580 -0.0669 -3.7588 3.8232 5.6098 +#> 7.3196 -3.2715 -10.4852 -3.4072 
-13.0453 -0.1817 -2.6775 4.2766 +#> 1.4467 -1.2051 -4.3364 -1.9571 7.8727 -11.4624 -8.6240 5.5867 +#> -7.8125 2.9865 -5.7988 12.0364 3.4152 -7.1135 9.6319 11.9023 +#> 5.7813 6.9540 9.3736 -7.6431 5.5177 -5.2435 8.0511 3.0021 +#> -0.2057 -1.1182 6.9689 -4.2778 -12.3012 -6.0379 0.3608 0.4296 +#> 2.8318 1.5916 7.1183 -19.4666 -8.1717 5.6651 -21.5762 -20.9961 +#> 0.3252 -13.7046 0.9702 -3.9895 -12.7617 3.0772 -9.7669 7.3512 +#> 4.8164 1.5928 -11.3728 9.7084 -20.2306 8.2180 -21.9845 -2.1740 +#> -0.5004 0.1439 1.8745 11.2255 -2.9842 -6.1502 -1.9812 -0.8021 +#> +#> Columns 9 to 16 -10.1794 -14.9284 -17.3641 2.0294 -6.9438 -0.0645 14.2671 -2.3613 +#> -10.8704 -7.2153 -7.5334 6.1847 1.3025 -16.6261 -2.6642 -7.1922 +#> 13.3906 2.5243 -2.6378 11.3990 2.6536 0.6407 2.1983 4.5764 +#> -6.5675 1.6233 -5.1832 -8.1122 -8.8643 -4.9960 3.3150 4.6671 +#> -12.4715 4.1802 -4.1456 -21.4683 -0.9824 -2.2127 5.6457 2.9535 +#> 2.0502 0.7811 1.9804 -1.8003 -2.4586 5.2659 -0.9761 -7.8882 +#> -8.0298 0.6442 2.8820 5.6903 -5.4814 -11.2479 -4.6485 -11.4214 +#> -1.6430 -4.9383 -9.5249 7.0498 -2.5464 0.4699 1.3884 -11.7889 +#> 8.4361 7.2545 3.7774 -5.6343 -9.9351 5.1677 7.9047 4.6966 +#> -5.6658 -11.9736 13.2212 5.0623 -13.8838 -9.4784 3.6282 12.6086 +#> -4.6914 7.3080 -6.2720 -0.5174 -10.6196 -3.2478 -9.8968 -9.6412 +#> 6.2494 15.0193 5.1235 10.0639 -5.8118 12.3543 4.3839 -16.2044 +#> -5.7358 3.5501 -0.6512 -2.1734 -1.8810 -2.6743 8.3366 -2.7352 +#> -3.9936 2.5910 11.1833 -8.1628 -15.2792 -1.3009 -3.0070 -3.0361 +#> -9.4892 4.0590 5.0090 -7.3886 14.9358 -7.8874 1.0146 -1.7621 +#> -2.1584 14.1129 -1.4494 7.7903 3.0989 -3.4798 8.6529 7.1713 +#> -11.1146 4.1217 -3.8267 -13.7234 10.0567 -3.3526 -8.0599 -8.4550 +#> -1.6442 -6.4007 1.9564 7.6880 5.2909 -24.5806 1.7764 20.3234 +#> 5.2582 -4.1678 -5.1555 5.3339 6.2349 -5.0789 -5.9958 1.3127 +#> 6.2296 3.3385 -3.7889 4.4874 -1.8305 -4.2832 19.3959 6.4340 +#> -5.9719 3.7739 6.9392 3.1651 5.9951 11.3894 -10.9516 6.2830 +#> -17.4815 -4.0967 3.5075 -7.3431 -0.9606 0.9446 0.2215 5.6110 +#> -1.4048 5.0152 10.7015 -1.5667 9.9075 -4.0370 20.2363 26.3620 +#> -2.9662 4.9210 -0.0204 6.3144 13.1104 3.0940 -9.7981 -1.6755 +#> 1.1846 -17.2423 0.6635 10.1996 -11.0985 -16.3306 -6.2983 10.5892 +#> -10.9259 -20.2519 2.9075 3.5902 0.4626 -19.5359 -1.5157 23.6804 +#> -8.5254 -0.4845 -4.3085 -1.1406 -10.9099 12.9873 7.2302 -12.9039 +#> -9.8433 12.1345 1.3542 -1.1830 -11.8594 -4.6079 6.2281 -5.3346 +#> 19.6813 -10.2379 5.3029 4.3069 -0.9405 -9.8509 -4.2888 -7.2717 +#> 19.0780 1.2020 -21.4062 -7.8763 24.9200 -6.6737 -12.9860 9.6250 +#> -9.8688 1.7647 -1.3504 -3.3562 2.8880 8.9753 10.0467 5.1882 +#> -11.3345 6.3551 2.3746 -12.2455 1.3373 5.8598 16.3165 -3.8965 +#> 8.5505 5.9874 -7.7551 3.9953 11.5428 4.3301 -4.1456 -1.0553 +#> +#> Columns 17 to 24 3.4379 8.4352 12.1864 11.4839 -1.0812 -0.6587 11.6241 -2.1610 +#> 3.5834 -5.9094 7.1619 -9.0004 2.8066 1.2815 -3.2055 -18.7561 +#> -3.4830 6.8322 8.4415 -1.6890 16.4708 1.9059 -2.7801 17.4356 +#> 1.9267 9.1798 5.2292 11.5135 15.1043 8.0399 9.3046 6.4772 +#> 0.2345 13.2745 -4.6980 9.0597 -4.3470 -0.9129 -6.4124 9.2362 +#> -5.9683 12.7167 -8.7012 12.7688 -9.3868 -2.7994 -3.4714 4.3325 +#> 10.1765 0.9581 0.9913 -10.5012 5.9029 2.4469 20.0124 -1.4693 +#> 2.5257 -2.9225 12.2828 2.6812 3.4723 -18.7732 -0.9865 -2.4597 +#> -6.6799 7.1511 11.3533 9.9620 12.0954 4.7766 8.4221 2.0568 +#> 0.6233 -6.0174 -12.5099 4.2834 4.6351 7.5994 0.9900 -16.6508 +#> 16.9863 -12.8674 3.8486 -6.7235 0.2406 -8.0315 -3.3391 0.0315 +#> -13.4598 -3.4615 -4.5909 -17.8737 -9.8440 8.8483 -6.2322 
-0.5780 +#> 4.4961 -3.0535 4.8150 5.2029 -7.6731 -5.4773 -0.5935 8.1613 +#> 16.1048 13.1719 -0.7813 13.9314 12.1535 0.6091 7.7643 -3.3097 +#> -2.2050 -8.8802 -4.0884 1.1293 7.7253 7.7275 2.0325 7.9701 +#> 0.6568 -4.2595 0.6793 -14.9570 6.2110 2.2176 2.4834 20.1524 +#> 9.4107 0.3862 -5.3271 -11.7983 -11.8202 -15.5278 6.5015 7.7119 +#> 2.9367 -13.8717 -15.5943 -1.1245 6.1041 -4.5714 0.6443 20.2473 +#> -5.6470 9.7853 4.0830 -3.9930 -1.9974 -8.0662 1.6419 9.8002 +#> -0.2521 -2.0018 4.5139 1.1573 0.1829 -2.9886 2.5863 -0.8541 +#> 7.5024 -1.8956 4.5682 -5.8431 12.3655 0.2531 -2.7889 -2.4726 +#> -1.7530 1.5935 -5.7318 5.3683 -10.2838 9.1896 0.4879 20.1177 +#> 2.4782 -0.4610 2.1775 -3.5302 9.6107 12.4782 3.2454 -0.9944 +#> 10.2677 -1.8757 -6.3894 -2.2962 4.8556 -7.3212 -6.0320 9.5188 +#> -2.7605 3.6678 -1.8530 -2.0780 8.1630 6.7658 5.9929 -14.7745 +#> -3.0390 0.4741 -6.7724 -2.5366 0.7335 3.6058 1.7393 -3.0921 +#> -0.6922 12.0244 0.5601 -2.9409 -3.6292 -7.0280 2.4363 11.4067 +#> -13.0394 -0.1910 1.2098 1.6614 -5.8048 6.6063 0.3493 -1.5076 +#> -5.1819 1.9542 5.1423 13.0171 -4.8891 -4.5958 2.1498 -4.7827 +#> 0.0163 5.8201 4.3923 -14.4396 -10.1979 -0.6131 0.0601 -31.7168 +#> -9.7506 -2.1386 5.7524 -0.7152 3.1771 -12.9438 -1.3517 -3.8864 +#> 5.4991 2.0661 -1.7861 10.3215 11.8130 1.1582 4.7063 -13.8469 +#> 13.7012 4.1644 9.8376 3.4762 5.3983 -6.1923 5.5819 19.3746 +#> +#> Columns 25 to 32 -10.2416 14.7264 -12.6604 -8.2676 4.4424 -19.3142 -8.1464 -11.3920 +#> -2.1834 -6.8935 15.3232 1.2277 3.4624 -2.7000 -11.4923 13.4703 +#> -15.8238 16.7675 2.6045 -0.7082 -5.0369 4.0627 -16.2359 -7.6680 +#> -8.8415 -2.6171 -6.9167 -10.2169 2.3852 -3.7276 -8.1779 -21.2976 +#> -13.8236 -6.7069 3.3380 -3.6913 -0.5309 -1.6683 -10.3296 3.6428 +#> -0.3584 14.2790 5.2645 -2.6072 -5.9618 0.7734 5.3214 -13.7755 +#> -10.0030 -24.8429 0.0549 -3.5409 5.6547 14.3611 5.5172 -1.4669 +#> -7.3790 -6.3546 -3.1390 13.5413 11.5983 11.6355 -1.6879 6.5976 +#> -3.4076 14.3032 -1.6495 7.9119 0.6767 12.3789 1.7472 -5.9689 +#> -25.6925 2.2857 6.3914 -4.8627 9.1784 8.6958 -12.4188 8.5016 +#> -0.7826 0.6261 4.9188 11.8967 16.4723 3.9813 -7.3106 -4.9731 +#> 21.7040 21.4698 0.7837 7.4448 -6.2250 16.1856 4.6194 -4.5464 +#> -16.1303 -3.3117 -10.2965 -4.5425 -16.3345 4.9616 -5.9379 7.6910 +#> -3.9478 12.4847 -1.7680 -8.5500 13.8150 -6.3223 -22.6779 -3.1781 +#> 13.6536 -5.5077 -9.0472 1.0408 -1.5763 0.1702 -3.6839 1.2109 +#> 0.3589 -0.4649 -4.9002 -17.8386 -1.3519 4.9333 9.8586 -6.8356 +#> -0.6898 -1.8803 -1.8696 -13.0802 7.3337 8.0793 -1.6920 1.1841 +#> -9.9161 -12.7632 16.5711 -5.5078 3.9504 14.7979 -0.0271 1.3653 +#> 0.2711 -2.6653 -1.1627 -18.3533 -17.6653 -2.5515 -0.5849 -3.9738 +#> -3.6610 -15.2496 -0.0277 -0.8859 -8.2856 7.2251 6.9414 -3.6113 +#> 11.6710 7.2822 -8.5050 11.5796 19.4352 6.2796 -13.4865 11.6867 +#> -1.7649 -2.8612 -15.2160 -4.4413 -8.1216 12.6129 18.1034 6.2739 +#> -6.6626 -1.7059 10.3411 3.0628 -7.0777 -4.8876 0.6616 4.7759 +#> 18.5314 -15.7258 -3.0978 4.7971 13.4731 1.4660 -8.1811 19.7662 +#> -4.4994 -1.6873 -8.5064 -5.7987 0.9318 -1.1732 -6.0563 7.7234 +#> 10.4725 -4.7629 -4.1846 11.6992 -4.1662 -7.0571 -5.9636 -0.2261 +#> -8.4743 1.4216 -8.4977 -15.5294 6.3919 -6.3828 17.5120 -6.7123 +#> -0.9549 8.3025 5.9612 -10.3707 5.9064 15.3804 9.5534 6.8740 +#> -6.7103 3.9216 -20.5821 -7.7008 -3.1681 5.8599 -1.1451 7.4167 +#> 14.6057 11.7997 -11.0051 12.4682 3.9693 -6.5482 -1.8915 0.3585 +#> -6.6026 3.1699 1.8563 -17.1516 0.4566 -9.4297 9.3337 5.1812 +#> -6.8692 5.3250 10.1857 -3.2662 0.6716 -25.4983 -18.7460 -8.6142 +#> -4.2331 -6.0863 
-3.7910 -11.5121 8.6729 -12.9421 -8.5479 16.3405 +#> +#> Columns 33 to 40 -0.1569 -2.7266 9.2791 -0.5810 -8.5486 2.1007 5.0249 -4.4974 +#> -17.1284 -5.1497 1.8101 -16.9630 -6.0321 -1.4432 -10.3474 -3.1184 +#> -3.2402 17.3936 5.4339 -6.3686 -5.3584 -3.1360 4.2987 15.2203 +#> -7.1489 8.9191 12.8711 4.6684 -6.7725 -10.1931 13.6875 5.3358 +#> -2.6737 7.5119 6.0522 -11.3305 -9.8963 9.7640 -7.6690 11.4855 +#> -8.4428 8.8333 -0.4129 -3.4478 -9.0430 -2.3889 1.6318 -4.2240 +#> -16.8384 -7.0844 8.0303 3.8051 -2.0820 -11.8136 -1.6724 -3.6335 +#> 0.8212 -5.1141 -0.3663 0.3196 0.7888 -1.7771 5.2814 1.0386 +#> 11.1806 -0.9534 -1.2515 13.7424 -5.1325 -1.8527 2.8168 14.6277 +#> 7.6767 -4.4157 0.2736 -13.6328 8.7376 4.3707 -15.7661 -1.8361 +#> -23.8295 15.3801 26.8475 -9.6708 -2.6077 8.6731 -12.2178 1.0008 +#> 9.4455 -2.0803 21.2394 4.5900 3.8484 17.1744 -17.3394 13.5123 +#> 3.6987 -1.7717 7.6256 -15.0688 -4.2955 9.2369 -15.0015 1.8412 +#> -5.1464 -10.5151 -6.5049 5.7651 -5.9820 -1.9951 -2.6133 4.6113 +#> -18.5323 -9.3310 -11.7947 1.6622 -5.2074 0.8505 1.1189 -11.8461 +#> -10.7856 9.0138 2.0116 -6.0254 -7.5856 -3.6127 -7.5107 4.1595 +#> -14.6936 1.0497 11.7719 -0.8666 11.2925 -1.7937 -12.3447 -2.1602 +#> -17.4366 7.2441 6.1121 -3.1418 -4.5704 16.3396 14.8591 -5.4702 +#> 14.5764 -0.2693 -4.9854 6.5477 -12.2666 -5.2875 -3.9501 0.2375 +#> 20.4611 -14.2881 -4.1101 15.8526 -4.0581 4.1525 -7.3295 -1.5252 +#> 3.2099 8.6845 11.7543 -6.1204 11.4738 13.2616 6.8109 9.5679 +#> -13.9970 -6.7169 7.1447 -7.0404 -2.9133 8.2108 5.3468 -1.1020 +#> -9.1758 18.5591 14.3107 16.4954 14.1348 -3.4231 11.9152 -3.0018 +#> -2.1620 -0.2339 -2.0764 -10.2497 -2.7732 6.1084 14.7679 -17.9348 +#> -5.4329 -4.7309 -11.4260 -15.3764 10.0641 -9.1388 -0.5764 1.1177 +#> 11.0932 -9.1824 -1.3550 0.4423 -13.8164 6.2092 12.3749 -9.0443 +#> -5.2818 6.7898 -10.2957 -3.5308 2.9238 -7.6418 -4.0772 -5.8566 +#> -10.4743 -15.1906 10.0438 3.0422 8.8272 3.9424 6.1575 8.7274 +#> 16.2969 -2.8221 2.0969 -1.5984 0.6494 -7.9316 -2.5844 15.0276 +#> 5.7547 17.6778 -14.5500 -17.0432 2.0552 -5.0652 4.6477 2.8888 +#> -4.4138 2.8106 -10.8570 -5.7065 11.7775 -0.4233 6.0246 0.7196 +#> -12.5856 18.1345 -12.7985 -13.2050 6.7073 -6.8200 -7.4915 -3.0724 +#> 2.1405 4.6461 9.6643 -12.9346 8.3115 -0.7018 -6.4356 6.8158 +#> +#> Columns 41 to 48 -12.9826 -6.1457 -22.5488 -2.0832 -0.4162 9.0465 7.8768 -0.0694 +#> -0.9580 -7.0457 2.5557 -9.2195 3.5493 -8.0014 5.3317 -14.8083 +#> 1.6561 5.0123 -4.6498 4.0050 -10.8499 14.4019 6.2230 -5.0632 +#> 8.5846 -4.8309 -15.6632 -4.6525 -6.3167 17.2654 -6.8944 0.4633 +#> -5.9663 -4.1631 -14.6245 7.2168 -5.4054 -1.6174 13.9642 -8.5509 +#> 4.6117 -8.1391 -11.8904 4.0783 -16.7419 0.1631 -7.9859 -1.2737 +#> 6.5693 -2.6732 5.1538 -18.7892 2.5048 -11.5552 1.8862 4.1121 +#> 2.8661 1.9581 -18.6130 8.5455 -18.4324 7.4794 -15.1059 18.8131 +#> -3.7367 -4.4750 -2.8802 10.2123 -17.2796 9.6597 -3.4403 -10.2159 +#> -10.3463 3.4056 13.2073 1.2496 -7.6686 -9.6420 4.2977 -18.2493 +#> 7.3743 4.9233 -24.7237 1.1679 -10.1948 2.1863 3.6010 -3.3140 +#> -2.1077 6.6198 4.1753 26.1887 -4.3479 7.6100 2.2583 -1.4093 +#> 9.0027 7.2566 -13.9227 10.5004 -8.0264 5.4405 3.4126 -0.6111 +#> -1.7096 -2.5521 0.4205 -2.6405 -8.7542 0.6280 3.0397 4.6723 +#> -8.2249 -13.6187 6.7401 1.5473 2.5867 -10.1461 -6.3527 -13.9548 +#> 1.5364 7.2244 -3.4340 0.9902 -6.0006 5.9815 -0.6316 -4.7893 +#> 1.6934 -1.1801 1.8450 2.8821 -5.2247 -3.5259 0.5963 -4.4809 +#> 15.3615 -0.2178 -1.8231 3.3698 12.2011 1.5852 17.7421 -13.3385 +#> -10.7165 12.3857 5.7598 10.3255 2.9782 -3.5616 2.7659 4.2417 +#> 
4.7488 -7.8035 0.2133 -13.8613 3.9308 -4.9753 -2.1151 3.2447 +#> 2.0527 16.0718 4.7682 1.0596 -3.4375 8.6130 3.9209 -8.1618 +#> -2.6088 -4.4650 0.7012 15.9247 -8.6858 2.7749 9.4317 -10.2418 +#> 13.8988 13.9604 -4.1115 -2.0487 8.5215 -3.4302 4.5700 -10.3320 +#> -14.6660 -3.9202 0.7864 -4.2224 3.1868 -5.1094 4.5952 -10.4710 +#> -0.7723 -4.8941 10.8158 -9.3243 -6.1164 8.6143 1.8235 -1.1579 +#> -6.3796 -6.1962 -3.8432 -14.4264 -0.6532 6.8874 -1.5109 -9.5585 +#> -8.3360 5.8116 -20.3218 4.3709 -1.6870 0.2719 -1.1324 13.1253 +#> -0.7276 10.8420 -8.8445 5.1642 -4.7909 5.8770 2.3446 3.1134 +#> -3.4728 -8.4132 -9.3282 -2.4598 -13.9889 -0.1141 -17.7281 8.4579 +#> 0.7313 -19.1262 4.8611 4.3102 -14.6471 12.2990 9.9911 -10.5856 +#> -4.0521 9.9625 -5.2854 2.2909 -3.1532 3.3688 -2.9038 -1.1516 +#> -0.5690 -5.5379 -0.1454 -16.6919 1.0590 -11.4037 -17.8771 -8.3987 +#> 16.9248 7.9114 -9.7885 -4.4478 11.5209 -12.3277 3.0344 -4.4491 +#> +#> Columns 49 to 54 -2.4816 -10.0894 1.5627 -5.4510 2.0629 -7.8133 +#> 8.0295 7.2492 1.5337 0.4177 9.9281 2.2332 +#> 5.5653 11.4952 -10.0090 9.0028 -0.4650 -9.2711 +#> 1.5195 -7.7740 -4.9919 -6.7892 -2.5577 -4.0503 +#> 22.2933 -7.5280 4.2462 -3.8104 -10.4262 9.4113 +#> 2.1584 -0.8205 11.7842 -5.0073 -13.0770 10.9063 +#> 5.1162 3.6789 2.0313 -5.3068 -6.0775 4.7882 +#> 6.5513 -5.6222 4.8174 -10.5155 -6.2489 3.0581 +#> -3.1880 -18.2021 -2.9476 -7.1455 -14.8756 -5.6792 +#> 16.5248 -1.6972 -4.7581 2.6545 19.2105 -0.7215 +#> 0.2193 9.9254 -4.3810 -6.0774 -1.6955 2.7366 +#> 11.6958 -9.3216 0.9710 6.0428 -0.2308 -3.2657 +#> 13.9625 12.0341 3.2408 -2.6272 4.7907 -3.3315 +#> 7.8446 -8.4412 2.0168 -2.4455 -13.0284 7.6630 +#> -11.7375 -11.6168 9.6884 -6.0522 -4.5266 -1.8117 +#> 7.2731 11.9864 -7.5153 -3.1953 -1.0606 0.9127 +#> 7.0889 -6.3740 7.7219 -12.9289 -3.1015 -1.5893 +#> 0.6799 38.9608 -20.6200 -15.9873 -1.9256 7.7908 +#> -4.6026 0.4663 -2.1743 -3.4974 5.8252 1.5821 +#> 0.1014 -11.5744 5.3243 4.4900 5.1452 3.3991 +#> -1.5698 -10.0190 -14.3703 -1.4048 -4.0098 5.7948 +#> 14.9512 -10.6372 2.0850 -17.9989 -3.8559 -1.8780 +#> -11.8236 -5.0441 -13.3233 -0.7043 5.6571 -3.6184 +#> 4.4885 -0.1081 5.8368 -7.9772 -3.0133 7.3391 +#> 10.4458 15.7382 -2.6193 5.8192 3.3003 -4.2809 +#> 1.2943 3.1749 -12.1381 -8.1458 -0.0987 4.7939 +#> 0.3299 -15.6021 -1.2116 8.8089 -6.6296 -9.7615 +#> 12.9355 -11.1910 1.6897 -8.2362 -6.7389 -2.0588 +#> 4.3795 2.6748 -8.3122 -3.4194 6.7437 4.1822 +#> 3.5644 -0.2029 -3.1704 -2.1312 0.7880 2.3878 +#> 4.3588 -3.4375 2.4982 -0.9298 4.9904 -6.5135 +#> 1.9467 -17.8350 2.4432 8.4665 2.7656 0.2217 +#> -13.2103 7.5169 -7.0545 -10.5433 5.0430 -7.0124 #> [ CPUFloatType{20,33,54} ]
@@ -195,16 +227,16 @@

    Conv_transpose2d

torch_conv_transpose2d(
  input,
  weight,
  bias = list(),
  stride = 1L,
  padding = 0L,
  output_padding = 0L,
  groups = 1L,
  dilation = 1L
)

    Arguments

    @@ -253,69 +285,69 @@ composed of several input planes, sometimes also called "deconvolution".

    See nn_conv_transpose2d() for details and output shape.

    Examples

if (torch_is_installed()) {
# With square kernels and equal stride
inputs = torch_randn(c(1, 4, 5, 5))
weights = torch_randn(c(4, 8, 3, 3))
nnf_conv_transpose2d(inputs, weights, padding=1)
}
#> torch_tensor
#> (1,1,.,.) = 
#> ... (eight 5 x 5 output channels of random example values elided) ...
#> [ CPUFloatType{1,8,5,5} ]
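A hedged editor's sketch (not part of the generated page): stride, padding and output_padding together control upsampling, and by the output-shape formula referenced in nn_conv_transpose2d() the settings below should double each spatial dimension. The expected shape in the comment is an assumption derived from that formula.

if (torch_is_installed()) {
# sketch: upsample 5x5 -> 10x10; shape follows
# (H_in - 1) * stride - 2 * padding + (kernel - 1) + output_padding + 1
inputs = torch_randn(c(1, 4, 5, 5))
weights = torch_randn(c(4, 8, 3, 3))
out = nnf_conv_transpose2d(inputs, weights, stride = 2, padding = 1, output_padding = 1)
out$shape # expected: 1 8 10 10
}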
@@ -195,16 +227,16 @@

    Conv_transpose3d

torch_conv_transpose3d(
  input,
  weight,
  bias = list(),
  stride = 1L,
  padding = 0L,
  output_padding = 0L,
  groups = 1L,
  dilation = 1L
)

    Arguments

@@ -253,13 +285,13 @@ composed of several input planes, sometimes also called "deconvolution".

    See nn_conv_transpose3d() for details and output shape.

    Examples

if (torch_is_installed()) {
if (FALSE) {
inputs = torch_randn(c(20, 16, 50, 10, 20))
weights = torch_randn(c(16, 33, 3, 3, 3))
nnf_conv_transpose3d(inputs, weights)
}
}
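Because the example above is wrapped in if (FALSE) (it would allocate a large tensor), here is a smaller hedged sketch, added by the editor, that actually runs; the expected shape assumes the usual transposed-convolution formula.

if (torch_is_installed()) {
# sketch: each spatial dimension grows by kernel - 1 when stride = 1, padding = 0
inputs = torch_randn(c(1, 4, 5, 5, 5))
weights = torch_randn(c(4, 8, 3, 3, 3))
nnf_conv_transpose3d(inputs, weights)$shape # expected: 1 8 7 7 7
}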
@@ -195,7 +227,7 @@

    Cos

torch_cos(self)

    Arguments

    @@ -217,17 +249,17 @@ $$

    Examples

if (torch_is_installed()) {

a = torch_randn(c(4))
a
torch_cos(a)
}
#> torch_tensor
#> 0.3112
#> 0.0153
#> -0.1457
#> 0.8875
#> [ CPUFloatType{4} ]
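As a hedged check of the element-wise definition (an editor's sketch, not from the original page), cos(x)^2 + sin(x)^2 should equal 1 for every element:

if (torch_is_installed()) {
# sketch: verify the Pythagorean identity element-wise
a = torch_randn(c(4))
torch_allclose(torch_cos(a)^2 + torch_sin(a)^2, torch_ones(4))
}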
@@ -195,7 +227,7 @@

    Cosh

torch_cosh(self)

    Arguments

    @@ -218,17 +250,17 @@ $$

    Examples

if (torch_is_installed()) {

a = torch_randn(c(4))
a
torch_cosh(a)
}
#> torch_tensor
#> 3.1999
#> 1.2990
#> 2.7654
#> 1.1329
#> [ CPUFloatType{4} ]
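A hedged editor's sketch of the definition, cosh(x) = (exp(x) + exp(-x)) / 2 element-wise:

if (torch_is_installed()) {
# sketch: compare against the defining formula
a = torch_randn(c(4))
torch_allclose(torch_cosh(a), (torch_exp(a) + torch_exp(-a)) / 2)
}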
@@ -195,7 +227,7 @@

    Cosine_similarity

torch_cosine_similarity(x1, x2, dim = 2L, eps = 0)

    Arguments

    @@ -229,114 +261,114 @@ $$

    Examples

if (torch_is_installed()) {

input1 = torch_randn(c(100, 128))
input2 = torch_randn(c(100, 128))
output = torch_cosine_similarity(input1, input2)
output
}
#> torch_tensor
#> -0.0524
#> -0.0031
#> 0.0624
#> ... (further random example values elided) ...
#> -0.0131
#> 0.0257
#> [ CPUFloatType{100} ]
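A hedged editor's sketch reproducing the definition by hand; it assumes torch_norm() accepts a dim argument and relies on the defaults dim = 2 and eps = 0 shown above.

if (torch_is_installed()) {
# sketch: cosine similarity along dim 2, computed manually
x1 = torch_randn(c(3, 8))
x2 = torch_randn(c(3, 8))
manual = torch_sum(x1 * x2, dim = 2) /
  (torch_norm(x1, dim = 2) * torch_norm(x2, dim = 2))
torch_allclose(torch_cosine_similarity(x1, x2), manual)
}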
@@ -195,7 +227,7 @@

    Cross

torch_cross(self, other, dim = NULL)

    Arguments

    @@ -227,20 +259,20 @@ and other.

    size 3.

    Examples

if (torch_is_installed()) {

a = torch_randn(c(4, 3))
a
b = torch_randn(c(4, 3))
b
torch_cross(a, b, dim=2)
torch_cross(a, b)
}
#> torch_tensor
#> 0.5935 -0.0352 1.4618
#> -0.0312 -0.5835 0.0514
#> 0.3859 0.9580 0.3189
#> -1.2798 1.7368 1.1011
#> [ CPUFloatType{4,3} ]
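A hedged editor's sketch of the defining property: the cross product is orthogonal to both inputs, so its dot product with each should be approximately zero (for single size-3 vectors, dim = NULL picks the first dimension of size 3).

if (torch_is_installed()) {
# sketch: orthogonality of the cross product
a = torch_randn(c(3))
b = torch_randn(c(3))
v = torch_cross(a, b)
torch_dot(v, a) # ~ 0 up to floating-point error
torch_dot(v, b) # ~ 0
}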
@@ -195,7 +227,7 @@

    Cummax

torch_cummax(self, dim)

    Arguments

    @@ -223,38 +255,38 @@ location of each maximum value found in the dimension dim.

$$ y_i = \max(x_1, x_2, \ldots, x_i) $$

    Examples

if (torch_is_installed()) {

a = torch_randn(c(10))
a
torch_cummax(a, dim=1)
}
#> [[1]]
#> torch_tensor
#> -0.9319
#> 1.6240
#> 1.6240
#> 1.6240
#> 1.6240
#> 1.6240
#> 2.1832
#> 2.1832
#> 2.1832
#> 2.1832
#> [ CPUFloatType{10} ]
#> 
#> [[2]]
#> torch_tensor
#> 0
#> 1
#> 1
#> 1
#> 1
#> 1
#> 6
#> 6
#> 6
#> 6
#> [ CPULongType{10} ]
#> 
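A hedged editor's sketch: running maxima are nondecreasing by construction. This assumes 1-based tensor slicing and torch_all() as in current torch for R.

if (torch_is_installed()) {
# sketch: the values component never decreases
a = torch_randn(c(10))
res = torch_cummax(a, dim = 1)
torch_all(res[[1]][2:10] >= res[[1]][1:9]) # boolean tensor, expected true
}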
    diff --git a/reference/torch_cummin.html b/reference/torch_cummin.html index e1892ecb05d6381e1a0250ea92b3d25bc60c9ea6..5ed0acd1e5d520d74017476568c26405c28a2647 100644 --- a/reference/torch_cummin.html +++ b/reference/torch_cummin.html @@ -38,6 +38,8 @@ + + @@ -55,6 +57,16 @@ + + + + @@ -72,7 +84,7 @@ torch - 0.0.3 + 0.1.0 @@ -145,6 +157,9 @@
@@ -195,7 +227,7 @@

    Cummin

torch_cummin(self, dim)

    Arguments

@@ -223,24 +255,25 @@ location of each minimum value found in the dimension dim.

$$ y_i = \min(x_1, x_2, \ldots, x_i) $$

    Examples

if (torch_is_installed()) {

a = torch_randn(c(10))
a
torch_cummin(a, dim=1)
}
#> [[1]]
#> torch_tensor
#> 0.01 *
#> -4.5300
#> -4.5300
#> -4.5300
#> -156.5067
#> -156.5067
#> -156.5067
#> -156.5067
#> -156.5067
#> -156.5067
#> -156.5067
#> [ CPUFloatType{10} ]
#> 
#> [[2]]
@@ -248,13 +281,13 @@ $$

#> 0
#> 0
#> 0
#> 3
#> 3
#> 3
#> 3
#> 3
#> 3
#> 3
#> [ CPULongType{10} ]
#> 
    diff --git a/reference/torch_cumprod.html b/reference/torch_cumprod.html index 4ec85b4c99cf6594801cd17d8ca742b830a893b2..1e07d69c6bff414d3a598cf61b538ecea95d1a41 100644 --- a/reference/torch_cumprod.html +++ b/reference/torch_cumprod.html @@ -38,6 +38,8 @@ + + @@ -55,6 +57,16 @@ + + + + @@ -72,7 +84,7 @@ torch - 0.0.3 + 0.1.0 @@ -145,6 +157,9 @@
@@ -195,7 +227,7 @@

    Cumprod

torch_cumprod(self, dim, dtype = NULL)

    Arguments

    @@ -228,23 +260,24 @@ a vector of size N, with elements.

$$ y_i = x_1 \times x_2 \times \cdots \times x_i $$

    Examples

if (torch_is_installed()) {

a = torch_randn(c(10))
a
torch_cumprod(a, dim=1)
}
#> torch_tensor
#> 0.01 *
#> -1.5305
#> 0.5353
#> 1.1566
#> -1.0608
#> -1.0334
#> -0.1878
#> -0.1060
#> 0.0316
#> -0.0599
#> -0.0588
#> [ CPUFloatType{10} ]
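A hedged editor's sketch of the formula above: the last cumulative product equals the product of all elements.

if (torch_is_installed()) {
# sketch: p[i] = x[1] * ... * x[i], so p[10] matches torch_prod()
a = torch_randn(c(10))
p = torch_cumprod(a, dim = 1)
torch_allclose(p[10], torch_prod(a))
}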
@@ -195,7 +227,7 @@

    Cumsum

torch_cumsum(self, dim, dtype = NULL)

    Arguments

    @@ -228,23 +260,23 @@ a vector of size N, with elements.

$$ y_i = x_1 + x_2 + \cdots + x_i $$

    Examples

if (torch_is_installed()) {

a = torch_randn(c(10))
a
torch_cumsum(a, dim=1)
}
#> torch_tensor
#> 1.0341
#> 0.3108
#> 1.2103
#> 0.9886
#> 1.7014
#> 2.0861
#> 2.9594
#> 3.5739
#> 3.7172
#> 1.8170
#> [ CPUFloatType{10} ]
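Analogously, a hedged editor's sketch: the last cumulative sum equals the total sum.

if (torch_is_installed()) {
# sketch: s[i] = x[1] + ... + x[i], so s[10] matches torch_sum()
a = torch_randn(c(10))
s = torch_cumsum(a, dim = 1)
torch_allclose(s[10], torch_sum(a))
}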
@@ -195,7 +227,7 @@

    Det

torch_det(self)

    Arguments

@@ -223,18 +255,18 @@ unstable when `input` doesn't have distinct singular values. See

    Calculates determinant of a square matrix or batches of square matrices.

    Examples

if (torch_is_installed()) {

A = torch_randn(c(3, 3))
torch_det(A)
A = torch_randn(c(3, 2, 2))
A
A$det()
}
#> torch_tensor
#> 3.5172
#> 0.0857
#> -2.0464
#> [ CPUFloatType{3} ]
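A hedged editor's sketch of a standard property, det(AB) = det(A) det(B); the loose tolerance allows for float32 rounding.

if (torch_is_installed()) {
# sketch: determinants are multiplicative
A = torch_randn(c(3, 3))
B = torch_randn(c(3, 3))
torch_allclose(torch_det(torch_matmul(A, B)), torch_det(A) * torch_det(B), atol = 1e-4)
}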
@@ -197,7 +229,7 @@ is or will be allocated.

torch_device(type, index = NULL)

    Arguments

    @@ -219,18 +251,18 @@ with device 'cuda' is equivalent to 'cuda:X' where X i

    Examples

if (torch_is_installed()) {

# Via string
torch_device("cuda:1")
torch_device("cpu")
torch_device("cuda") # current cuda device

# Via string and device ordinal
torch_device("cuda", 0)
torch_device("cpu", 0)

}
    #> torch_device(type='cpu', index=0)
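A hedged editor's sketch: device objects (or their string forms) are passed to tensor-creation functions, and a tensor reports its device via $device.

if (torch_is_installed()) {
# sketch: create a tensor on an explicit device and inspect it
x = torch_randn(c(2, 2), device = torch_device("cpu"))
x$device
}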
@@ -195,7 +227,7 @@

    Diag

torch_diag(self, diagonal = 0L)

    Arguments

    diff --git a/reference/torch_diag_embed.html b/reference/torch_diag_embed.html index 902ceb33721227e445d77b978bb841d4f88eb75c..192081f1c6fe9ff5ec4aa3ce75a25b592532f907 100644 --- a/reference/torch_diag_embed.html +++ b/reference/torch_diag_embed.html @@ -38,6 +38,8 @@ + + @@ -55,6 +57,16 @@ + + + + @@ -72,7 +84,7 @@ torch - 0.0.3 + 0.1.0 @@ -145,6 +157,9 @@
@@ -195,7 +227,7 @@

    Diag_embed

torch_diag_embed(self, offset = 0L, dim1 = -2L, dim2 = -1L)

    Arguments

@@ -244,24 +276,24 @@ the same arguments yields a matrix identical to input. However, torch_diagonal() has different default dimensions, so those need to be explicitly specified.

    Examples

if (torch_is_installed()) {

a = torch_randn(c(2, 3))
torch_diag_embed(a)
torch_diag_embed(a, offset=1, dim1=1, dim2=3)
}
#> torch_tensor
#> (1,.,.) = 
#>  0.0000 -0.7715 0.0000 0.0000
#>  0.0000 -1.6565 0.0000 0.0000
#> 
#> (2,.,.) = 
#>  0.0000 0.0000 -0.2706 0.0000
#>  0.0000 0.0000 -0.1178 0.0000
#> 
#> (3,.,.) = 
#>  0.0000 0.0000 0.0000 -0.4561
#>  0.0000 0.0000 0.0000 1.5418
#> 
#> (4,.,.) = 
#>  0 0 0 0
diff --git a/reference/torch_diagflat.html index 308a4c3471277d7bd22a87aea466b586911aece3..5a0e08342cb9cb5563fee4606f830624431bdb6a 100644 --- a/reference/torch_diagflat.html +++ b/reference/torch_diagflat.html @@ -72,7 +84,7 @@ torch - 0.0.3 + 0.1.0
@@ -195,7 +227,7 @@

    Diagflat

torch_diagflat(self, offset = 0L)

    Arguments

    @@ -229,21 +261,21 @@ with the elements of input as the diagonal.

    Examples

if (torch_is_installed()) {

a = torch_randn(c(3))
a
torch_diagflat(a)
torch_diagflat(a, 1)
a = torch_randn(c(2, 2))
a
torch_diagflat(a)
}
#> torch_tensor
#> 0.3237 0.0000 0.0000 0.0000
#> 0.0000 -0.8425 0.0000 0.0000
#> 0.0000 0.0000 0.3246 0.0000
#> 0.0000 0.0000 0.0000 0.8017
#> [ CPUFloatType{4,4} ]
@@ -195,7 +227,7 @@

    Diagonal

torch_diagonal(self, outdim, dim1 = 1L, dim2 = 2L, offset = 0L)

    Arguments

    @@ -242,31 +274,31 @@ of the input. However, torch_diag_embed has different default dimensions, so those need to be explicitly specified.

    Examples

if (torch_is_installed()) {

a = torch_randn(c(3, 3))
a
torch_diagonal(a, offset = 0)
torch_diagonal(a, offset = 1)
x = torch_randn(c(2, 5, 4, 2))
torch_diagonal(x, offset=-1, dim1=1, dim2=2)
}
#> torch_tensor
#> (1,.,.) = 
#>  2.4176
#>  1.8687
#> 
#> (2,.,.) = 
#>  0.3707
#>  0.0257
#> 
#> (3,.,.) = 
#>  2.5732
#>  -0.6057
#> 
#> (4,.,.) = 
#>  0.2208
#>  -0.7582
#> [ CPUFloatType{4,2,1} ]
@@ -195,7 +227,7 @@

    Digamma

torch_digamma(self)

    Arguments

    @@ -217,11 +249,11 @@ $$

    Examples

if (torch_is_installed()) {

a = torch_tensor(c(1, 0.5))
torch_digamma(a)
}
    #> torch_tensor #> -0.5772 #> -1.9635 diff --git a/reference/torch_dist.html b/reference/torch_dist.html index cf47972c0645444d69bd847e2fd7317aa0e4091a..06b5b6cb34d3abeeab8682314db3a433997e0e5a 100644 --- a/reference/torch_dist.html +++ b/reference/torch_dist.html @@ -38,6 +38,8 @@ + + @@ -55,6 +57,16 @@ + + + + @@ -72,7 +84,7 @@ torch - 0.0.3 + 0.1.0
@@ -195,7 +227,7 @@

    Dist

torch_dist(self, other, p = 2L)

    Arguments

@@ -224,19 +256,19 @@ broadcastable.

    Examples

if (torch_is_installed()) {

x = torch_randn(c(4))
x
y = torch_randn(c(4))
y
torch_dist(x, y, 3.5)
torch_dist(x, y, 3)
torch_dist(x, y, 0)
torch_dist(x, y, 1)
}
#> torch_tensor
#> 3.86412
#> [ CPUFloatType{} ]
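A hedged editor's sketch: with p = 2 (the default), torch_dist() agrees with the 2-norm of the difference.

if (torch_is_installed()) {
# sketch: dist(x, y) equals ||x - y||_2
x = torch_randn(c(4))
y = torch_randn(c(4))
torch_allclose(torch_dist(x, y), torch_norm(x - y))
}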
@@ -195,7 +227,7 @@

    Div

torch_div(self, other)

    Arguments

@@ -250,24 +282,24 @@ to the torch_dtype of the specified output tensor. Integral division by zero leads to undefined behavior.

    Examples

if (torch_is_installed()) {

a = torch_randn(c(5))
a
torch_div(a, 0.5)

a = torch_randn(c(4, 4))
a
b = torch_randn(c(4))
b
torch_div(a, b)
}
#> torch_tensor
#> 2.9167 -0.6880 0.1183 -15.0537
#> 0.9509 0.1741 0.0892 8.4831
#> 1.2162 0.5509 0.0958 -2.2754
#> 1.4318 -0.6574 -0.6130 6.9025
#> [ CPUFloatType{4,4} ]
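A hedged editor's sketch: dividing by a scalar is the same as multiplying by its reciprocal.

if (torch_is_installed()) {
# sketch: a / 0.5 equals a * 2, element-wise
a = torch_randn(c(5))
torch_allclose(torch_div(a, 0.5), a * 2)
}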
@@ -195,7 +227,7 @@

    Dot

torch_dot(self, tensor)

    Arguments

    @@ -221,10 +253,10 @@

    Computes the dot product (inner product) of two tensors.

    Examples

if (torch_is_installed()) {

torch_dot(torch_tensor(c(2, 3)), torch_tensor(c(2, 1)))
}
    #> torch_tensor #> 7 #> [ CPUFloatType{} ]
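A hedged editor's sketch: the dot product equals the sum of the element-wise products, matching the 2*2 + 3*1 = 7 above.

if (torch_is_installed()) {
# sketch: dot(u, v) equals sum(u * v)
u = torch_tensor(c(2, 3))
v = torch_tensor(c(2, 1))
torch_allclose(torch_dot(u, v), torch_sum(u * v))
}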
    diff --git a/reference/torch_dtype.html b/reference/torch_dtype.html index ca6599a883533c95cb950245371399416a2da797..85c8f6ca9d46e289e25ff89723ec8ab2521c321e 100644 --- a/reference/torch_dtype.html +++ b/reference/torch_dtype.html @@ -38,6 +38,8 @@ + + @@ -55,6 +57,16 @@ + + + + @@ -72,7 +84,7 @@ torch - 0.0.3 + 0.1.0
@@ -195,41 +227,41 @@

Returns the corresponding data type.

torch_float32()

torch_float()

torch_float64()

torch_double()

torch_float16()

torch_half()

torch_uint8()

torch_int8()

torch_int16()

torch_short()

torch_int32()

torch_int()

torch_int64()

torch_long()

torch_bool()

torch_quint8()

torch_qint8()

torch_qint32()
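This page lists the generators but no example; a hedged editor's sketch of typical use, assuming torch_tensor() and torch_randn() accept a dtype argument as in current releases:

if (torch_is_installed()) {
# sketch: pass a dtype to tensor-creation functions and inspect it
x = torch_tensor(c(1, 2), dtype = torch_int64())
x$dtype
torch_randn(c(2, 2), dtype = torch_float64())$dtype
}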
    diff --git a/reference/torch_eig.html b/reference/torch_eig.html index 66ca51941b79471e1260b77d283ecfc36e70c058..4e18e8c25959c0dc15be2fb13a839be5f0dbad80 100644 --- a/reference/torch_eig.html +++ b/reference/torch_eig.html @@ -38,6 +38,8 @@ + + @@ -55,6 +57,16 @@ + + + + @@ -72,7 +84,7 @@ torch - 0.0.3 + 0.1.0 @@ -145,6 +157,9 @@
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -158,7 +173,24 @@
  • - +
  • Reference
  • @@ -195,7 +227,7 @@

    Eig

    -
    torch_eig(self, eigenvectors = FALSE)
    +
    torch_eig(self, eigenvectors = FALSE)

    Arguments

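No example is rendered for this page; a minimal sketch on a fixed 2 x 2 matrix, following the signature above:

if (torch_is_installed()) {
  # eigen-decomposition of a diagonal matrix: eigenvalues are 2 and 3
  a <- torch_tensor(matrix(c(2, 0, 0, 3), ncol = 2))
  torch_eig(a, eigenvectors = TRUE)
}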
    diff --git a/reference/torch_einsum.html b/reference/torch_einsum.html index 7e8f13755d6a056d0796f856b56db40d4872462f..196d3f80a13d46741ccf26708c8adb48460bac9b 100644 --- a/reference/torch_einsum.html +++ b/reference/torch_einsum.html @@ -38,6 +38,8 @@ + + @@ -55,6 +57,16 @@ + + + + @@ -72,7 +84,7 @@ torch - 0.0.3 + 0.1.0 @@ -145,6 +157,9 @@
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -158,7 +173,24 @@
  • - +
  • Reference
  • @@ -195,7 +227,7 @@

    Einsum

    -
    torch_einsum(equation, tensors)
    +
    torch_einsum(equation, tensors)

    Arguments

    @@ -219,29 +251,29 @@ Einstein summation convention.

    Examples

    -
    if (torch_is_installed()) { - -if (FALSE) { - -x = torch_randn(c(5)) -y = torch_randn(c(4)) -torch_einsum('i,j->ij', list(x, y)) # outer product -A = torch_randn(c(3,5,4)) -l = torch_randn(c(2,5)) -r = torch_randn(c(2,4)) -torch_einsum('bn,anm,bm->ba', list(l, A, r)) # compare torch_nn$functional$bilinear -As = torch_randn(c(3,2,5)) -Bs = torch_randn(c(3,5,4)) -torch_einsum('bij,bjk->bik', list(As, Bs)) # batch matrix multiplication -A = torch_randn(c(3, 3)) -torch_einsum('ii->i', list(A)) # diagonal -A = torch_randn(c(4, 3, 3)) -torch_einsum('...ii->...i', list(A)) # batch diagonal -A = torch_randn(c(2, 3, 4, 5)) -torch_einsum('...ij->...ji', list(A))$shape # batch permute - -} -} +
    if (torch_is_installed()) { + +if (FALSE) { + +x = torch_randn(c(5)) +y = torch_randn(c(4)) +torch_einsum('i,j->ij', list(x, y)) # outer product +A = torch_randn(c(3,5,4)) +l = torch_randn(c(2,5)) +r = torch_randn(c(2,4)) +torch_einsum('bn,anm,bm->ba', list(l, A, r)) # compare torch_nn$functional$bilinear +As = torch_randn(c(3,2,5)) +Bs = torch_randn(c(3,5,4)) +torch_einsum('bij,bjk->bik', list(As, Bs)) # batch matrix multiplication +A = torch_randn(c(3, 3)) +torch_einsum('ii->i', list(A)) # diagonal +A = torch_randn(c(4, 3, 3)) +torch_einsum('...ii->...i', list(A)) # batch diagonal +A = torch_randn(c(2, 3, 4, 5)) +torch_einsum('...ij->...ji', list(A))$shape # batch permute + +} +}
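The rendered example above is wrapped in if (FALSE) and never runs; a small runnable sketch of the outer-product case taken from it:

if (torch_is_installed()) {
  x <- torch_randn(c(5))
  y <- torch_randn(c(4))
  torch_einsum('i,j->ij', list(x, y))  # outer product, shape 5 x 4
}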
    @@ -145,6 +157,9 @@
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -158,7 +173,24 @@
  • - +
  • Reference
  • @@ -195,14 +227,14 @@

    Empty

    -
    torch_empty(
    -  ...,
    -  names = NULL,
    -  dtype = NULL,
    -  layout = torch_strided(),
    -  device = NULL,
    -  requires_grad = FALSE
    -)
    +
    torch_empty(
    +  ...,
    +  names = NULL,
    +  dtype = NULL,
    +  layout = torch_strided(),
    +  device = NULL,
    +  requires_grad = FALSE
    +)

    Arguments

    @@ -242,13 +274,13 @@ defined by the variable argument size.

    Examples

    -
    if (torch_is_installed()) { +
    if (torch_is_installed()) { -torch_empty(c(2, 3)) -} +torch_empty(c(2, 3)) +}
    #> torch_tensor -#> 1.9205e+31 1.8891e+31 6.3375e-10 -#> 1.8169e+31 4.4726e+21 8.4843e+26 +#> 4.4726e+21 8.4843e+26 2.7945e+20 +#> 5.5610e+31 4.2964e+24 8.4725e+11 #> [ CPUFloatType{2,3} ]
    @@ -145,6 +157,9 @@
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -158,7 +173,24 @@
  • - +
  • Reference
  • @@ -195,14 +227,14 @@

    Empty_like

    -
    torch_empty_like(
    -  input,
    -  dtype = NULL,
    -  layout = torch_strided(),
    -  device = NULL,
    -  requires_grad = FALSE,
    -  memory_format = torch_preserve_format()
    -)
    +
    torch_empty_like(
    +  input,
    +  dtype = NULL,
    +  layout = torch_strided(),
    +  device = NULL,
    +  requires_grad = FALSE,
    +  memory_format = torch_preserve_format()
    +)

    Arguments

    @@ -243,13 +275,13 @@ torch_empty(input.size(), dtype=input.dtype, layout=input.layout, device=input.device).

    Examples

    -
    if (torch_is_installed()) { +
    if (torch_is_installed()) { -torch_empty(list(2,3), dtype = torch_int64()) -} +torch_empty(list(2,3), dtype = torch_int64()) +}
    #> torch_tensor -#> 1.2885e+10 0.0000e+00 0.0000e+00 -#> 0.0000e+00 1.7180e+10 1.3700e+02 +#> 1.4062e+14 1.4062e+14 0.0000e+00 +#> 0.0000e+00 1.2885e+10 0.0000e+00 #> [ CPULongType{2,3} ]
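Note that the rendered example calls torch_empty() rather than torch_empty_like(); a minimal sketch of the documented function itself:

if (torch_is_installed()) {
  x <- torch_randn(c(2, 3))
  # uninitialized tensor with the same shape, dtype and device as x
  torch_empty_like(x)
}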
    @@ -145,6 +157,9 @@
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -158,7 +173,24 @@
  • - +
  • Reference
  • @@ -195,15 +227,15 @@

    Empty_strided

    -
    torch_empty_strided(
    -  size,
    -  stride,
    -  dtype = NULL,
    -  layout = torch_strided(),
    -  device = NULL,
    -  requires_grad = FALSE,
    -  pin_memory = FALSE
    -)
    +
    torch_empty_strided(
    +  size,
    +  stride,
    +  dtype = NULL,
    +  layout = torch_strided(),
    +  device = NULL,
    +  requires_grad = FALSE,
    +  pin_memory = FALSE
    +)

    Arguments

    @@ -257,13 +289,13 @@ vectorized) may result in incorrect behavior. If you need to write to the tensors, please clone them first.

    Examples

    -
    if (torch_is_installed()) { +
    if (torch_is_installed()) { -a = torch_empty_strided(list(2, 3), list(1, 2)) -a -a$stride(1) -a$size(1) -} +a = torch_empty_strided(list(2, 3), list(1, 2)) +a +a$stride(1) +a$size(1) +}
    #> [1] 2
    @@ -145,6 +157,9 @@
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -158,7 +173,24 @@
  • - +
  • Reference
  • @@ -195,7 +227,7 @@

    Eq

    -
    torch_eq(self, other)
    +
    torch_eq(self, other)

    Arguments

    @@ -221,10 +253,10 @@ Must be a ByteTensor

    broadcastable with the first argument.

    Examples

    -
    if (torch_is_installed()) { +
    if (torch_is_installed()) { -torch_eq(torch_tensor(c(1,2,3,4)), torch_tensor(c(1, 3, 2, 4))) -} +torch_eq(torch_tensor(c(1,2,3,4)), torch_tensor(c(1, 3, 2, 4))) +}
    #> torch_tensor #> 1 #> 0 diff --git a/reference/torch_equal.html b/reference/torch_equal.html index f094458d9b263ba0d0022d3059a4b804c0149a0d..1cbe65cce019206b3e8011ceae8f3c74013e5a29 100644 --- a/reference/torch_equal.html +++ b/reference/torch_equal.html @@ -38,6 +38,8 @@ + + @@ -55,6 +57,16 @@ + + + + @@ -72,7 +84,7 @@ torch - 0.0.3 + 0.1.0
    @@ -145,6 +157,9 @@
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -158,7 +173,24 @@
  • - +
  • Reference
  • @@ -195,7 +227,7 @@

    Equal

    -
    torch_equal(self, other)
    +
    torch_equal(self, other)

    Arguments

    @@ -218,10 +250,10 @@

    TRUE if two tensors have the same size and elements, FALSE otherwise.

    Examples

    -
    if (torch_is_installed()) { +
    if (torch_is_installed()) { -torch_equal(torch_tensor(c(1, 2)), torch_tensor(c(1, 2))) -} +torch_equal(torch_tensor(c(1, 2)), torch_tensor(c(1, 2))) +}
    #> [1] TRUE
    @@ -145,6 +157,9 @@
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -158,7 +173,24 @@
  • - +
  • Reference
  • @@ -195,7 +227,7 @@

    Erf

    -
    torch_erf(self)
    +
    torch_erf(self)

    Arguments

    @@ -217,10 +249,10 @@ $$

    Examples

    -
    if (torch_is_installed()) { +
    if (torch_is_installed()) { -torch_erf(torch_tensor(c(0, -1., 10.))) -} +torch_erf(torch_tensor(c(0, -1., 10.))) +}
    #> torch_tensor #> 0.0000 #> -0.8427 diff --git a/reference/torch_erfc.html b/reference/torch_erfc.html index 8a4a96ccdd59f6ac64c572f786f0c07cf319bb4c..ba55d33d9345584aa8789149396ddfc8914c9ac8 100644 --- a/reference/torch_erfc.html +++ b/reference/torch_erfc.html @@ -38,6 +38,8 @@ + + @@ -55,6 +57,16 @@ + + + + @@ -72,7 +84,7 @@ torch - 0.0.3 + 0.1.0
    @@ -145,6 +157,9 @@
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -158,7 +173,24 @@
  • - +
  • Reference
  • @@ -195,7 +227,7 @@

    Erfc

    -
    torch_erfc(self)
    +
    torch_erfc(self)

    Arguments

    @@ -218,10 +250,10 @@ The complementary error function is defined as follows:

$$ \mathrm{erfc}(x) = 1 - \frac{2}{\sqrt{\pi}} \int_{0}^{x} e^{-t^{2}} dt $$

    Examples

    -
    if (torch_is_installed()) { +
    if (torch_is_installed()) { -torch_erfc(torch_tensor(c(0, -1., 10.))) -} +torch_erfc(torch_tensor(c(0, -1., 10.))) +}
    #> torch_tensor #> 1.0000e+00 #> 1.8427e+00 diff --git a/reference/torch_erfinv.html b/reference/torch_erfinv.html index 03295dcb40caa4bc7783e616e9a675c035ec2a98..033f6d07d901d51fd69e48238dd397345f9725e7 100644 --- a/reference/torch_erfinv.html +++ b/reference/torch_erfinv.html @@ -38,6 +38,8 @@ + + @@ -55,6 +57,16 @@ + + + + @@ -72,7 +84,7 @@ torch - 0.0.3 + 0.1.0
    @@ -145,6 +157,9 @@
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -158,7 +173,24 @@
  • - +
  • Reference
  • @@ -195,7 +227,7 @@

    Erfinv

    -
    torch_erfinv(self)
    +
    torch_erfinv(self)

    Arguments

    @@ -218,10 +250,10 @@ The inverse error function is defined in the range \((-1, 1)\) as:

$$ \mathrm{erfinv}(\mathrm{erf}(x)) = x $$

    Examples

    -
    if (torch_is_installed()) { +
    if (torch_is_installed()) { -torch_erfinv(torch_tensor(c(0, 0.5, -1.))) -} +torch_erfinv(torch_tensor(c(0, 0.5, -1.))) +}
    #> torch_tensor #> 0.0000 #> 0.4769 diff --git a/reference/torch_exp.html b/reference/torch_exp.html index e045862d18133ca66450a18572f8b0dae9b3365a..45e0c295a1c55869227fdb6c3b666f94cad85299 100644 --- a/reference/torch_exp.html +++ b/reference/torch_exp.html @@ -38,6 +38,8 @@ + + @@ -55,6 +57,16 @@ + + + + @@ -72,7 +84,7 @@ torch - 0.0.3 + 0.1.0
    @@ -145,6 +157,9 @@
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -158,7 +173,24 @@
  • - +
  • Reference
  • @@ -195,7 +227,7 @@

    Exp

    -
    torch_exp(self)
    +
    torch_exp(self)

    Arguments

    @@ -218,10 +250,10 @@ of the input tensor input.

$$ y_{i} = e^{x_{i}} $$

    Examples

    -
    if (torch_is_installed()) { +
    if (torch_is_installed()) { -torch_exp(torch_tensor(c(0, log(2)))) -} +torch_exp(torch_tensor(c(0, log(2)))) +}
    #> torch_tensor #> 1 #> 2 diff --git a/reference/torch_expm1.html b/reference/torch_expm1.html index 7c09819e360b131ece10103daa522cdac3d6997a..db8176f8fbde7329187e4372089edc6e0cd42751 100644 --- a/reference/torch_expm1.html +++ b/reference/torch_expm1.html @@ -38,6 +38,8 @@ + + @@ -55,6 +57,16 @@ + + + + @@ -72,7 +84,7 @@ torch - 0.0.3 + 0.1.0
    @@ -145,6 +157,9 @@
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -158,7 +173,24 @@
  • - +
  • Reference
  • @@ -195,7 +227,7 @@

    Expm1

    -
    torch_expm1(self)
    +
    torch_expm1(self)

    Arguments

    @@ -218,10 +250,10 @@ of input.

$$ y_{i} = e^{x_{i}} - 1 $$

    Examples

    -
    if (torch_is_installed()) { +
    if (torch_is_installed()) { -torch_expm1(torch_tensor(c(0, log(2)))) -} +torch_expm1(torch_tensor(c(0, log(2)))) +}
    #> torch_tensor #> 0 #> 1 diff --git a/reference/torch_eye.html b/reference/torch_eye.html index 9bb24caefa330575667237e1cb8e91ba7fb2f440..c427e244166f6c7ff33872923851832811cfc814 100644 --- a/reference/torch_eye.html +++ b/reference/torch_eye.html @@ -38,6 +38,8 @@ + + @@ -55,6 +57,16 @@ + + + + @@ -72,7 +84,7 @@ torch - 0.0.3 + 0.1.0
    @@ -145,6 +157,9 @@
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -158,7 +173,24 @@
  • - +
  • Reference
  • @@ -195,14 +227,14 @@

    Eye

    -
    torch_eye(
    -  n,
    -  m = n,
    -  dtype = NULL,
    -  layout = torch_strided(),
    -  device = NULL,
    -  requires_grad = FALSE
    -)
    +
    torch_eye(
    +  n,
    +  m = n,
    +  dtype = NULL,
    +  layout = torch_strided(),
    +  device = NULL,
    +  requires_grad = FALSE
    +)

    Arguments

    @@ -241,10 +273,10 @@

    Returns a 2-D tensor with ones on the diagonal and zeros elsewhere.

    Examples

    -
    if (torch_is_installed()) { +
    if (torch_is_installed()) { -torch_eye(3) -} +torch_eye(3) +}
    #> torch_tensor #> 1 0 0 #> 0 1 0 diff --git a/reference/torch_fft.html b/reference/torch_fft.html index 78ddfe283211e863a5cbb025ef6efcdbbdda08fc..e2f898d40d4c3b92ce50c889c786abc688675057 100644 --- a/reference/torch_fft.html +++ b/reference/torch_fft.html @@ -38,6 +38,8 @@ + + @@ -55,6 +57,16 @@ + + + + @@ -72,7 +84,7 @@ torch - 0.0.3 + 0.1.0
    @@ -145,6 +157,9 @@
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -158,7 +173,24 @@
  • - +
  • Reference
  • @@ -195,7 +227,7 @@

    Fft

    -
    torch_fft(self, signal_ndim, normalized = FALSE)
    +
    torch_fft(self, signal_ndim, normalized = FALSE)

    Arguments

    @@ -256,333 +288,333 @@ shape of input.

    torch_backends.mkl.is_available to check if MKL is installed.

    Examples

    -
    if (torch_is_installed()) { +
    if (torch_is_installed()) { # unbatched 2D FFT -x = torch_randn(c(4, 3, 2)) -torch_fft(x, 2) +x = torch_randn(c(4, 3, 2)) +torch_fft(x, 2) # batched 1D FFT -torch_fft(x, 1) +torch_fft(x, 1) # arbitrary number of batch dimensions, 2D FFT -x = torch_randn(c(3, 3, 5, 5, 2)) -torch_fft(x, 2) +x = torch_randn(c(3, 3, 5, 5, 2)) +torch_fft(x, 2) -} +}
    #> torch_tensor #> (1,1,1,.,.) = -#> 3.8970 3.1487 -#> 3.5089 -8.0428 -#> -4.2355 4.7304 -#> 5.1422 8.4612 -#> -0.2829 6.5526 +#> -1.1481 -4.3297 +#> -4.7188 -6.2804 +#> -1.3097 -4.8588 +#> -0.6476 -7.7830 +#> 5.8857 0.0997 #> #> (2,1,1,.,.) = -#> -6.4914 0.7626 -#> -9.6798 -6.4868 -#> 4.7413 -6.3207 -#> 3.1433 6.0861 -#> 1.4183 8.1368 +#> -1.6040 2.3841 +#> 2.0651 -0.7709 +#> 3.9230 8.8587 +#> 5.2396 -3.9834 +#> 2.9333 -2.8714 #> #> (3,1,1,.,.) = -#> 4.1796 -0.8876 -#> -2.7845 -0.2090 -#> 6.4877 -2.0553 -#> 1.6739 -1.4434 -#> 6.7934 3.7603 +#> 0.3662 1.1582 +#> 3.3350 -1.2566 +#> -6.8803 1.0332 +#> 0.6336 -4.3311 +#> -8.1302 -1.4290 #> #> (1,2,1,.,.) = -#> -8.0135 -3.4585 -#> 3.2928 1.1881 -#> 1.0519 7.3132 -#> -2.0995 0.7831 -#> 11.4499 0.4932 +#> 9.9370 -4.7862 +#> 1.7947 8.3546 +#> 7.8480 -4.3197 +#> -4.5602 -4.6462 +#> 9.8942 -1.0242 #> #> (2,2,1,.,.) = -#> 2.4901 2.9753 -#> -8.2115 4.1514 -#> 6.0281 1.2984 -#> -7.5086 1.3346 -#> 3.6055 -0.8337 +#> 5.4458 -1.6464 +#> 0.9909 -0.2878 +#> -3.9934 7.8341 +#> 0.6661 0.3599 +#> 9.6997 -2.0700 #> #> (3,2,1,.,.) = -#> 4.5424 -0.5391 -#> 10.2617 -3.2754 -#> -1.8136 5.1917 -#> -4.6990 -0.9120 -#> -0.9396 -2.4928 +#> -0.2640 13.3921 +#> -3.9612 1.9535 +#> -2.7264 -6.7090 +#> 6.3232 -2.0279 +#> 3.9858 -3.9658 #> #> (1,3,1,.,.) = -#> -4.4424 -3.1498 -#> -0.4156 0.1798 -#> -13.2501 -4.7913 -#> -0.4209 2.1745 -#> -0.4077 -2.1715 +#> -4.8588 -0.8276 +#> 4.9844 -1.4986 +#> -2.8926 -1.1695 +#> 5.5408 -3.3529 +#> -1.9728 -1.8652 #> #> (2,3,1,.,.) = -#> 1.8846 7.9342 -#> 3.5554 -4.6065 -#> 2.1314 -1.0630 -#> -1.8522 5.3271 -#> -4.2557 -6.4940 +#> 6.2058 -10.3060 +#> 2.3539 0.2381 +#> 2.5999 2.7840 +#> 4.4128 -0.1960 +#> -1.7241 -1.4131 #> #> (3,3,1,.,.) = -#> -5.2358 -6.1865 -#> -13.5679 8.0294 -#> 5.8208 -5.6350 -#> 6.6610 -3.3221 -#> -2.0237 -1.4322 +#> 3.4567 -0.4836 +#> 6.1658 2.3352 +#> 0.0856 0.4609 +#> 3.7644 -5.1061 +#> 1.0802 -4.8710 #> #> (1,1,2,.,.) = -#> 3.3358 4.7716 -#> 3.1996 3.2956 -#> 2.6306 7.2663 -#> -2.2276 -8.0122 -#> 2.0598 -2.9997 +#> -3.5388 6.0588 +#> -3.2331 -3.7648 +#> -3.3814 -2.3884 +#> 1.9866 0.6786 +#> 3.8671 0.4380 #> #> (2,1,2,.,.) = -#> 0.5116 0.5725 -#> 2.0175 -4.3590 -#> -1.4288 0.5337 -#> 2.3421 0.0760 -#> -7.5872 -0.3140 +#> -2.7273 -7.3688 +#> 3.1946 1.7876 +#> 5.9637 -6.5781 +#> -10.5504 -7.0482 +#> 9.0913 -10.8848 #> #> (3,1,2,.,.) = -#> 5.6348 1.3832 -#> 6.6151 0.4225 -#> 6.0133 -5.4770 -#> -7.6286 -6.4727 -#> 3.6738 4.4512 +#> 1.7073 4.0997 +#> -1.8936 -5.7653 +#> -2.6932 -3.5705 +#> 2.1578 6.4609 +#> 0.3697 4.2655 #> #> (1,2,2,.,.) = -#> 2.8345 -7.9496 -#> -1.2934 6.3027 -#> -2.5656 3.4925 -#> -4.2269 8.1202 -#> 7.0657 8.9951 +#> 3.2221 -3.5457 +#> -5.6733 -1.1672 +#> -6.0391 14.1264 +#> -1.8346 -4.6253 +#> 1.4615 1.0004 #> #> (2,2,2,.,.) = -#> -2.5830 2.6838 -#> 1.3960 -2.1490 -#> 3.6903 -4.3413 -#> -4.8245 1.9661 -#> 2.4275 -0.8489 +#> 7.8673 2.4643 +#> 6.2591 2.9252 +#> -0.3886 -0.8997 +#> 0.1365 -4.4832 +#> 3.5114 0.2806 #> #> (3,2,2,.,.) = -#> -2.3136 -3.0029 -#> 6.4553 2.9853 -#> 1.0790 6.2816 -#> -2.3066 3.1048 -#> 2.4425 -2.4980 +#> 2.5126 -0.0145 +#> 8.0643 -0.4143 +#> 6.9933 0.4218 +#> 7.4069 3.1231 +#> 10.0771 0.4146 #> #> (1,3,2,.,.) = -#> 5.0888 -1.9831 -#> -3.3846 -3.1375 -#> 0.4764 0.9040 -#> -0.2155 -3.9354 -#> -0.2403 9.0833 +#> 8.7499 6.0461 +#> -6.3412 -2.5804 +#> -1.8398 -11.6373 +#> 0.7424 -0.4836 +#> -3.3934 0.5697 #> #> (2,3,2,.,.) 
= -#> 3.2478 0.6904 -#> 4.8131 -4.7587 -#> 4.4327 -2.0460 -#> -0.1583 -1.1435 -#> -0.8185 -3.7916 +#> -0.8282 2.5507 +#> 2.5071 -1.9104 +#> -6.8926 -1.4344 +#> -2.7887 0.8809 +#> 1.3705 -3.8546 #> #> (3,3,2,.,.) = -#> -4.8215 -5.7823 -#> -4.3197 2.5006 -#> -7.6764 10.5907 -#> 5.7097 9.1375 -#> -6.5421 5.3465 +#> -1.3417 -2.2560 +#> -0.9487 -2.3724 +#> -8.4024 6.2669 +#> -9.3354 -14.5017 +#> 3.7803 -3.7519 #> #> (1,1,3,.,.) = -#> 6.8857 1.6267 -#> 0.6538 -2.9130 -#> 4.2404 -5.3240 -#> 5.6476 5.1706 -#> 1.4154 2.3173 +#> -1.9732 -8.0447 +#> -4.0135 4.3342 +#> -8.5510 -3.9217 +#> 1.7348 5.0812 +#> -1.3394 5.5100 #> #> (2,1,3,.,.) = -#> -0.5342 -1.8588 -#> -6.3230 -2.3969 -#> -2.6561 -1.8841 -#> 2.2272 -2.2146 -#> -7.2355 -9.8852 +#> -2.3784 -3.2535 +#> -0.5325 -1.7479 +#> 0.5439 1.1196 +#> -1.7045 -6.2396 +#> 1.8775 -5.5534 #> #> (3,1,3,.,.) = -#> 6.9817 -1.9044 -#> -10.6065 -3.0032 -#> 6.2727 10.5124 -#> 2.0892 -0.2945 -#> 1.4965 3.6209 +#> -7.0758 -4.6158 +#> -4.4565 3.7113 +#> -2.5085 -0.5332 +#> 2.2014 -1.8736 +#> -6.4190 0.8344 #> #> (1,2,3,.,.) = -#> -2.7138 0.5805 -#> 3.8351 -13.1060 -#> -2.0923 8.4118 -#> 6.7002 2.8014 -#> -2.0329 -5.4260 +#> -1.6565 7.4865 +#> 7.9111 8.2888 +#> 1.7455 -7.7078 +#> -8.7524 3.5801 +#> 3.2935 2.1932 #> #> (2,2,3,.,.) = -#> -4.7038 4.4041 -#> 0.6079 6.9384 -#> -2.4581 -2.0337 -#> -2.4631 5.5419 -#> -2.4616 -2.3076 +#> -0.4705 -8.0645 +#> -2.7541 5.7445 +#> 3.0258 -0.3652 +#> 4.6185 -9.1709 +#> -3.3521 4.3578 #> #> (3,2,3,.,.) = -#> 1.0971 1.5684 -#> 4.6985 3.6660 -#> 1.7358 2.5220 -#> -2.9059 -3.8360 -#> 8.5191 -0.6597 +#> -9.7571 5.1088 +#> 0.5665 3.9230 +#> 0.8925 -0.7799 +#> -2.2061 9.9910 +#> 0.5800 0.5737 #> #> (1,3,3,.,.) = -#> 1.2912 -4.1150 -#> 5.9200 -9.2185 -#> 2.7708 -11.2848 -#> -5.2274 -9.0754 -#> -5.9556 -8.3723 +#> -7.3047 4.8323 +#> -3.9106 3.1690 +#> -6.3493 -0.1799 +#> 2.7435 13.2830 +#> 3.4891 -3.3635 #> #> (2,3,3,.,.) = -#> 3.6284 -9.7751 -#> -6.4936 0.4896 -#> 1.0783 3.2584 -#> -1.2375 4.0910 -#> 8.7281 11.1889 +#> 5.1295 -1.2111 +#> -5.8640 -4.0844 +#> -0.3176 2.4835 +#> -6.5782 -4.5152 +#> -0.8305 0.7476 #> #> (3,3,3,.,.) = -#> -2.9392 -0.8457 -#> -5.1092 -3.4291 -#> -6.7226 -2.2762 -#> -6.7354 0.7375 -#> -3.6834 -10.3393 +#> -5.4378 -5.3350 +#> -2.3722 -11.4782 +#> 7.3945 0.4418 +#> 1.8523 -0.8628 +#> 1.4826 -7.1815 #> #> (1,1,4,.,.) = -#> -0.4641 5.1642 -#> -4.9807 -2.8183 -#> -0.9600 1.1617 -#> 13.1615 -5.7607 -#> -5.8192 6.1872 +#> 2.4687 5.1131 +#> 6.3423 -4.0122 +#> 4.2059 0.0545 +#> -3.0227 -8.9330 +#> 7.3082 3.1296 #> #> (2,1,4,.,.) = -#> -0.6880 12.9307 -#> -8.0335 -1.1752 -#> 6.2135 2.0547 -#> 8.6658 -5.6305 -#> -7.1765 -4.2632 +#> -1.9773 4.5083 +#> -2.9370 0.5696 +#> -1.3116 0.0959 +#> 3.0593 2.6283 +#> 0.7708 -1.4834 #> #> (3,1,4,.,.) = -#> -3.0057 -3.1364 -#> 4.9857 4.4488 -#> 1.3519 9.6340 -#> -9.3729 4.8508 -#> 0.7863 3.1354 +#> -0.4809 8.7561 +#> -0.4841 3.7019 +#> -2.3270 0.4256 +#> 3.7321 -10.0876 +#> -2.0148 6.4611 #> #> (1,2,4,.,.) = -#> -5.5472 0.7396 -#> 5.5568 -2.7190 -#> 2.1487 3.5182 -#> 2.2908 5.8565 -#> -2.7511 0.1199 +#> -2.6632 5.7187 +#> 11.6131 2.1020 +#> 1.8945 2.9998 +#> -0.9931 -15.3454 +#> 1.6627 1.9919 #> #> (2,2,4,.,.) = -#> 0.1053 -9.7559 -#> 2.6366 2.9785 -#> 5.8546 -8.3557 -#> 2.3308 -0.3229 -#> -0.1616 -2.8580 +#> -0.7808 -2.8565 +#> 8.3442 0.4109 +#> 2.7123 0.6158 +#> 1.3526 5.0902 +#> 3.2664 -1.5515 #> #> (3,2,4,.,.) 
= -#> -8.8217 -4.0197 -#> -1.3547 2.0932 -#> 5.0738 -2.1672 -#> -1.6119 7.2245 -#> -1.4150 -0.6509 +#> 5.0697 0.5581 +#> -11.7244 1.5131 +#> -4.9459 3.7161 +#> -3.6735 2.2271 +#> 10.1182 -0.3320 #> #> (1,3,4,.,.) = -#> -1.2559 2.1416 -#> -0.0231 4.1475 -#> -3.8181 2.3299 -#> 3.7565 0.7653 -#> 1.8683 -3.8644 +#> 3.6115 -1.6901 +#> -1.0491 6.2340 +#> 2.3156 -0.1393 +#> -0.2285 1.8952 +#> -0.8674 -1.8877 #> #> (2,3,4,.,.) = -#> 0.4839 6.4445 -#> 3.1534 -8.8380 -#> 3.7889 6.3242 -#> 9.0306 11.1551 -#> 5.7409 -1.1564 +#> 5.8264 6.5763 +#> -5.1067 2.8225 +#> 4.7281 5.3284 +#> 3.8772 -8.7982 +#> 7.9865 3.5250 #> #> (3,3,4,.,.) = -#> -2.2988 8.8577 -#> 4.7663 -5.2979 -#> -4.7160 9.3649 -#> -4.3578 -7.4509 -#> -4.3874 2.7093 +#> 5.6288 -2.1604 +#> 5.2852 12.1708 +#> -6.5885 -1.3408 +#> -1.0228 -3.8949 +#> 4.9946 5.1332 #> #> (1,1,5,.,.) = -#> -0.4679 1.3350 -#> -1.4923 -5.4231 -#> -2.3291 0.3742 -#> 2.6760 -0.3634 -#> -3.0602 8.7696 +#> -3.6772 -2.6791 +#> 9.3151 6.3421 +#> 1.0636 -2.4348 +#> 0.0648 0.2960 +#> -9.0852 -0.3164 #> #> (2,1,5,.,.) = -#> -2.1765 4.7492 -#> 7.2170 3.6409 -#> -6.5629 -2.2038 -#> -5.8527 -0.0878 -#> -4.5430 0.7514 +#> -6.7774 5.5203 +#> 8.8951 0.4216 +#> 1.4675 -0.9553 +#> -4.2931 2.7152 +#> -8.8658 0.0596 #> #> (3,1,5,.,.) = -#> 8.4563 6.2536 -#> -7.6490 -0.3940 -#> -2.2804 0.5806 -#> -9.5464 4.4593 -#> -5.9173 -3.0061 +#> -0.8913 -3.6577 +#> -2.9250 2.1499 +#> 1.9352 -0.6881 +#> 12.8338 2.7566 +#> -8.3175 0.9609 #> #> (1,2,5,.,.) = -#> 5.1614 -5.6453 -#> 4.8296 -15.8688 -#> 1.8169 -0.8523 -#> 10.0722 -2.0120 -#> 0.8588 2.5174 +#> -2.9347 0.3892 +#> -1.4719 -6.7884 +#> 0.1891 8.8247 +#> 11.0228 8.0462 +#> -3.6506 3.0815 #> #> (2,2,5,.,.) = -#> -6.1863 -1.2811 -#> -5.0314 3.6125 -#> -6.2021 -2.8083 -#> -0.3612 -3.9482 -#> -3.0259 0.6619 +#> 2.6453 -9.5708 +#> -1.4926 2.9505 +#> 9.6809 -4.5462 +#> 10.8256 6.3338 +#> 4.1002 -1.5479 #> #> (3,2,5,.,.) = -#> 5.1903 0.0372 -#> -1.9769 1.8278 -#> 4.1834 -11.1886 -#> -3.8679 -4.4589 -#> 2.2054 -3.4290 +#> -1.0330 5.5573 +#> -2.3059 -1.8269 +#> -3.2600 6.0368 +#> 4.6868 2.6289 +#> 4.2373 0.1010 #> #> (1,3,5,.,.) = -#> -9.9394 5.2093 -#> -6.9233 -3.8417 -#> 7.3800 -2.2016 -#> 1.4532 4.9600 -#> -2.6033 -3.4185 +#> 6.1920 5.8462 +#> 0.6780 4.9170 +#> 2.4530 -14.6991 +#> 8.8984 0.1436 +#> -0.9811 3.2451 #> #> (2,3,5,.,.) = -#> -6.9207 1.5530 -#> -3.9120 0.2946 -#> -0.9112 5.2904 -#> 3.2102 -2.0272 -#> 7.6431 -0.3506 +#> -3.2560 0.5176 +#> 8.5553 -0.6800 +#> -7.4328 -0.9606 +#> -6.5130 -0.3419 +#> 2.8921 -2.0966 #> #> (3,3,5,.,.) = -#> 3.1340 6.7375 -#> 1.4610 3.8716 -#> 2.8714 3.5657 -#> -4.2452 -3.6816 -#> 5.5423 2.7491 +#> 4.2309 0.3529 +#> 4.4683 8.9331 +#> 2.9729 -1.8606 +#> 1.4575 3.8359 +#> -5.0445 -14.9692 #> [ CPUFloatType{3,3,5,5,2} ]
    @@ -146,6 +158,9 @@ floating point torch.dtype" />
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -159,7 +174,24 @@ floating point torch.dtype" />
  • - +
  • Reference
  • @@ -197,7 +229,7 @@ floating point torch.dtype" /> floating point torch.dtype

    -
    torch_finfo(dtype)
    +
    torch_finfo(dtype)

    Arguments

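No example is rendered here; a one-line sketch:

if (torch_is_installed()) {
  torch_finfo(torch_float32())  # numerical limits of 32-bit floats
}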
    diff --git a/reference/torch_flatten.html b/reference/torch_flatten.html index daccdc19295d52e79ec800ed4f7b4b163dfc0985..d719c3e86f623ba7939a5e026b8536c31a80b48d 100644 --- a/reference/torch_flatten.html +++ b/reference/torch_flatten.html @@ -38,6 +38,8 @@ + + @@ -55,6 +57,16 @@ + + + + @@ -72,7 +84,7 @@ torch - 0.0.3 + 0.1.0 @@ -145,6 +157,9 @@
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -158,7 +173,24 @@
  • - +
  • Reference
  • @@ -195,7 +227,7 @@

    Flatten

    -
    torch_flatten(self, dims, start_dim = 1L, end_dim = -1L, out_dim)
    +
    torch_flatten(self, dims, start_dim = 1L, end_dim = -1L, out_dim)

    Arguments

    @@ -231,12 +263,12 @@ flatten

    Flattens a contiguous range of dims in a tensor.

    Examples

    -
    if (torch_is_installed()) { +
    if (torch_is_installed()) { -t = torch_tensor(matrix(c(1, 2), ncol = 2)) -torch_flatten(t) -torch_flatten(t, start_dim=2) -} +t = torch_tensor(matrix(c(1, 2), ncol = 2)) +torch_flatten(t) +torch_flatten(t, start_dim=2) +}
    #> torch_tensor #> 1 2 #> [ CPUFloatType{1,2} ]
    diff --git a/reference/torch_flip.html b/reference/torch_flip.html index e1146db25869b678c38e6530b383e6c389a17ec6..f49f22e1b6d03cc5e0ff20fb758e01b04a9ffa72 100644 --- a/reference/torch_flip.html +++ b/reference/torch_flip.html @@ -38,6 +38,8 @@ + + @@ -55,6 +57,16 @@ + + + + @@ -72,7 +84,7 @@ torch - 0.0.3 + 0.1.0
    @@ -145,6 +157,9 @@
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -158,7 +173,24 @@
  • - +
  • Reference
  • @@ -195,7 +227,7 @@

    Flip

    -
    torch_flip(self, dims)
    +
    torch_flip(self, dims)

    Arguments

    @@ -218,12 +250,12 @@

Reverses the order of an n-D tensor along the given axes in dims.

    Examples

    -
    if (torch_is_installed()) { +
    if (torch_is_installed()) { -x = torch_arange(0, 8)$view(c(2, 2, 2)) -x -torch_flip(x, c(1, 2)) -} +x = torch_arange(0, 8)$view(c(2, 2, 2)) +x +torch_flip(x, c(1, 2)) +}
    #> torch_tensor #> (1,.,.) = #> 6 7 diff --git a/reference/torch_floor.html b/reference/torch_floor.html index ee0a1dac547b05da896b46aeca8ac70c8d0e396d..9f1a9a9fefe12eacb950f1e384a7be30b73eb6dd 100644 --- a/reference/torch_floor.html +++ b/reference/torch_floor.html @@ -38,6 +38,8 @@ + + @@ -55,6 +57,16 @@ + + + + @@ -72,7 +84,7 @@ torch - 0.0.3 + 0.1.0
    @@ -145,6 +157,9 @@
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -158,7 +173,24 @@
  • - +
  • Reference
  • @@ -195,7 +227,7 @@

    Floor

    -
    torch_floor(self)
    +
    torch_floor(self)

    Arguments

    @@ -218,17 +250,17 @@ the largest integer less than or equal to each element.

$$ \mathrm{out}_{i} = \left\lfloor \mathrm{input}_{i} \right\rfloor $$

    Examples

    -
    if (torch_is_installed()) { +
    if (torch_is_installed()) { -a = torch_randn(c(4)) -a -torch_floor(a) -} +a = torch_randn(c(4)) +a +torch_floor(a) +}
    #> torch_tensor -#> 0 -#> -1 #> -1 +#> -2 #> -1 +#> 0 #> [ CPUFloatType{4} ]
    @@ -145,6 +157,9 @@
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -158,7 +173,24 @@
  • - +
  • Reference
  • @@ -195,7 +227,7 @@

    Floor_divide

    -
    torch_floor_divide(self, other)
    +
    torch_floor_divide(self, other)

    Arguments

    @@ -222,13 +254,13 @@ for type promotion and broadcasting rules.

$$ \mathrm{out}_{i} = \left\lfloor \frac{\mathrm{input}_{i}}{\mathrm{other}_{i}} \right\rfloor $$

    Examples

    -
    if (torch_is_installed()) { +
    if (torch_is_installed()) { -a = torch_tensor(c(4.0, 3.0)) -b = torch_tensor(c(2.0, 2.0)) -torch_floor_divide(a, b) -torch_floor_divide(a, 1.4) -} +a = torch_tensor(c(4.0, 3.0)) +b = torch_tensor(c(2.0, 2.0)) +torch_floor_divide(a, b) +torch_floor_divide(a, 1.4) +}
    #> torch_tensor #> 2 #> 2 diff --git a/reference/torch_fmod.html b/reference/torch_fmod.html index 9bd9cde04b17c01b4f0a4365310fbe83dfef6a3d..b7a78a66315557154f769b7118756e9d2b7e175b 100644 --- a/reference/torch_fmod.html +++ b/reference/torch_fmod.html @@ -38,6 +38,8 @@ + + @@ -55,6 +57,16 @@ + + + + @@ -72,7 +84,7 @@ torch - 0.0.3 + 0.1.0
    @@ -145,6 +157,9 @@
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -158,7 +173,24 @@
  • - +
  • Reference
  • @@ -195,7 +227,7 @@

    Fmod

    -
    torch_fmod(self, other)
    +
    torch_fmod(self, other)

    Arguments

    @@ -222,11 +254,11 @@ numbers. The remainder has the same sign as the dividend input.

other must be broadcastable.

    Examples

    -
    if (torch_is_installed()) { +
    if (torch_is_installed()) { -torch_fmod(torch_tensor(c(-3., -2, -1, 1, 2, 3)), 2) -torch_fmod(torch_tensor(c(1., 2, 3, 4, 5)), 1.5) -} +torch_fmod(torch_tensor(c(-3., -2, -1, 1, 2, 3)), 2) +torch_fmod(torch_tensor(c(1., 2, 3, 4, 5)), 1.5) +}
    #> torch_tensor #> 1.0000 #> 0.5000 diff --git a/reference/torch_frac.html b/reference/torch_frac.html index eafa181a010efbc0bb4a6d8d9ad397c45ba2fbe4..ee1619de7bc9ca0d8c4a7cbcc2f17e2a9d51a4cc 100644 --- a/reference/torch_frac.html +++ b/reference/torch_frac.html @@ -38,6 +38,8 @@ + + @@ -55,6 +57,16 @@ + + + + @@ -72,7 +84,7 @@ torch - 0.0.3 + 0.1.0
    @@ -145,6 +157,9 @@
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -158,7 +173,24 @@
  • - +
  • Reference
  • @@ -195,7 +227,7 @@

    Frac

    -
    torch_frac(self)
    +
    torch_frac(self)

    Arguments

    @@ -217,10 +249,10 @@ $$

    Examples

    -
    if (torch_is_installed()) { +
    if (torch_is_installed()) { -torch_frac(torch_tensor(c(1, 2.5, -3.2))) -} +torch_frac(torch_tensor(c(1, 2.5, -3.2))) +}
    #> torch_tensor #> 0.0000 #> 0.5000 diff --git a/reference/torch_full.html b/reference/torch_full.html index 6a26b0f00041a15d674d64106b207795ded8c9ca..d29d1832aec2cf87fdd72dc5d40b979144fc34da 100644 --- a/reference/torch_full.html +++ b/reference/torch_full.html @@ -38,6 +38,8 @@ + + @@ -55,6 +57,16 @@ + + + + @@ -72,7 +84,7 @@ torch - 0.0.3 + 0.1.0
    @@ -145,6 +157,9 @@
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -158,7 +173,24 @@
  • - +
  • Reference
  • @@ -195,15 +227,15 @@

    Full

    -
    torch_full(
    -  size,
    -  fill_value,
    -  names = NULL,
    -  dtype = NULL,
    -  layout = torch_strided(),
    -  device = NULL,
    -  requires_grad = FALSE
    -)
    +
    torch_full(
    +  size,
    +  fill_value,
    +  names = NULL,
    +  dtype = NULL,
    +  layout = torch_strided(),
    +  device = NULL,
    +  requires_grad = FALSE
    +)

    Arguments

    @@ -255,10 +287,10 @@ a bool fill_value will return a tensor of torch.bool dtype, and an integral fill_value will return a tensor of torch.long dtype.

    Examples

    -
    if (torch_is_installed()) { +
    if (torch_is_installed()) { -torch_full(list(2, 3), 3.141592) -} +torch_full(list(2, 3), 3.141592) +}
    #> torch_tensor #> 3.1416 3.1416 3.1416 #> 3.1416 3.1416 3.1416 diff --git a/reference/torch_full_like.html b/reference/torch_full_like.html index 9113e685c795a3ca7841a5a5b07ac4a80ccfc57c..d042ed2bb5b1ae8d3e5068a127f6bd4732e6e7c1 100644 --- a/reference/torch_full_like.html +++ b/reference/torch_full_like.html @@ -38,6 +38,8 @@ + + @@ -55,6 +57,16 @@ + + + + @@ -72,7 +84,7 @@ torch - 0.0.3 + 0.1.0
    @@ -145,6 +157,9 @@
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -158,7 +173,24 @@
  • - +
  • Reference
  • @@ -195,15 +227,15 @@

    Full_like

    -
    torch_full_like(
    -  input,
    -  fill_value,
    -  dtype = NULL,
    -  layout = torch_strided(),
    -  device = NULL,
    -  requires_grad = FALSE,
    -  memory_format = torch_preserve_format()
    -)
    +
    torch_full_like(
    +  input,
    +  fill_value,
    +  dtype = NULL,
    +  layout = torch_strided(),
    +  device = NULL,
    +  requires_grad = FALSE,
    +  memory_format = torch_preserve_format()
    +)

    Arguments

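No example is rendered here; a minimal sketch:

if (torch_is_installed()) {
  x <- torch_randn(c(2, 3))
  torch_full_like(x, 7)  # 2 x 3 tensor filled with 7, same dtype as x
}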
    diff --git a/reference/torch_gather.html b/reference/torch_gather.html index 153778c323cef6be6d0a9f1206a9982de16060eb..b8f014d6249156dc26e473f5900e4ca99ed6fad7 100644 --- a/reference/torch_gather.html +++ b/reference/torch_gather.html @@ -38,6 +38,8 @@ + + @@ -55,6 +57,16 @@ + + + + @@ -72,7 +84,7 @@ torch - 0.0.3 + 0.1.0 @@ -145,6 +157,9 @@
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -158,7 +173,24 @@
  • - +
  • Reference
  • @@ -195,7 +227,7 @@

    Gather

    -
    torch_gather(self, dim, index, sparse_grad = FALSE)
    +
    torch_gather(self, dim, index, sparse_grad = FALSE)

    Arguments

    @@ -224,9 +256,9 @@

    Gathers values along an axis specified by dim.

    -

    For a 3-D tensor the output is specified by::

    out[i][j][k] = input[index[i][j][k]][j][k]  # if dim == 0
    -out[i][j][k] = input[i][index[i][j][k]][k]  # if dim == 1
    -out[i][j][k] = input[i][j][index[i][j][k]]  # if dim == 2
    +

For a 3-D tensor the output is specified by:

    out[i][j][k] = input[index[i][j][k]][j][k]  # if dim == 0
    +out[i][j][k] = input[i][index[i][j][k]][k]  # if dim == 1
    +out[i][j][k] = input[i][j][index[i][j][k]]  # if dim == 2
     

If input is an n-dimensional tensor with size \((x_0, x_1, ..., x_{i-1}, x_i, x_{i+1}, ..., x_{n-1})\) and dim = i, then index must be an n-dimensional tensor with size \((x_0, x_1, ..., x_{i-1}, y, x_{i+1}, ..., x_{n-1})\) where \(y \geq 1\) and out will have the same size as index.

    Examples

    -
    if (torch_is_installed()) { +
    if (torch_is_installed()) { -t = torch_tensor(matrix(c(1,2,3,4), ncol = 2, byrow = TRUE)) -torch_gather(t, 2, torch_tensor(matrix(c(1,1,2,1), ncol = 2, byrow=TRUE), dtype = torch_int64())) -} +t = torch_tensor(matrix(c(1,2,3,4), ncol = 2, byrow = TRUE)) +torch_gather(t, 2, torch_tensor(matrix(c(1,1,2,1), ncol = 2, byrow=TRUE), dtype = torch_int64())) +}
    #> torch_tensor #> 1 1 #> 4 3 diff --git a/reference/torch_ge.html b/reference/torch_ge.html index 5add98bfb35e3bfdd14cbf3119fff8f462e3d2f2..89ad9502a8b3cc4607a86acfa39a1359b9fa780b 100644 --- a/reference/torch_ge.html +++ b/reference/torch_ge.html @@ -38,6 +38,8 @@ + + @@ -55,6 +57,16 @@ + + + + @@ -72,7 +84,7 @@ torch - 0.0.3 + 0.1.0
    @@ -145,6 +157,9 @@
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -158,7 +173,24 @@
  • - +
  • Reference
  • @@ -195,7 +227,7 @@

    Ge

    -
    torch_ge(self, other)
    +
    torch_ge(self, other)

    Arguments

    @@ -220,11 +252,11 @@ broadcastable with the first argument.

    Examples

    -
    if (torch_is_installed()) { +
    if (torch_is_installed()) { -torch_ge(torch_tensor(matrix(1:4, ncol = 2, byrow=TRUE)), - torch_tensor(matrix(c(1,1,4,4), ncol = 2, byrow=TRUE))) -} +torch_ge(torch_tensor(matrix(1:4, ncol = 2, byrow=TRUE)), + torch_tensor(matrix(c(1,1,4,4), ncol = 2, byrow=TRUE))) +}
    #> torch_tensor #> 1 1 #> 0 1 diff --git a/reference/torch_generator.html b/reference/torch_generator.html index e205f8f225e6bdc2dac471ac70d08f6fbbb83c1e..e39927160767e4fc34e20ba796b46644a58fd4cd 100644 --- a/reference/torch_generator.html +++ b/reference/torch_generator.html @@ -38,6 +38,8 @@ + + + + + + @@ -74,7 +86,7 @@ In-place random sampling functions." /> torch - 0.0.3 + 0.1.0
    @@ -147,6 +159,9 @@ In-place random sampling functions." />
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -160,7 +175,24 @@ In-place random sampling functions." />
  • - +
  • Reference
  • @@ -199,21 +231,21 @@ that produces pseudo random numbers. Used as a keyword argument in many In-place random sampling functions.

    -
    torch_generator()
    +
    torch_generator()

    Examples

    -
    if (torch_is_installed()) { +
    if (torch_is_installed()) { # Via string -generator <- torch_generator() -generator$current_seed() -generator$set_current_seed(1234567L) -generator$current_seed() +generator <- torch_generator() +generator$current_seed() +generator$set_current_seed(1234567L) +generator$current_seed() -} +}
    #> integer64 #> [1] 1234567
    diff --git a/reference/torch_geqrf.html b/reference/torch_geqrf.html index 506785d1ab30df48d8ab97fbd8701c94295f5495..2d1a24e0dfc07113c5aea62b70ccb2a3982b8990 100644 --- a/reference/torch_geqrf.html +++ b/reference/torch_geqrf.html @@ -38,6 +38,8 @@ + + @@ -55,6 +57,16 @@ + + + + @@ -72,7 +84,7 @@ torch - 0.0.3 + 0.1.0 @@ -145,6 +157,9 @@
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -158,7 +173,24 @@
  • - +
  • Reference
  • @@ -195,7 +227,7 @@

    Geqrf

    -
    torch_geqrf(self)
    +
    torch_geqrf(self)

    Arguments

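No example is rendered here; a minimal sketch (that the result is a list of two tensors, (a, tau), in the LAPACK geqrf convention is an assumption based on the upstream docs):

if (torch_is_installed()) {
  a <- torch_randn(c(3, 3))
  torch_geqrf(a)  # intermediate QR representation: list of (a, tau)
}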
    diff --git a/reference/torch_ger.html b/reference/torch_ger.html index 2a20545da45c806f134f7da7d76b3eb40157c9ed..f4ecc3a908dac108f0ade7b887f9e8be82bad8d1 100644 --- a/reference/torch_ger.html +++ b/reference/torch_ger.html @@ -38,6 +38,8 @@ + + @@ -55,6 +57,16 @@ + + + + @@ -72,7 +84,7 @@ torch - 0.0.3 + 0.1.0 @@ -145,6 +157,9 @@
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -158,7 +173,24 @@
  • - +
  • Reference
  • @@ -195,7 +227,7 @@

    Ger

    -
    torch_ger(self, vec2)
    +
    torch_ger(self, vec2)

    Arguments

@@ -223,12 +255,12 @@ If input is a vector of size \(n\) and vec2 is a vector of size \(m\), then out must be a matrix of size \((n \times m)\).

    Examples

    -
    if (torch_is_installed()) { +
    if (torch_is_installed()) { -v1 = torch_arange(1., 5.) -v2 = torch_arange(1., 4.) -torch_ger(v1, v2) -} +v1 = torch_arange(1., 5.) +v2 = torch_arange(1., 4.) +torch_ger(v1, v2) +}
    #> torch_tensor #> 1 2 3 #> 2 4 6 diff --git a/reference/torch_gt.html b/reference/torch_gt.html index 98edc7761725fa78887f50fb12d90f5b52a38c3e..8139c4ea76cd670dfb08d778de0526a36901e69d 100644 --- a/reference/torch_gt.html +++ b/reference/torch_gt.html @@ -38,6 +38,8 @@ + + @@ -55,6 +57,16 @@ + + + + @@ -72,7 +84,7 @@ torch - 0.0.3 + 0.1.0
    @@ -145,6 +157,9 @@
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -158,7 +173,24 @@
  • - +
  • Reference
  • @@ -195,7 +227,7 @@

    Gt

    -
    torch_gt(self, other)
    +
    torch_gt(self, other)

    Arguments

    @@ -220,11 +252,11 @@ broadcastable with the first argument.

    Examples

    -
    if (torch_is_installed()) { +
    if (torch_is_installed()) { -torch_gt(torch_tensor(matrix(1:4, ncol = 2, byrow=TRUE)), - torch_tensor(matrix(c(1,1,4,4), ncol = 2, byrow=TRUE))) -} +torch_gt(torch_tensor(matrix(1:4, ncol = 2, byrow=TRUE)), + torch_tensor(matrix(c(1,1,4,4), ncol = 2, byrow=TRUE))) +}
    #> torch_tensor #> 0 1 #> 0 0 diff --git a/reference/torch_hamming_window.html b/reference/torch_hamming_window.html index bceab5f03b34cfad4f6ef5fa62fda9c542462cc1..0ff2cfe0f45bdb419ad6f495e9aa1219ab0e7a2b 100644 --- a/reference/torch_hamming_window.html +++ b/reference/torch_hamming_window.html @@ -38,6 +38,8 @@ + + @@ -55,6 +57,16 @@ + + + + @@ -72,7 +84,7 @@ torch - 0.0.3 + 0.1.0
    @@ -145,6 +157,9 @@
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -158,7 +173,24 @@
  • - +
  • Reference
  • @@ -195,16 +227,16 @@

    Hamming_window

    -
    torch_hamming_window(
    -  window_length,
    -  periodic = TRUE,
    -  alpha = 0.54,
    -  beta = 0.46,
    -  dtype = NULL,
    -  layout = torch_strided(),
    -  device = NULL,
    -  requires_grad = FALSE
    -)
    +
    torch_hamming_window(
    +  window_length,
    +  periodic = TRUE,
    +  alpha = 0.54,
    +  beta = 0.46,
    +  dtype = NULL,
    +  layout = torch_strided(),
    +  device = NULL,
    +  requires_grad = FALSE
    +)

    Arguments

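No example is rendered here; a minimal sketch:

if (torch_is_installed()) {
  torch_hamming_window(5)                     # periodic window, the default
  torch_hamming_window(5, periodic = FALSE)   # symmetric window
}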
    diff --git a/reference/torch_hann_window.html b/reference/torch_hann_window.html index 2122ae2b50ce316ba3e90d367b0bee58e194c4e4..efaffec3cd3732eaf4152bf92aaded547484d17e 100644 --- a/reference/torch_hann_window.html +++ b/reference/torch_hann_window.html @@ -38,6 +38,8 @@ + + @@ -55,6 +57,16 @@ + + + + @@ -72,7 +84,7 @@ torch - 0.0.3 + 0.1.0 @@ -145,6 +157,9 @@
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -158,7 +173,24 @@
  • - +
  • Reference
  • @@ -195,14 +227,14 @@

    Hann_window

    -
    torch_hann_window(
    -  window_length,
    -  periodic = TRUE,
    -  dtype = NULL,
    -  layout = torch_strided(),
    -  device = NULL,
    -  requires_grad = FALSE
    -)
    +
    torch_hann_window(
    +  window_length,
    +  periodic = TRUE,
    +  dtype = NULL,
    +  layout = torch_strided(),
    +  device = NULL,
    +  requires_grad = FALSE
    +)

    Arguments

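No example is rendered here; a minimal sketch:

if (torch_is_installed()) {
  torch_hann_window(5)  # 5-point periodic Hann window
}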
    diff --git a/reference/torch_histc.html b/reference/torch_histc.html index 3fb1d585f79b2f01a61b58ec4e38af2eb26e2d2d..3a3d0221efeba18c07f418330345164920853a1e 100644 --- a/reference/torch_histc.html +++ b/reference/torch_histc.html @@ -38,6 +38,8 @@ + + @@ -55,6 +57,16 @@ + + + + @@ -72,7 +84,7 @@ torch - 0.0.3 + 0.1.0 @@ -145,6 +157,9 @@
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -158,7 +173,24 @@
  • - +
  • Reference
  • @@ -195,7 +227,7 @@

    Histc

    -
    torch_histc(self, bins = 100L, min = 0L, max = 0L)
    +
    torch_histc(self, bins = 100L, min = 0L, max = 0L)

    Arguments

    @@ -229,10 +261,10 @@ maximum values of the data are used.

    Examples

    -
    if (torch_is_installed()) { +
    if (torch_is_installed()) { -torch_histc(torch_tensor(c(1., 2, 1)), bins=4, min=0, max=3) -} +torch_histc(torch_tensor(c(1., 2, 1)), bins=4, min=0, max=3) +}
    #> torch_tensor #> 0 #> 2 diff --git a/reference/torch_ifft.html b/reference/torch_ifft.html index 833691b3aeeec3d90cd2904d84c2ef7f307cbffc..855f0beb46f805880c4f66f950ee7889f7c1d9a5 100644 --- a/reference/torch_ifft.html +++ b/reference/torch_ifft.html @@ -38,6 +38,8 @@ + + @@ -55,6 +57,16 @@ + + + + @@ -72,7 +84,7 @@ torch - 0.0.3 + 0.1.0
    @@ -145,6 +157,9 @@
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -158,7 +173,24 @@
  • - +
  • Reference
  • @@ -195,7 +227,7 @@

    Ifft

    -
    torch_ifft(self, signal_ndim, normalized = FALSE)
    +
    torch_ifft(self, signal_ndim, normalized = FALSE)

    Arguments

    @@ -255,28 +287,28 @@ shape of input.

    torch_backends.mkl.is_available to check if MKL is installed.

    Examples

    -
    if (torch_is_installed()) { +
    if (torch_is_installed()) { -x = torch_randn(c(3, 3, 2)) -x -y = torch_fft(x, 2) -torch_ifft(y, 2) # recover x -} +x = torch_randn(c(3, 3, 2)) +x +y = torch_fft(x, 2) +torch_ifft(y, 2) # recover x +}
    #> torch_tensor #> (1,.,.) = -#> -1.1864 0.4079 -#> -0.7702 0.2462 -#> 0.1156 1.5071 +#> -0.4696 1.4304 +#> 0.3439 -0.8507 +#> -0.2687 -0.8497 #> #> (2,.,.) = -#> -1.1418 -0.3572 -#> -0.6047 0.3412 -#> -0.5722 -1.1495 +#> -0.1440 0.7003 +#> 0.7445 0.9379 +#> 2.1761 1.1186 #> #> (3,.,.) = -#> 0.3688 0.8228 -#> -0.6308 0.5165 -#> 1.4051 0.6681 +#> 0.8872 0.8706 +#> 0.8568 -0.9243 +#> -1.4517 0.7946 #> [ CPUFloatType{3,3,2} ]
    @@ -146,6 +158,9 @@ type." />
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -159,7 +174,24 @@ type." />
  • - +
  • Reference
  • @@ -197,7 +229,7 @@ type." /> type.

    -
    torch_iinfo(dtype)
    +
    torch_iinfo(dtype)

    Arguments

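No example is rendered here; a one-line sketch:

if (torch_is_installed()) {
  torch_iinfo(torch_int32())  # representable range of 32-bit integers
}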
    diff --git a/reference/torch_imag.html b/reference/torch_imag.html index 8f35c0bc12897b58c7d1b411006d982d46a4dc1c..ed55b6476566165f66ff52c80d594b0ffe7366be 100644 --- a/reference/torch_imag.html +++ b/reference/torch_imag.html @@ -38,6 +38,8 @@ + + @@ -55,6 +57,16 @@ + + + + @@ -72,7 +84,7 @@ torch - 0.0.3 + 0.1.0 @@ -145,6 +157,9 @@
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -158,7 +173,24 @@
  • - +
  • Reference
  • @@ -195,7 +227,7 @@

    Imag

    -
    torch_imag(self)
    +
    torch_imag(self)

    Arguments

    @@ -222,11 +254,11 @@ $$

    Examples

    -
    if (torch_is_installed()) { -if (FALSE) { -torch_imag(torch_tensor(c(-1 + 1i, -2 + 2i, 3 - 3i))) -} -} +
    if (torch_is_installed()) { +if (FALSE) { +torch_imag(torch_tensor(c(-1 + 1i, -2 + 2i, 3 - 3i))) +} +}
    @@ -145,6 +157,9 @@
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -158,7 +173,24 @@
  • - +
  • Reference
  • @@ -195,7 +227,7 @@

    Index_select

    -
    torch_index_select(self, dim, index)
    +
    torch_index_select(self, dim, index)

    Arguments

    @@ -232,18 +264,18 @@ storage if necessary.

    of index; other dimensions have the same size as in the original tensor.

    Examples

    -
    if (torch_is_installed()) { - -x = torch_randn(c(3, 4)) -x -indices = torch_tensor(c(1, 3), dtype = torch_int64()) -torch_index_select(x, 1, indices) -torch_index_select(x, 2, indices) -} +
    if (torch_is_installed()) { + +x = torch_randn(c(3, 4)) +x +indices = torch_tensor(c(1, 3), dtype = torch_int64()) +torch_index_select(x, 1, indices) +torch_index_select(x, 2, indices) +}
    #> torch_tensor -#> 0.1471 0.1196 -#> 1.7271 0.1422 -#> -0.8387 0.1700 +#> -0.2085 -0.3586 +#> 1.8368 -0.7636 +#> -0.4655 0.7847 #> [ CPUFloatType{3,2} ]
    @@ -145,6 +157,9 @@
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -158,7 +173,24 @@
  • - +
  • Reference
  • @@ -195,7 +227,7 @@

    Inverse

    -
    torch_inverse(self)
    +
    torch_inverse(self)

    Arguments

    @@ -223,20 +255,20 @@ of 2D square tensors, in which case this function would return a tensor composed individual inverses.

    Examples

    -
    if (torch_is_installed()) { -if (FALSE) { -x = torch_rand(c(4, 4)) -y = torch_inverse(x) -z = torch_mm(x, y) -z -torch_max(torch_abs(z - torch_eye(4))) # Max non-zero +
    if (torch_is_installed()) { +if (FALSE) { +x = torch_rand(c(4, 4)) +y = torch_inverse(x) +z = torch_mm(x, y) +z +torch_max(torch_abs(z - torch_eye(4))) # Max non-zero # Batched inverse example -x = torch_randn(c(2, 3, 4, 4)) -y = torch_inverse(x) -z = torch_matmul(x, y) -torch_max(torch_abs(z - torch_eye(4)$expand_as(x))) # Max non-zero -} -} +x = torch_randn(c(2, 3, 4, 4)) +y = torch_inverse(x) +z = torch_matmul(x, y) +torch_max(torch_abs(z - torch_eye(4)$expand_as(x))) # Max non-zero +} +}
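The rendered example above is wrapped in if (FALSE); a small runnable sketch on a deterministic matrix:

if (torch_is_installed()) {
  x <- torch_eye(3) * 2
  torch_inverse(x)  # diagonal matrix with 0.5 on the diagonal
}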
    @@ -145,6 +157,9 @@
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -158,7 +173,24 @@
  • - +
  • Reference
  • @@ -195,13 +227,13 @@

    Irfft

    -
    torch_irfft(
    -  self,
    -  signal_ndim,
    -  normalized = FALSE,
    -  onesided = TRUE,
    -  signal_sizes = list()
    -)
    +
    torch_irfft(
    +  self,
    +  signal_ndim,
    +  normalized = FALSE,
    +  onesided = TRUE,
    +  signal_sizes = list()
    +)

    Arguments

    @@ -285,20 +317,20 @@ certainly fail the check.

    torch_backends.mkl.is_available to check if MKL is installed.

    Examples

    -
    if (torch_is_installed()) { - -x = torch_randn(c(4, 4)) -torch_rfft(x, 2, onesided=TRUE) -x = torch_randn(c(4, 5)) -torch_rfft(x, 2, onesided=TRUE) -y = torch_rfft(x, 2, onesided=TRUE) -torch_irfft(y, 2, onesided=TRUE, signal_sizes=c(4,5)) # recover x -} +
    if (torch_is_installed()) { + +x = torch_randn(c(4, 4)) +torch_rfft(x, 2, onesided=TRUE) +x = torch_randn(c(4, 5)) +torch_rfft(x, 2, onesided=TRUE) +y = torch_rfft(x, 2, onesided=TRUE) +torch_irfft(y, 2, onesided=TRUE, signal_sizes=c(4,5)) # recover x +}
    #> torch_tensor -#> -1.2099 1.7469 -1.3247 0.0171 0.0024 -#> 0.4107 0.2902 -2.0005 -0.8215 -1.4046 -#> -1.2626 0.0016 2.6169 -1.2084 -0.1029 -#> 0.0418 1.5131 0.9458 -0.2751 -0.5556 +#> -0.8984 -0.0241 0.5814 0.0022 -1.5341 +#> 0.3579 -0.1102 -0.1934 1.5845 -1.6773 +#> 0.4437 -0.5878 0.5142 1.8715 1.0713 +#> 0.1601 -0.3049 -3.2821 -0.5589 0.7651 #> [ CPUFloatType{4,5} ]
    @@ -145,6 +157,9 @@
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -158,7 +173,24 @@
  • - +
  • Reference
  • @@ -195,7 +227,7 @@

    Is_complex

    -
    torch_is_complex(self)
    +
    torch_is_complex(self)

    Arguments

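No example is rendered here; a one-line sketch:

if (torch_is_installed()) {
  torch_is_complex(torch_tensor(c(1, 2)))  # FALSE: the default dtype is float
}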
    diff --git a/reference/torch_is_floating_point.html b/reference/torch_is_floating_point.html index 30c95bb71f37a1d1fb85a5a3a299ee36c4478f0c..a6f5e8fdbaf1f02047e4c645fe1d87c422e46621 100644 --- a/reference/torch_is_floating_point.html +++ b/reference/torch_is_floating_point.html @@ -38,6 +38,8 @@ + + @@ -55,6 +57,16 @@ + + + + @@ -72,7 +84,7 @@ torch - 0.0.3 + 0.1.0 @@ -145,6 +157,9 @@
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -158,7 +173,24 @@
  • - +
  • Reference
  • @@ -195,7 +227,7 @@

    Is_floating_point

    -
    torch_is_floating_point(self)
    +
    torch_is_floating_point(self)

    Arguments

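No example is rendered here; a minimal sketch:

if (torch_is_installed()) {
  torch_is_floating_point(torch_tensor(c(1.5, 2)))                       # TRUE
  torch_is_floating_point(torch_tensor(c(1, 2), dtype = torch_int64()))  # FALSE
}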
    diff --git a/reference/torch_is_installed.html b/reference/torch_is_installed.html index 2cf006e6716a5199baf7895b3222befc2bb1e130..db94a69b03b6bad9f32ba310de34079020a99a27 100644 --- a/reference/torch_is_installed.html +++ b/reference/torch_is_installed.html @@ -38,6 +38,8 @@ + + @@ -55,6 +57,16 @@ + + + + @@ -72,7 +84,7 @@ torch - 0.0.3 + 0.1.0 @@ -145,6 +157,9 @@
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -158,7 +173,24 @@
  • - +
  • Reference
  • @@ -195,7 +227,7 @@

Verifies whether torch is installed

    -
    torch_is_installed()
    +
    torch_is_installed()
    diff --git a/reference/torch_isfinite.html b/reference/torch_isfinite.html index 59316680032af4548798f988bd20c178d83910e6..be86eaecbf4230d5553cc8f5c6bf94df29569661 100644 --- a/reference/torch_isfinite.html +++ b/reference/torch_isfinite.html @@ -38,6 +38,8 @@ + + @@ -55,6 +57,16 @@ + + + + @@ -72,7 +84,7 @@ torch - 0.0.3 + 0.1.0 @@ -145,6 +157,9 @@
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -158,7 +173,24 @@
  • - +
  • Reference
  • @@ -195,7 +227,7 @@

    Isfinite

    -
    torch_isfinite(self)
    +
    torch_isfinite(self)

    Arguments

    @@ -214,10 +246,10 @@

Returns a new tensor with boolean elements representing whether each element is finite or not.

    Examples

    -
    if (torch_is_installed()) { +
    if (torch_is_installed()) { -torch_isfinite(torch_tensor(c(1, Inf, 2, -Inf, NaN))) -} +torch_isfinite(torch_tensor(c(1, Inf, 2, -Inf, NaN))) +}
    #> torch_tensor #> 1 #> 0 diff --git a/reference/torch_isinf.html b/reference/torch_isinf.html index 6b703dfe6d4bdb7929e22870f1fae4d8c1b0ecc1..ca7f6aed0cbf9bd01304a25c139b719cfd04ef8e 100644 --- a/reference/torch_isinf.html +++ b/reference/torch_isinf.html @@ -38,6 +38,8 @@ + + @@ -55,6 +57,16 @@ + + + + @@ -72,7 +84,7 @@ torch - 0.0.3 + 0.1.0
    @@ -145,6 +157,9 @@
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -158,7 +173,24 @@
  • - +
  • Reference
  • @@ -195,7 +227,7 @@

    Isinf

    -
    torch_isinf(self)
    +
    torch_isinf(self)

    Arguments

    @@ -214,10 +246,10 @@

Returns a new tensor with boolean elements representing whether each element is +/-Inf or not.

    Examples

    -
    if (torch_is_installed()) { +
    if (torch_is_installed()) { -torch_isinf(torch_tensor(c(1, Inf, 2, -Inf, NaN))) -} +torch_isinf(torch_tensor(c(1, Inf, 2, -Inf, NaN))) +}
    #> torch_tensor #> 0 #> 1 diff --git a/reference/torch_isnan.html b/reference/torch_isnan.html index a498c097c004dcca08405225c83d7f5fa14abd85..6c41427f36fe9eb98ed345c94c1ddac3e27e567f 100644 --- a/reference/torch_isnan.html +++ b/reference/torch_isnan.html @@ -38,6 +38,8 @@ + + @@ -55,6 +57,16 @@ + + + + @@ -72,7 +84,7 @@ torch - 0.0.3 + 0.1.0
    @@ -145,6 +157,9 @@
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -158,7 +173,24 @@
  • - +
  • Reference
  • @@ -195,7 +227,7 @@

    Isnan

    -
    torch_isnan(self)
    +
    torch_isnan(self)

    Arguments

    @@ -214,10 +246,10 @@

Returns a new tensor with boolean elements representing whether each element is NaN or not.

    Examples

    -
    if (torch_is_installed()) { +
    if (torch_is_installed()) { -torch_isnan(torch_tensor(c(1, NaN, 2))) -} +torch_isnan(torch_tensor(c(1, NaN, 2))) +}
    #> torch_tensor #> 0 #> 1 diff --git a/reference/torch_kthvalue.html b/reference/torch_kthvalue.html index 1a2abf400c2f601127690d368833469f2e24a5fc..67ad74584c0b2e5ab671ac1560120f86355dfb43 100644 --- a/reference/torch_kthvalue.html +++ b/reference/torch_kthvalue.html @@ -38,6 +38,8 @@ + + @@ -55,6 +57,16 @@ + + + + @@ -72,7 +84,7 @@ torch - 0.0.3 + 0.1.0
    @@ -145,6 +157,9 @@
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -158,7 +173,24 @@
  • - +
  • Reference
  • @@ -195,7 +227,7 @@

    Kthvalue

    -
    torch_kthvalue(self, k, dim = -1L, keepdim = FALSE)
    +
    torch_kthvalue(self, k, dim = -1L, keepdim = FALSE)

    Arguments

@@ -234,15 +266,15 @@ they are of size 1. Otherwise, dim is squeezed (see torch_squeeze), resulting in both the values and indices tensors having 1 fewer dimension than the input tensor.

    Examples

    -
    if (torch_is_installed()) { - -x = torch_arange(1., 6.) -x -torch_kthvalue(x, 4) -x=torch_arange(1.,7.)$resize_(c(2,3)) -x -torch_kthvalue(x, 2, 1, TRUE) -} +
    if (torch_is_installed()) { + +x = torch_arange(1., 6.) +x +torch_kthvalue(x, 4) +x=torch_arange(1.,7.)$resize_(c(2,3)) +x +torch_kthvalue(x, 2, 1, TRUE) +}
    #> [[1]] #> torch_tensor #> 4 5 6 diff --git a/reference/torch_layout.html b/reference/torch_layout.html index d557649615b38941db1a2acebeefb9085d91fa82..510df0274eb894031feafc42f5df949091591798 100644 --- a/reference/torch_layout.html +++ b/reference/torch_layout.html @@ -38,6 +38,8 @@ + + @@ -55,6 +57,16 @@ + + + + @@ -72,7 +84,7 @@ torch - 0.0.3 + 0.1.0
    @@ -145,6 +157,9 @@
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -158,7 +173,24 @@
  • - +
  • Reference
  • @@ -195,9 +227,9 @@

    Creates the corresponding layout

    -
    torch_strided()
    +    
    torch_strided()
     
    -torch_sparse_coo()
    +torch_sparse_coo()
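These are typically passed as the layout argument of tensor creation functions; a minimal sketch:

if (torch_is_installed()) {
  # torch_strided() is the default dense layout of creation functions
  torch_empty(c(2, 3), layout = torch_strided())
}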
    diff --git a/reference/torch_le.html b/reference/torch_le.html index 9f5263505b3330c105394bc5119fc1a1ef9dc805..fccc0a7de02eef70691c68fbee0563c67e3ac421 100644 --- a/reference/torch_le.html +++ b/reference/torch_le.html @@ -38,6 +38,8 @@ + + @@ -55,6 +57,16 @@ + + + + @@ -72,7 +84,7 @@ torch - 0.0.3 + 0.1.0 @@ -145,6 +157,9 @@
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -158,7 +173,24 @@
  • - +
  • Reference
  • @@ -195,7 +227,7 @@

    Le

    -
    torch_le(self, other)
    +
    torch_le(self, other)

    Arguments

    @@ -220,11 +252,11 @@ broadcastable with the first argument.

    Examples

    -
    if (torch_is_installed()) { +
    if (torch_is_installed()) { -torch_le(torch_tensor(matrix(1:4, ncol = 2, byrow=TRUE)), - torch_tensor(matrix(c(1,1,4,4), ncol = 2, byrow=TRUE))) -} +torch_le(torch_tensor(matrix(1:4, ncol = 2, byrow=TRUE)), + torch_tensor(matrix(c(1,1,4,4), ncol = 2, byrow=TRUE))) +}
    #> torch_tensor #> 1 0 #> 1 1 diff --git a/reference/torch_lerp.html b/reference/torch_lerp.html index 59fb1fd7448345f5eb1e7beda0c3740e75082271..fe68cb54c3157bf959a80a0fab7464d1eb199213 100644 --- a/reference/torch_lerp.html +++ b/reference/torch_lerp.html @@ -38,6 +38,8 @@ + + @@ -55,6 +57,16 @@ + + + + @@ -72,7 +84,7 @@ torch - 0.0.3 + 0.1.0
    @@ -145,6 +157,9 @@
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -158,7 +173,24 @@
  • - +
  • Reference
  • @@ -195,7 +227,7 @@

    Lerp

    -
    torch_lerp(self, end, weight)
    +
    torch_lerp(self, end, weight)

    Arguments

@@ -229,15 +261,15 @@ broadcastable. If weight is a tensor, then the shapes of weight, start, and end must be broadcastable.

    Examples

    -
    if (torch_is_installed()) { - -start = torch_arange(1., 5.) -end = torch_empty(4)$fill_(10) -start -end -torch_lerp(start, end, 0.5) -torch_lerp(start, end, torch_full_like(start, 0.5)) -} +
    if (torch_is_installed()) { + +start = torch_arange(1., 5.) +end = torch_empty(4)$fill_(10) +start +end +torch_lerp(start, end, 0.5) +torch_lerp(start, end, torch_full_like(start, 0.5)) +}
    #> torch_tensor #> 5.5000 #> 6.0000 diff --git a/reference/torch_lgamma.html b/reference/torch_lgamma.html index a84df0c4fe9935cbb9afca4d720bb5f949247fa2..16652d2830933360e43262a1df4eca5def0347e3 100644 --- a/reference/torch_lgamma.html +++ b/reference/torch_lgamma.html @@ -38,6 +38,8 @@ + + @@ -55,6 +57,16 @@ + + + + @@ -72,7 +84,7 @@ torch - 0.0.3 + 0.1.0
    @@ -145,6 +157,9 @@
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -158,7 +173,24 @@
  • - +
  • Reference
  • @@ -195,7 +227,7 @@

    Lgamma

    -
    torch_lgamma(self)
    +
    torch_lgamma(self)

    Arguments

    @@ -217,11 +249,11 @@ $$

    Examples

    -
    if (torch_is_installed()) { +
    if (torch_is_installed()) { -a = torch_arange(0.5, 2, 0.5) -torch_lgamma(a) -} +a = torch_arange(0.5, 2, 0.5) +torch_lgamma(a) +}
    #> torch_tensor #> 0.5724 #> 0.0000 diff --git a/reference/torch_linspace.html b/reference/torch_linspace.html index 3b0cfa6d1603c420109d78e3c777c71fa27e1340..a003b57342f28ff55884823c9a954d1075e94f40 100644 --- a/reference/torch_linspace.html +++ b/reference/torch_linspace.html @@ -38,6 +38,8 @@ + + @@ -55,6 +57,16 @@ + + + + @@ -72,7 +84,7 @@ torch - 0.0.3 + 0.1.0
    @@ -145,6 +157,9 @@
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -158,7 +173,24 @@
  • - +
  • Reference
  • @@ -195,15 +227,15 @@

    Linspace

    -
    torch_linspace(
    -  start,
    -  end,
    -  steps = 100,
    -  dtype = NULL,
    -  layout = torch_strided(),
    -  device = NULL,
    -  requires_grad = FALSE
    -)
    +
    torch_linspace(
    +  start,
    +  end,
    +  steps = 100,
    +  dtype = NULL,
    +  layout = torch_strided(),
    +  device = NULL,
    +  requires_grad = FALSE
    +)

    Arguments

    @@ -248,13 +280,13 @@ equally spaced points between start and end.

    The output tensor is 1-D of size steps.

    Examples

    -
    if (torch_is_installed()) { +
    if (torch_is_installed()) { -torch_linspace(3, 10, steps=5) -torch_linspace(-10, 10, steps=5) -torch_linspace(start=-10, end=10, steps=5) -torch_linspace(start=-10, end=10, steps=1) -} +torch_linspace(3, 10, steps=5) +torch_linspace(-10, 10, steps=5) +torch_linspace(start=-10, end=10, steps=5) +torch_linspace(start=-10, end=10, steps=1) +}
    #> torch_tensor #> -10 #> [ CPUFloatType{1} ]
    diff --git a/reference/torch_load.html b/reference/torch_load.html index a7506e056c13ed5762035e60102d370ad3e1045d..2f8026161f541cc0277ced39682180baeebdbca5 100644 --- a/reference/torch_load.html +++ b/reference/torch_load.html @@ -38,6 +38,8 @@ + + @@ -55,6 +57,16 @@ + + + + @@ -72,7 +84,7 @@ torch - 0.0.3 + 0.1.0
    @@ -145,6 +157,9 @@
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -158,7 +173,24 @@
  • - +
  • Reference
  • @@ -195,7 +227,7 @@

    Loads a saved object

    -
    torch_load(path)
    +
    torch_load(path)

    Arguments

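No example is rendered here; a minimal round-trip sketch with torch_save() (see the Serialization article):

if (torch_is_installed()) {
  x <- torch_tensor(c(1, 2, 3))
  tmp <- tempfile(fileext = ".pt")
  torch_save(x, tmp)
  torch_load(tmp)  # recovers x
}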
    diff --git a/reference/torch_log.html b/reference/torch_log.html index 0d30ca684e27ceeb3f14a00e4b5a3e0da2ba41dc..6a9b7fddfd404bf9a83b8c12210d721ec657f42b 100644 --- a/reference/torch_log.html +++ b/reference/torch_log.html @@ -38,6 +38,8 @@ + + @@ -55,6 +57,16 @@ + + + + @@ -72,7 +84,7 @@ torch - 0.0.3 + 0.1.0 @@ -145,6 +157,9 @@
Log

torch_log(self)

Arguments

Returns a new tensor with the natural logarithm of the elements of input.

$$y_{i} = \log_{e}(x_{i})$$

Examples

if (torch_is_installed()) {

a = torch_randn(c(5))
a
torch_log(a)
}
#> torch_tensor
#> nan
#> -0.3113
#> 0.7617
#> -0.4020
#> 0.2561
#> [ CPUFloatType{5} ]

diff --git a/reference/torch_log10.html b/reference/torch_log10.html
Log10

torch_log10(self)

Arguments

Returns a new tensor with the logarithm to the base 10 of the elements of input.

$$y_{i} = \log_{10}(x_{i})$$

Examples

if (torch_is_installed()) {

a = torch_rand(5)
a
torch_log10(a)
}
#> torch_tensor
#> -0.4762
#> -0.4718
#> -0.4183
#> -0.2721
#> -0.4976
#> [ CPUFloatType{5} ]

diff --git a/reference/torch_log1p.html b/reference/torch_log1p.html
Log1p

torch_log1p(self)

Arguments

Returns a new tensor with the natural logarithm of (1 + input), computed elementwise from the values of input.

$$y_{i} = \log_{e}(x_{i} + 1)$$

Examples

if (torch_is_installed()) {

a = torch_randn(c(5))
a
torch_log1p(a)
}
#> torch_tensor
#> 0.3740
#> -0.0977
#> 0.9810
#> 0.4046
#> nan
#> [ CPUFloatType{5} ]

diff --git a/reference/torch_log2.html b/reference/torch_log2.html
Log2

torch_log2(self)

Arguments

Returns a new tensor with the logarithm to the base 2 of the elements of input.

$$y_{i} = \log_{2}(x_{i})$$

Examples

if (torch_is_installed()) {

a = torch_rand(5)
a
torch_log2(a)
}
#> torch_tensor
#> -2.5997
#> -2.8238
#> -0.1515
#> -0.2273
#> -1.4200
#> [ CPUFloatType{5} ]

diff --git a/reference/torch_logdet.html b/reference/torch_logdet.html
Logdet

torch_logdet(self)

Arguments

Backward through logdet() internally uses SVD results and may be unstable when `input` doesn't have distinct singular values. See torch_svd() for details.

Calculates log determinant of a square matrix or batches of square matrices.

Examples

if (torch_is_installed()) {

A = torch_randn(c(3, 3))
torch_det(A)
torch_logdet(A)
A
A$det()
A$det()$log()
}
#> torch_tensor
#> -1.84361
#> [ CPUFloatType{} ]

diff --git a/reference/torch_logical_and.html b/reference/torch_logical_and.html
Logical_and

torch_logical_and(self, other)

Arguments

Computes the element-wise logical AND of the given input tensors. Zeros are treated as FALSE and nonzeros are treated as TRUE.

Examples

if (torch_is_installed()) {

torch_logical_and(torch_tensor(c(TRUE, FALSE, TRUE)), torch_tensor(c(TRUE, FALSE, FALSE)))
a = torch_tensor(c(0, 1, 10, 0), dtype=torch_int8())
b = torch_tensor(c(4, 0, 1, 0), dtype=torch_int8())
torch_logical_and(a, b)
if (FALSE) {
torch_logical_and(a, b, out=torch_empty(4, dtype=torch_bool()))
}
}

diff --git a/reference/torch_logical_not.html b/reference/torch_logical_not.html
Logical_not

Computes the element-wise logical NOT of the given input tensor. The output tensor has the bool dtype. If the input tensor is not a bool tensor, zeros are treated as FALSE and non-zeros are treated as TRUE.

Examples

if (torch_is_installed()) {

torch_logical_not(torch_tensor(c(TRUE, FALSE)))
torch_logical_not(torch_tensor(c(0, 1, -10), dtype=torch_int8()))
torch_logical_not(torch_tensor(c(0., 1.5, -10.), dtype=torch_double()))
}
#> torch_tensor
#> 1
#> 0

diff --git a/reference/torch_logical_or.html b/reference/torch_logical_or.html
Logical_or

torch_logical_or(self, other)

Arguments

Computes the element-wise logical OR of the given input tensors. Zeros are treated as FALSE and nonzeros are treated as TRUE.

Examples

if (torch_is_installed()) {

torch_logical_or(torch_tensor(c(TRUE, FALSE, TRUE)), torch_tensor(c(TRUE, FALSE, FALSE)))
a = torch_tensor(c(0, 1, 10, 0), dtype=torch_int8())
b = torch_tensor(c(4, 0, 1, 0), dtype=torch_int8())
torch_logical_or(a, b)
if (FALSE) {
torch_logical_or(a$double(), b$double())
torch_logical_or(a$double(), b)
torch_logical_or(a, b, out=torch_empty(4, dtype=torch_bool()))
}
}

diff --git a/reference/torch_logical_xor.html b/reference/torch_logical_xor.html
Logical_xor

torch_logical_xor(self, other)

Arguments

Computes the element-wise logical XOR of the given input tensors. Zeros are treated as FALSE and nonzeros are treated as TRUE.

Examples

if (torch_is_installed()) {

torch_logical_xor(torch_tensor(c(TRUE, FALSE, TRUE)), torch_tensor(c(TRUE, FALSE, FALSE)))
a = torch_tensor(c(0, 1, 10, 0), dtype=torch_int8())
b = torch_tensor(c(4, 0, 1, 0), dtype=torch_int8())
torch_logical_xor(a, b)
torch_logical_xor(a$to(dtype=torch_double()), b$to(dtype=torch_double()))
torch_logical_xor(a$to(dtype=torch_double()), b)
}
#> torch_tensor
#> 1
#> 1

diff --git a/reference/torch_logspace.html b/reference/torch_logspace.html
Logspace

torch_logspace(
  start,
  end,
  steps = 100,
  base = 10,
  dtype = NULL,
  layout = torch_strided(),
  device = NULL,
  requires_grad = FALSE
)

Arguments

Returns a one-dimensional tensor of steps points logarithmically spaced with base base between \(base^{start}\) and \(base^{end}\).

The output tensor is 1-D of size steps.

Examples

if (torch_is_installed()) {

torch_logspace(start=-10, end=10, steps=5)
torch_logspace(start=0.1, end=1.0, steps=5)
torch_logspace(start=0.1, end=1.0, steps=1)
torch_logspace(start=2, end=2, steps=1, base=2)
}
#> torch_tensor
#> 4
#> [ CPUFloatType{1} ]

diff --git a/reference/torch_logsumexp.html b/reference/torch_logsumexp.html
Logsumexp

torch_logsumexp(self, dim, keepdim = FALSE)

Arguments

Otherwise, dim is squeezed (see torch_squeeze()), resulting in the output tensor having 1 (or len(dim)) fewer dimension(s).

Examples

if (torch_is_installed()) {

a = torch_randn(c(3, 3))
torch_logsumexp(a, 1)
}
#> torch_tensor
#> 1.3929
#> 1.0997
#> 1.7192
#> [ CPUFloatType{3} ]

diff --git a/reference/torch_lstsq.html b/reference/torch_lstsq.html
Lstsq

torch_lstsq(self, A)

Arguments

The residual for the solution in each column is given by the sum of squares of elements in the remaining \(m - n\) rows of that column.

Examples

if (torch_is_installed()) {

A = torch_tensor(rbind(
  c(1,1,1),
  c(2,3,4),
  c(3,5,2),
  c(4,2,5),
  c(5,4,3)
))
B = torch_tensor(rbind(
  c(-10, -3),
  c(12, 14),
  c(14, 12),
  c(16, 16),
  c(18, 16)
))
out = torch_lstsq(B, A)
out[[1]]
}
#> torch_tensor
#> 2.0000 1.0000
#> 1.0000 1.0000

diff --git a/reference/torch_lt.html b/reference/torch_lt.html
Lt

torch_lt(self, other)

Arguments

The second argument can be a number or a tensor whose shape is broadcastable with the first argument.

Examples

if (torch_is_installed()) {

torch_lt(torch_tensor(matrix(1:4, ncol = 2, byrow=TRUE)),
         torch_tensor(matrix(c(1,1,4,4), ncol = 2, byrow=TRUE)))
}
#> torch_tensor
#> 0 0
#> 1 0

diff --git a/reference/torch_lu.html b/reference/torch_lu.html
Lu

Computes the LU factorization of a square matrix or batches of square matrices A. Returns a tuple containing the LU factorization and pivots of A. Pivoting is done if pivot is set to TRUE.

torch_lu(A, pivot = TRUE, get_infos = FALSE, out = NULL)

Arguments

out: optional output tuple. The elements in the tuple are Tensor, IntTensor. Default: NULL

Examples

if (torch_is_installed()) {

A = torch_randn(c(2, 3, 3))
torch_lu(A)

}
#> [[1]]
#> torch_tensor
#> (1,.,.) =
#>   1.2816 -0.2739  1.3627
#>   0.6032  0.2360 -0.9924
#>   0.5229 -0.2575 -0.0504
#>
#> (2,.,.) =
#>   2.2965  1.0492 -2.6765
#>  -0.3373  2.0281 -1.0654
#>  -0.1761  0.7415  1.4537
#> [ CPUFloatType{2,3,3} ]
#>
#> [[2]]
#> torch_tensor
#>  1  2  3
#>  2  2  3
#> [ CPUIntType{2,3} ]

diff --git a/reference/torch_lu_solve.html b/reference/torch_lu_solve.html
Lu_solve

torch_lu_solve(self, LU_data, LU_pivots)

Arguments

Returns the LU solve of the linear system \(Ax = b\) using the partially pivoted LU factorization of A from torch_lu().

Examples

if (torch_is_installed()) {
A = torch_randn(c(2, 3, 3))
b = torch_randn(c(2, 3, 1))
out = torch_lu(A)
x = torch_lu_solve(b, out[[1]], out[[2]])
torch_norm(torch_bmm(A, x) - b)
}
#> torch_tensor
#> 1.49012e-07
#> [ CPUFloatType{} ]

diff --git a/reference/torch_manual_seed.html b/reference/torch_manual_seed.html
Sets the seed for generating random numbers.

torch_manual_seed(seed)

Arguments
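No example is shipped here; a minimal sketch of reproducible draws, assuming torch_equal() as the whole-tensor comparison helper:

if (torch_is_installed()) {
torch_manual_seed(1)
a <- torch_randn(c(2, 2))
torch_manual_seed(1)
b <- torch_randn(c(2, 2))
torch_equal(a, b) # TRUE: the same seed reproduces the same draws
}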
diff --git a/reference/torch_masked_select.html b/reference/torch_masked_select.html
Masked_select

torch_masked_select(self, mask)

Arguments

Returns a new 1-D tensor which indexes the input tensor according to the boolean mask mask, which is a BoolTensor.

The shapes of the mask tensor and the input tensor don't need to match, but they must be broadcastable.

Examples

if (torch_is_installed()) {

x = torch_randn(c(3, 4))
x
mask = x$ge(0.5)
mask
torch_masked_select(x, mask)
}
#> torch_tensor
#> 0.9040
#> 1.0121
#> 1.2403
#> 3.2352
#> [ CPUFloatType{4} ]

diff --git a/reference/torch_matmul.html b/reference/torch_matmul.html
Matmul

torch_matmul(self, other)

Arguments

For broadcast batched products, e.g. a \((j \times 1 \times n \times m)\) tensor against a \((k \times m \times p)\) tensor, out will be a \((j \times k \times n \times p)\) tensor.

Examples

if (torch_is_installed()) {

# vector x vector
tensor1 = torch_randn(c(3))
tensor2 = torch_randn(c(3))
torch_matmul(tensor1, tensor2)
# matrix x vector
tensor1 = torch_randn(c(3, 4))
tensor2 = torch_randn(c(4))
torch_matmul(tensor1, tensor2)
# batched matrix x broadcasted vector
tensor1 = torch_randn(c(10, 3, 4))
tensor2 = torch_randn(c(4))
torch_matmul(tensor1, tensor2)
# batched matrix x batched matrix
tensor1 = torch_randn(c(10, 3, 4))
tensor2 = torch_randn(c(10, 4, 5))
torch_matmul(tensor1, tensor2)
# batched matrix x broadcasted matrix
tensor1 = torch_randn(c(10, 3, 4))
tensor2 = torch_randn(c(4, 5))
torch_matmul(tensor1, tensor2)
}
#> torch_tensor
#> (1,.,.) =
#>   0.1457  0.5915 -0.2321  0.3854  1.0444
#>   1.0695 -1.5871 -0.2751 -1.8681  2.0865
#>   0.0864  1.0331  0.3989  1.9494 -1.9199
#>
#> (2,.,.) =
#>   1.3647  0.0372  0.1681  1.9106 -0.5945
#>  -0.4827  1.6374  0.5664  0.6887 -1.8527
#>   0.0311 -1.1241  0.1378 -2.2851  0.2947
#>
#> (3,.,.) =
#>   2.2523 -2.0509 -0.0099 -0.2129  0.8620
#>  -0.9853  0.0842 -0.4192 -0.6732  1.1007
#>  -2.6549  2.1404  0.0785 -1.7666 -0.1672
#>
#> (4,.,.) =
#>   2.8045 -1.6532  0.1686  2.0991 -0.4907
#>   1.7490 -0.6220  0.1161  2.0473 -0.5601
#>   0.5042  0.7286  0.2668  1.5151 -1.0157
#>
#> (5,.,.) =
#>   0.3968  0.1744  0.3270 -0.2302 -0.5648
#>   0.5085 -0.4784  0.4098 -0.3110 -1.0941
#>   0.6001  0.1766 -0.1867  2.8172 -0.4761
#>
#> (6,.,.) =
#>  -2.1846  1.3429 -0.1652 -1.5079  0.4546
#>   0.0389  0.1063 -0.1917  0.4759  0.4723
#>   0.2003 -0.8489 -0.1358 -2.0153  1.3900
#>
#> (7,.,.) =
#>  -2.3057  1.1418  0.4197 -1.4893 -1.8241
#>   2.2183 -2.3652 -0.2189 -0.6105  1.6256
#>  -0.8210  2.7651 -0.0790  4.5377 -1.5942
#>
#> (8,.,.) =
#>  -2.9265  1.9699 -0.0915 -0.0178 -0.9813
#>   3.1908 -1.8613  0.2964  2.3269 -0.8748
#>  -2.0344  1.4978 -0.1377 -1.9044  0.8209
#>
#> (9,.,.) =
#>   0.4693 -0.6510  0.3761 -1.1013 -0.6223
#>   4.6105 -4.3216 -0.0323  0.8181  0.9600
#>   3.3824 -2.7861 -0.1797  1.7009  0.7962
#>
#> (10,.,.) =
#>   1.5995 -0.7023 -0.1898  1.2365  0.8378
#>  -2.9803  3.7585  0.3185 -1.2573 -0.6455
#>   2.5373 -4.5162 -0.3837 -2.9798  2.6712
#> [ CPUFloatType{10,3,5} ]

diff --git a/reference/torch_matrix_power.html b/reference/torch_matrix_power.html
Matrix_power

torch_matrix_power(self, n)

Arguments

Returns the matrix raised to the power n for square matrices. For a batch of matrices, each individual matrix is raised to the power n.

Examples

if (torch_is_installed()) {

a = torch_randn(c(2, 2, 2))
a
torch_matrix_power(a, 3)
}
#> torch_tensor
#> (1,.,.) =
#>   0.5965  0.6330
#>  -0.4084 -0.2968
#>
#> (2,.,.) =
#>  0.01 *
#>   0.6387 -0.4923
#>   1.8252 -0.6598
#> [ CPUFloatType{2,2,2} ]

diff --git a/reference/torch_matrix_rank.html b/reference/torch_matrix_rank.html
Matrix_rank

torch_matrix_rank(self, tol, symmetric = FALSE)

Arguments

The rank is computed from the singular values (or the eigenvalues, when symmetric is TRUE) of input.

Examples

if (torch_is_installed()) {

a = torch_eye(10)
torch_matrix_rank(a)
}
#> torch_tensor
#> 10
#> [ CPULongType{} ]

diff --git a/reference/torch_max.html b/reference/torch_max.html
Max

When comparing against another tensor, the shapes do not need to match, but they must be broadcastable.

$$\text{out}_{i} = \max(\text{tensor}_{i}, \text{other}_{i})$$

Examples

if (torch_is_installed()) {

a = torch_randn(c(1, 3))
a
torch_max(a)

a = torch_randn(c(4, 4))
a
torch_max(a, dim = 1)

a = torch_randn(c(4))
a
b = torch_randn(c(4))
b
torch_max(a, other = b)
}
#> torch_tensor
#> 0.1607
#> 0.8017
#> 0.6057
#> 1.4942
#> [ CPUFloatType{4} ]

diff --git a/reference/torch_mean.html b/reference/torch_mean.html
Mean

torch_mean(self, dim, keepdim = FALSE, dtype = NULL)

Arguments

Otherwise, dim is squeezed (see torch_squeeze()), resulting in the output tensor having 1 (or len(dim)) fewer dimension(s).

Examples

if (torch_is_installed()) {

a = torch_randn(c(1, 3))
a
torch_mean(a)

a = torch_randn(c(4, 4))
a
torch_mean(a, 1)
torch_mean(a, 1, TRUE)
}
#> torch_tensor
#> 0.01 *
#> -6.3939 -7.7447 -27.1052 -43.0190
#> [ CPUFloatType{1,4} ]

diff --git a/reference/torch_median.html b/reference/torch_median.html
Median

torch_median(self, dim, keepdim = FALSE)

Arguments

Otherwise, dim is squeezed (see torch_squeeze()), resulting in the output tensors having 1 fewer dimension than input.

Examples

if (torch_is_installed()) {

a = torch_randn(c(1, 3))
a
torch_median(a)

a = torch_randn(c(4, 5))
a
torch_median(a, 1)
}
#> [[1]]
#> torch_tensor
#> -0.7214
#> 0.0035
#> -0.7742
#> -0.4327
#> 0.4810
#> [ CPUFloatType{5} ]
#>
#> [[2]]
#> torch_tensor
#> 1
#> 2
#> 2
#> 0
#> 2
#> [ CPULongType{5} ]

diff --git a/reference/torch_memory_format.html b/reference/torch_memory_format.html
Returns the corresponding memory format.

torch_contiguous_format()

torch_preserve_format()

torch_channels_last_format()
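A minimal sketch of where these objects are used: the *_like constructors documented in this reference accept them through their memory_format argument.

if (torch_is_installed()) {
input <- torch_empty(c(2, 3))
# request an explicitly contiguous layout for the new tensor
torch_ones_like(input, memory_format = torch_contiguous_format())
}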
diff --git a/reference/torch_meshgrid.html b/reference/torch_meshgrid.html
Meshgrid

torch_meshgrid(tensors)

Arguments

Take \(N\) tensors, each of which can be either scalar or 1-dimensional vector, and create \(N\) N-dimensional grids, where the \(i\)th grid is defined by expanding the \(i\)th input over dimensions defined by other inputs.

Examples

if (torch_is_installed()) {

x = torch_tensor(c(1, 2, 3))
y = torch_tensor(c(4, 5, 6))
out = torch_meshgrid(list(x, y))
out
}
#> [[1]]
#> torch_tensor
#> 1 1 1

diff --git a/reference/torch_min.html b/reference/torch_min.html
Min

When comparing against another tensor, the shapes do not need to match, but they must be broadcastable.

$$\text{out}_{i} = \min(\text{tensor}_{i}, \text{other}_{i})$$

Examples

if (torch_is_installed()) {

a = torch_randn(c(1, 3))
a
torch_min(a)

a = torch_randn(c(4, 4))
a
torch_min(a, dim = 1)

a = torch_randn(c(4))
a
b = torch_randn(c(4))
b
torch_min(a, other = b)
}
#> torch_tensor
#> 0.9762
#> -1.0571
#> 0.2046
#> 0.7020
#> [ CPUFloatType{4} ]

diff --git a/reference/torch_mm.html b/reference/torch_mm.html
Mm

torch_mm(self, mat2)

Arguments

This function does not broadcast. For broadcasting matrix products, see torch_matmul(). If self is a \((n \times m)\) tensor and mat2 is a \((m \times p)\) tensor, out will be a \((n \times p)\) tensor.

Examples

if (torch_is_installed()) {

mat1 = torch_randn(c(2, 3))
mat2 = torch_randn(c(3, 3))
torch_mm(mat1, mat2)
}
#> torch_tensor
#> -0.1280 -0.0578 0.9076
#> -3.3546 0.1277 1.3863
#> [ CPUFloatType{2,3} ]

diff --git a/reference/torch_mode.html b/reference/torch_mode.html
Mode

torch_mode(self, dim = -1L, keepdim = FALSE)

Arguments

Otherwise, dim is squeezed (see torch_squeeze()), resulting in the output tensors having 1 fewer dimension than input.

Examples

if (torch_is_installed()) {

a = torch_randint(0, 50, size = list(5))
a
torch_mode(a, 1)
}
#> [[1]]
#> torch_tensor
#> 1
#> [ CPUFloatType{} ]
#>
#> [[2]]
#> torch_tensor
#> 4
#> [ CPULongType{} ]

diff --git a/reference/torch_mul.html b/reference/torch_mul.html
Mul

torch_mul(self, other)

Arguments

The shapes of the two tensors must be broadcastable.

$$\text{out}_{i} = \text{input}_{i} \times \text{other}_{i}$$

Examples

if (torch_is_installed()) {

a = torch_randn(c(3))
a
torch_mul(a, 100)

a = torch_randn(c(4, 1))
a
b = torch_randn(c(1, 4))
b
torch_mul(a, b)
}
#> torch_tensor
#> -0.1927 -0.8468 1.2574 -1.2014
#> -0.0539 -0.2367 0.3515 -0.3359
#> -0.0566 -0.2488 0.3694 -0.3530
#> -0.0106 -0.0465 0.0690 -0.0659
#> [ CPUFloatType{4,4} ]

diff --git a/reference/torch_multinomial.html b/reference/torch_multinomial.html
Multinomial

torch_multinomial(self, num_samples, replacement = FALSE, generator = NULL)

Arguments

Returns a tensor where each row contains num_samples indices sampled from the multinomial probability distribution located in the corresponding row of tensor input.

Examples

if (torch_is_installed()) {

weights = torch_tensor(c(0, 10, 3, 0), dtype=torch_float()) # create a tensor of weights
torch_multinomial(weights, 2)
torch_multinomial(weights, 4, replacement=TRUE)
}
#> torch_tensor
#> 2
#> 1
#> 1
#> 2
#> [ CPULongType{4} ]

diff --git a/reference/torch_mv.html b/reference/torch_mv.html
Mv

torch_mv(self, vec)

Arguments

If self is a \((n \times m)\) tensor and vec is a 1-D tensor of size \(m\), out will be 1-D of size \(n\).

Examples

if (torch_is_installed()) {

mat = torch_randn(c(2, 3))
vec = torch_randn(c(3))
torch_mv(mat, vec)
}
#> torch_tensor
#> 0.4328
#> 1.0417
#> [ CPUFloatType{2} ]

diff --git a/reference/torch_mvlgamma.html b/reference/torch_mvlgamma.html
Mvlgamma

torch_mvlgamma(self, p)

Arguments

where \(C = \log(\pi) \times \frac{p (p - 1)}{4}\) and \(\Gamma(\cdot)\) is the Gamma function.

All elements must be greater than \(\frac{p - 1}{2}\), otherwise an error would be thrown.

Examples

if (torch_is_installed()) {

a = torch_empty(c(2, 3))$uniform_(1, 2)
a
torch_mvlgamma(a, 2)
}
#> torch_tensor
#> 0.6115 0.3920 0.6409
#> 0.8698 0.3909 0.4000
#> [ CPUFloatType{2,3} ]

diff --git a/reference/torch_narrow.html b/reference/torch_narrow.html
Narrow

torch_narrow(self, dim, start, length)

Arguments

Returns a new tensor that is a narrowed version of input: the dimension dim is input from start to start + length. The returned tensor and input tensor share the same underlying storage.

Examples

if (torch_is_installed()) {

x = torch_tensor(matrix(c(1:9), ncol = 3, byrow= TRUE))
torch_narrow(x, 1, torch_tensor(0L)$sum(dim = 1), 2)
torch_narrow(x, 2, torch_tensor(1L)$sum(dim = 1), 2)
}
#> torch_tensor
#> 2 3
#> 5 6

diff --git a/reference/torch_ne.html b/reference/torch_ne.html
Ne

torch_ne(self, other)

Arguments

The second argument can be a number or a tensor whose shape is broadcastable with the first argument.

Examples

if (torch_is_installed()) {

torch_ne(torch_tensor(matrix(1:4, ncol = 2, byrow=TRUE)),
         torch_tensor(matrix(rep(c(1,4), each = 2), ncol = 2, byrow=TRUE)))
}
#> torch_tensor
#> 0 1
#> 1 0

diff --git a/reference/torch_neg.html b/reference/torch_neg.html
Neg

torch_neg(self)

Arguments

$$\text{out} = -1 \times \text{input}$$

Examples

if (torch_is_installed()) {

a = torch_randn(c(5))
a
torch_neg(a)
}
#> torch_tensor
#> 2.2162
#> -1.0400
#> 0.3351
#> 2.0381
#> 0.5984
#> [ CPUFloatType{5} ]

diff --git a/reference/torch_nonzero.html b/reference/torch_nonzero.html
Nonzero

torch_nonzero(self)

Arguments

Returns a tensor containing the indices of all non-zero elements in the input tensor.

If input has zero dimensions and a nonzero scalar value, it is treated as a one-dimensional tensor with one element.

Examples

if (torch_is_installed()) {

torch_nonzero(torch_tensor(c(1, 1, 1, 0, 1)))
}
#> torch_tensor
#> 0
#> 1

diff --git a/reference/torch_norm.html b/reference/torch_norm.html
Norm

torch_norm(self, p = 2L, dim, keepdim = FALSE, dtype)

Arguments

dtype is ignored if dim = NULL and out = NULL.

Returns the matrix norm or vector norm of a given tensor.

Examples

if (torch_is_installed()) {

a = torch_arange(0, 9, dtype = torch_float())
b = a$reshape(list(3, 3))
torch_norm(a)
torch_norm(b)
torch_norm(a, Inf)
torch_norm(b, Inf)

}
#> torch_tensor
#> 8
#> [ CPUFloatType{} ]

diff --git a/reference/torch_normal.html b/reference/torch_normal.html
Normal

torch_normal(mean, std = 1L, size, generator = NULL)

Arguments

When mean and std are both scalars, the mean and standard deviation are shared among all drawn elements. The resulting tensor has size given by size.

Examples

if (torch_is_installed()) {

if (FALSE) {
torch_normal(mean=0, std=torch_arange(1, 0, -0.1))

torch_normal(mean=0.5, std=torch_arange(1., 6.))

torch_normal(mean=torch_arange(1., 6.))

torch_normal(2, 3, size=list(1, 4))
}
}
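The shipped example is fenced off with if (FALSE); a sketch of the same argument forms, assuming they run in your build:

if (torch_is_installed()) {
# scalar mean and std with an explicit output size
torch_normal(2, 3, size = list(1, 4))
# per-element standard deviation taken from a tensor
torch_normal(mean = 0.5, std = torch_arange(1., 6.))
}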
diff --git a/reference/torch_ones.html b/reference/torch_ones.html
Ones

torch_ones(
  ...,
  names = NULL,
  dtype = NULL,
  layout = torch_strided(),
  device = NULL,
  requires_grad = FALSE
)

Arguments

Returns a tensor filled with the scalar value 1, with the shape defined by the variable argument size.

Examples

if (torch_is_installed()) {

torch_ones(c(2, 3))
torch_ones(c(5))
}
#> torch_tensor
#> 1
#> 1

diff --git a/reference/torch_ones_like.html b/reference/torch_ones_like.html
Ones_like

torch_ones_like(
  input,
  dtype = NULL,
  layout = torch_strided(),
  device = NULL,
  requires_grad = FALSE,
  memory_format = torch_preserve_format()
)

Arguments

The old torch_ones_like(input, out=output) is equivalent to torch_ones(input.size(), out=output).

Examples

if (torch_is_installed()) {

input = torch_empty(c(2, 3))
torch_ones_like(input)
}
#> torch_tensor
#> 1 1 1
#> 1 1 1

diff --git a/reference/torch_orgqr.html b/reference/torch_orgqr.html
Orgqr

torch_orgqr(self, input2)

Arguments
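No example is shipped; a minimal sketch, assuming torch_geqrf() as the Householder factorization that produces the (input, tau) pair torch_orgqr() expects:

if (torch_is_installed()) {
a <- torch_randn(c(4, 3))
qr <- torch_geqrf(a)                # list: Householder factors and tau
q <- torch_orgqr(qr[[1]], qr[[2]])  # explicit Q of the QR factorization
torch_mm(q$t(), q)$round()          # columns are orthonormal: Q'Q = I
}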
diff --git a/reference/torch_ormqr.html b/reference/torch_ormqr.html
Ormqr

torch_ormqr(self, input2, input3, left = TRUE, transpose = FALSE)

Arguments
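No example is shipped; a minimal sketch, again assuming torch_geqrf() supplies the Householder factors and tau:

if (torch_is_installed()) {
a <- torch_randn(c(4, 4))
c <- torch_randn(c(4, 2))
qr <- torch_geqrf(a)
# multiply c by the implicit Q of the factorization
torch_ormqr(qr[[1]], qr[[2]], c)
}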
diff --git a/reference/torch_pdist.html b/reference/torch_pdist.html
Pdist

torch_pdist(self, p = 2L)

Arguments
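No example is shipped; a minimal sketch: for a 4 x 3 input, the result is the flattened upper triangle of the pairwise distance matrix, i.e. choose(4, 2) = 6 Euclidean distances.

if (torch_is_installed()) {
x <- torch_randn(c(4, 3))
torch_pdist(x, p = 2)
}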
diff --git a/reference/torch_pinverse.html b/reference/torch_pinverse.html
Pinverse

torch_pinverse(self, rcond = 0)

Arguments

See torch_svd() for more details. Please look at the Moore-Penrose inverse for more details.

Examples

if (torch_is_installed()) {

input = torch_randn(c(3, 5))
input
torch_pinverse(input)
# Batched pinverse example
a = torch_randn(c(2,6,3))
b = torch_pinverse(a)
torch_matmul(b, a)
}
#> torch_tensor
#> (1,.,.) =
#>   1.0000e+00  2.0862e-07  8.9407e-08
#>  -5.2154e-08  1.0000e+00  2.2724e-07
#>  -1.3411e-07  2.0117e-07  1.0000e+00
#>
#> (2,.,.) =
#>   1.0000 -0.0000 -0.0000
#>   0.0000  1.0000  0.0000
#>  -0.0000  0.0000  1.0000
#> [ CPUFloatType{2,3,3} ]

diff --git a/reference/torch_pixel_shuffle.html b/reference/torch_pixel_shuffle.html
Pixel_shuffle

torch_pixel_shuffle(self, upscale_factor)

Arguments

Rearranges elements in a tensor of shape \((*, C \times r^2, H, W)\) to a tensor of shape \((*, C, H \times r, W \times r)\).

See torch.nn.PixelShuffle for details.

Examples

if (torch_is_installed()) {

input = torch_randn(c(1, 9, 4, 4))
output = nnf_pixel_shuffle(input, 3)
print(output$size())
}
#> [1] 1 1 12 12

diff --git a/reference/torch_poisson.html b/reference/torch_poisson.html
Poisson

torch_poisson(self, generator = NULL)

Arguments

Returns a tensor of the same size as input, with each element sampled from a Poisson distribution with rate parameter given by the corresponding element in input, i.e.,

$$\text{out}_{i} \sim \text{Poisson}(\text{input}_{i})$$

Examples

if (torch_is_installed()) {

rates = torch_rand(c(4, 4)) * 5 # rate parameter between 0 and 5
torch_poisson(rates)
}
#> torch_tensor
#> 2 0 2 2
#> 2 6 1 0
#> 1 2 4 5
#> 1 1 2 6
#> [ CPUFloatType{4,4} ]

diff --git a/reference/torch_polygamma.html b/reference/torch_polygamma.html
Polygamma

torch_polygamma(n, self)

Arguments

$$\psi^{(n)}(x) = \frac{d^{(n)}}{dx^{(n)}} \psi(x)$$

Examples

if (torch_is_installed()) {
if (FALSE) {
a = torch_tensor(c(1, 0.5))
torch_polygamma(1, a)
}
}

diff --git a/reference/torch_pow.html b/reference/torch_pow.html
Pow

torch_pow(self, exponent)

Arguments

The returned tensor out is of the same shape as exponent.

Examples

if (torch_is_installed()) {

a = torch_randn(c(4))
a
torch_pow(a, 2)
exp = torch_arange(1., 5.)
a = torch_arange(1., 5.)
a
exp
torch_pow(a, exp)


exp = torch_arange(1., 5.)
base = 2
torch_pow(base, exp)
}
#> torch_tensor
#> 2
#> 4

diff --git a/reference/torch_prod.html b/reference/torch_prod.html
Prod

torch_prod(self, dim, keepdim = FALSE, dtype = NULL)

Arguments

Otherwise, dim is squeezed (see torch_squeeze()), resulting in the output tensor having 1 fewer dimension than input.

Examples

if (torch_is_installed()) {

a = torch_randn(c(1, 3))
a
torch_prod(a)

a = torch_randn(c(4, 2))
a
torch_prod(a, 1)
}
#> torch_tensor
#> -0.2616
#> -0.0308
#> [ CPUFloatType{2} ]

diff --git a/reference/torch_promote_types.html b/reference/torch_promote_types.html
Promote_types

torch_promote_types(type1, type2)

Arguments

See the type promotion documentation for more information on the type promotion logic.

Examples

if (torch_is_installed()) {

torch_promote_types(torch_int32(), torch_float32())
torch_promote_types(torch_uint8(), torch_long())
}
#> torch_Long

diff --git a/reference/torch_qr.html b/reference/torch_qr.html
Qr

torch_qr(self, some = TRUE)

Arguments

with \(Q\) being an orthogonal matrix or batch of orthogonal matrices and \(R\) being an upper triangular matrix or batch of upper triangular matrices. Otherwise, if some is FALSE, this function returns the complete QR factorization.

Examples

if (torch_is_installed()) {

a = torch_tensor(matrix(c(12., -51, 4, 6, 167, -68, -4, 24, -41), ncol = 3, byrow = TRUE))
out = torch_qr(a)
q = out[[1]]
r = out[[2]]
torch_mm(q, r)$round()
torch_mm(q$t(), q)$round()
}
#> torch_tensor
#> 1 0 0
#> 0 1 0

diff --git a/reference/torch_qscheme.html b/reference/torch_qscheme.html
Creates the corresponding Scheme object

torch_per_channel_affine()

torch_per_tensor_affine()

torch_per_channel_symmetric()

torch_per_tensor_symmetric()

diff --git a/reference/torch_quantize_per_channel.html b/reference/torch_quantize_per_channel.html
Quantize_per_channel

torch_quantize_per_channel(self, scales, zero_points, axis, dtype)

Arguments

Converts a float tensor to per-channel quantized tensor with given scales and zero points.

Examples

if (torch_is_installed()) {
x = torch_tensor(matrix(c(-1.0, 0.0, 1.0, 2.0), ncol = 2, byrow = TRUE))
torch_quantize_per_channel(x, torch_tensor(c(0.1, 0.01)),
    torch_tensor(c(10L, 0L)), 0, torch_quint8())
torch_quantize_per_channel(x, torch_tensor(c(0.1, 0.01)),
    torch_tensor(c(10L, 0L)), 0, torch_quint8())$int_repr()
}
#> torch_tensor
#> 0 10
#> 100 200

diff --git a/reference/torch_quantize_per_tensor.html b/reference/torch_quantize_per_tensor.html
Quantize_per_tensor

torch_quantize_per_tensor(self, scale, zero_point, dtype)

Arguments

Converts a float tensor to quantized tensor with given scale and zero point.

Examples

if (torch_is_installed()) {
torch_quantize_per_tensor(torch_tensor(c(-1.0, 0.0, 1.0, 2.0)), 0.1, 10, torch_quint8())
torch_quantize_per_tensor(torch_tensor(c(-1.0, 0.0, 1.0, 2.0)), 0.1, 10, torch_quint8())$int_repr()
}
#> torch_tensor
#> 0
#> 10

diff --git a/reference/torch_rand.html b/reference/torch_rand.html
Rand

torch_rand(
  ...,
  names = NULL,
  dtype = NULL,
  layout = torch_strided(),
  device = NULL,
  requires_grad = FALSE
)

Arguments

Returns a tensor filled with random numbers from a uniform distribution on the interval \([0, 1)\).

The shape of the tensor is defined by the variable argument size.

Examples

if (torch_is_installed()) {

torch_rand(4)
torch_rand(c(2, 3))
}
#> torch_tensor
#> 0.7836 0.4775 0.1233
#> 0.0427 0.2741 0.4000
#> [ CPUFloatType{2,3} ]

diff --git a/reference/torch_rand_like.html b/reference/torch_rand_like.html
Rand_like

torch_rand_like(
  input,
  dtype = NULL,
  layout = torch_strided(),
  device = NULL,
  requires_grad = FALSE,
  memory_format = torch_preserve_format()
)

Arguments
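No example is shipped; a minimal sketch:

if (torch_is_installed()) {
input <- torch_empty(c(2, 3))
# same shape and dtype as input, values uniform on [0, 1)
torch_rand_like(input)
}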
diff --git a/reference/torch_randint.html b/reference/torch_randint.html
Randint

torch_randint(
  low,
  high,
  size,
  generator = NULL,
  dtype = NULL,
  layout = torch_strided(),
  device = NULL,
  requires_grad = FALSE,
  memory_format = torch_preserve_format()
)

Arguments

With the global dtype default (torch_float32), this function returns a tensor with dtype torch_int64.

Examples

if (torch_is_installed()) {

torch_randint(3, 5, list(3))
torch_randint(0, 10, size = list(2, 2))
torch_randint(3, 10, list(2, 2))
}
#> torch_tensor
#> 6 4
#> 9 8
#> [ CPUFloatType{2,2} ]

diff --git a/reference/torch_randint_like.html b/reference/torch_randint_like.html
  • Tensor class
  • +
  • + Serialization +
  • Loading Data @@ -158,7 +173,24 @@
  • - +
  • Reference
  • @@ -195,15 +227,15 @@

    Randint_like

    -
    torch_randint_like(
    -  input,
    -  low,
    -  high,
    -  dtype = NULL,
    -  layout = torch_strided(),
    -  device = NULL,
    -  requires_grad = FALSE
    -)
    +
    torch_randint_like(
    +  input,
    +  low,
    +  high,
    +  dtype = NULL,
    +  layout = torch_strided(),
    +  device = NULL,
    +  requires_grad = FALSE
    +)

    Arguments
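Examples

(No example is rendered on this page; a minimal sketch, assuming only the signature above.)

if (torch_is_installed()) {
# integers drawn uniformly from [low, high), shaped like x
x = torch_zeros(c(2, 2))
torch_randint_like(x, low = 0, high = 10)
}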

    diff --git a/reference/torch_randn.html b/reference/torch_randn.html index 38ec31367f5a19f4c67312c2852f29fc1d0a1840..7d0f09e05fd3ec366d726714635bf90414ce0e14 100644 --- a/reference/torch_randn.html +++ b/reference/torch_randn.html @@ -38,6 +38,8 @@ + + @@ -55,6 +57,16 @@ + + + + @@ -72,7 +84,7 @@ torch - 0.0.3 + 0.1.0 @@ -145,6 +157,9 @@
@@ -195,14 +227,14 @@

    Randn

    -
    torch_randn(
    -  ...,
    -  names = NULL,
    -  dtype = NULL,
    -  layout = torch_strided(),
    -  device = NULL,
    -  requires_grad = FALSE
    -)
    +
    torch_randn(
    +  ...,
    +  names = NULL,
    +  dtype = NULL,
    +  layout = torch_strided(),
    +  device = NULL,
    +  requires_grad = FALSE
    +)

    Arguments

@@ -247,14 +279,14 @@ $$ \text{out}_{i} \sim \mathcal{N}(0, 1) $$ The shape of the tensor is defined by the variable argument size.

    Examples

    -
    if (torch_is_installed()) { +
    if (torch_is_installed()) { -torch_randn(c(4)) -torch_randn(c(2, 3)) -} +torch_randn(c(4)) +torch_randn(c(2, 3)) +}
    #> torch_tensor -#> 1.7302 1.3721 -0.0691 -#> 0.4933 -0.7643 -0.5334 +#> 0.8924 0.1711 0.9711 +#> -0.5444 0.3223 -0.4884 #> [ CPUFloatType{2,3} ]
@@ -195,14 +227,14 @@

    Randn_like

    -
    torch_randn_like(
    -  input,
    -  dtype = NULL,
    -  layout = torch_strided(),
    -  device = NULL,
    -  requires_grad = FALSE,
    -  memory_format = torch_preserve_format()
    -)
    +
    torch_randn_like(
    +  input,
    +  dtype = NULL,
    +  layout = torch_strided(),
    +  device = NULL,
    +  requires_grad = FALSE,
    +  memory_format = torch_preserve_format()
    +)

    Arguments
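Examples

(No example is rendered on this page; a minimal sketch, assuming only the signature above.)

if (torch_is_installed()) {
# standard-normal samples with the same shape as x
x = torch_ones(c(2, 3))
torch_randn_like(x)
}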

    diff --git a/reference/torch_randperm.html b/reference/torch_randperm.html index 452ba130f5bece594e3e25187a97719fee6c6018..36fb08fab1388e1f7d74ca0bb00adeaf0dc04f0e 100644 --- a/reference/torch_randperm.html +++ b/reference/torch_randperm.html @@ -38,6 +38,8 @@ + + @@ -55,6 +57,16 @@ + + + + @@ -72,7 +84,7 @@ torch - 0.0.3 + 0.1.0 @@ -145,6 +157,9 @@
@@ -195,13 +227,13 @@

    Randperm

    -
    torch_randperm(
    -  n,
    -  dtype = torch_int64(),
    -  layout = torch_strided(),
    -  device = NULL,
    -  requires_grad = FALSE
    -)
    +
    torch_randperm(
    +  n,
    +  dtype = torch_int64(),
    +  layout = torch_strided(),
    +  device = NULL,
    +  requires_grad = FALSE
    +)

    Arguments

    @@ -236,15 +268,15 @@

    Returns a random permutation of integers from 0 to n - 1.

    Examples

    -
    if (torch_is_installed()) { +
    if (torch_is_installed()) { -torch_randperm(4) -} +torch_randperm(4) +}
    #> torch_tensor -#> 3 #> 2 -#> 0 #> 1 +#> 0 +#> 3 #> [ CPULongType{4} ]
@@ -195,15 +227,15 @@

    Range

    -
    torch_range(
    -  start,
    -  end,
    -  step = 1,
    -  dtype = NULL,
    -  layout = torch_strided(),
    -  device = NULL,
    -  requires_grad = FALSE
    -)
    +
    torch_range(
    +  start,
    +  end,
    +  step = 1,
    +  dtype = NULL,
    +  layout = torch_strided(),
    +  device = NULL,
    +  requires_grad = FALSE
    +)

    Arguments

@@ -256,11 +288,11 @@ $$ \left\lfloor \frac{\text{end} - \text{start}}{\text{step}} \right\rfloor + 1 $$

    This function is deprecated in favor of torch_arange.

    Examples

    -
    if (torch_is_installed()) { +
    if (torch_is_installed()) { -torch_range(1, 4) -torch_range(1, 4, 0.5) -} +torch_range(1, 4) +torch_range(1, 4, 0.5) +}
    #> Warning: This function is deprecated in favor of torch_arange.
    #> Warning: This function is deprecated in favor of torch_arange.
    #> torch_tensor #> 1.0000 #> 1.5000 diff --git a/reference/torch_real.html b/reference/torch_real.html index 77c9ca9000cf64173c733b3cc22594e2e8d1b267..06ccb8bb8de3b7c5b067d90b71bf27c49271dcf6 100644 --- a/reference/torch_real.html +++ b/reference/torch_real.html @@ -38,6 +38,8 @@ + + @@ -55,6 +57,16 @@ + + + + @@ -72,7 +84,7 @@ torch - 0.0.3 + 0.1.0
@@ -195,7 +227,7 @@

    Real

    -
    torch_real(self)
    +
    torch_real(self)

    Arguments

    @@ -224,11 +256,11 @@ returns it.

$$ \text{out}_{i} = \text{real}(\text{input}_{i}) $$

    Examples

    -
    if (torch_is_installed()) { -if (FALSE) { -torch_real(torch_tensor(c(-1 + 1i, -2 + 2i, 3 - 3i))) -} -} +
    if (torch_is_installed()) { +if (FALSE) { +torch_real(torch_tensor(c(-1 + 1i, -2 + 2i, 3 - 3i))) +} +}
@@ -195,7 +227,7 @@

    Reciprocal

    -
    torch_reciprocal(self)
    +
    torch_reciprocal(self)

    Arguments

@@ -217,17 +249,17 @@ $$ \text{out}_{i} = \frac{1}{\text{input}_{i}} $$

    Examples

    -
    if (torch_is_installed()) { +
    if (torch_is_installed()) { -a = torch_randn(c(4)) -a -torch_reciprocal(a) -} +a = torch_randn(c(4)) +a +torch_reciprocal(a) +}
    #> torch_tensor -#> 5.6343 -#> -0.4645 -#> 4.8170 -#> 2.1907 +#> -0.4381 +#> 39.7676 +#> -0.7620 +#> -2.9071 #> [ CPUFloatType{4} ]
@@ -195,11 +227,11 @@

Creates the reduction object

    -
    torch_reduction_sum()
    +    
    torch_reduction_sum()
     
    -torch_reduction_mean()
    +torch_reduction_mean()
     
    -torch_reduction_none()
    +torch_reduction_none()
    diff --git a/reference/torch_relu.html b/reference/torch_relu.html index 9eedfcae4ae325029caa6970dd055fb1141280b9..d35ecd0c099f458fc5e7bbc5a1c9ff4fbf44afc2 100644 --- a/reference/torch_relu.html +++ b/reference/torch_relu.html @@ -38,6 +38,8 @@ + + @@ -55,6 +57,16 @@ + + + + @@ -72,7 +84,7 @@ torch - 0.0.3 + 0.1.0 @@ -145,6 +157,9 @@
@@ -195,7 +227,7 @@

    Relu

    -
    torch_relu(self)
    +
    torch_relu(self)

    Arguments
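Examples

(No example is rendered on this page; a minimal sketch of the element-wise rectifier.)

if (torch_is_installed()) {
# relu keeps positive entries and replaces the rest with 0
a = torch_tensor(c(-1, 0, 2))
torch_relu(a)
}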

    diff --git a/reference/torch_relu_.html b/reference/torch_relu_.html index 572dddd535330ac98a399d50b4d3e5863290dae4..a46b0e7c92aeb587c3a607eee1a777df9ec1adb6 100644 --- a/reference/torch_relu_.html +++ b/reference/torch_relu_.html @@ -38,6 +38,8 @@ + + @@ -55,6 +57,16 @@ + + + + @@ -72,7 +84,7 @@ torch - 0.0.3 + 0.1.0 @@ -145,6 +157,9 @@
@@ -195,7 +227,7 @@

    Relu_

    -
    torch_relu_(self)
    +
    torch_relu_(self)

    Arguments
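Examples

(No example is rendered on this page; a minimal sketch — the trailing underscore marks the in-place variant.)

if (torch_is_installed()) {
a = torch_tensor(c(-1, 0, 2))
torch_relu_(a)
a  # a itself has been modified
}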

    diff --git a/reference/torch_remainder.html b/reference/torch_remainder.html index 351ef4af7ec1089daa4a689789d530acdc8f70ce..8c9ac138ec040ef4ecb46e4f4a4f0e2f2f4ba0e3 100644 --- a/reference/torch_remainder.html +++ b/reference/torch_remainder.html @@ -38,6 +38,8 @@ + + @@ -55,6 +57,16 @@ + + + + @@ -72,7 +84,7 @@ torch - 0.0.3 + 0.1.0 @@ -145,6 +157,9 @@
@@ -195,7 +227,7 @@

    Remainder

    -
    torch_remainder(self, other)
    +
    torch_remainder(self, other)

    Arguments

    @@ -222,11 +254,11 @@ numbers. The remainder has the same sign as the divisor.

other must be broadcastable.

    Examples

    -
    if (torch_is_installed()) { +
    if (torch_is_installed()) { -torch_remainder(torch_tensor(c(-3., -2, -1, 1, 2, 3)), 2) -torch_remainder(torch_tensor(c(1., 2, 3, 4, 5)), 1.5) -} +torch_remainder(torch_tensor(c(-3., -2, -1, 1, 2, 3)), 2) +torch_remainder(torch_tensor(c(1., 2, 3, 4, 5)), 1.5) +}
    #> torch_tensor #> 1.0000 #> 0.5000 diff --git a/reference/torch_renorm.html b/reference/torch_renorm.html index 3f28bd7b59e7eb0f2ba64e913002d41498b70688..7752bc9afdb5672ac3757cdf9fc8951bc2d49333 100644 --- a/reference/torch_renorm.html +++ b/reference/torch_renorm.html @@ -38,6 +38,8 @@ + + @@ -55,6 +57,16 @@ + + + + @@ -72,7 +84,7 @@ torch - 0.0.3 + 0.1.0
@@ -195,7 +227,7 @@

    Renorm

    -
    torch_renorm(self, p, dim, maxnorm)
    +
    torch_renorm(self, p, dim, maxnorm)

    Arguments

    @@ -231,13 +263,13 @@ than the value maxnorm

    Examples

    -
    if (torch_is_installed()) { -x = torch_ones(c(3, 3)) -x[2,]$fill_(2) -x[3,]$fill_(3) -x -torch_renorm(x, 1, 1, 5) -} +
    if (torch_is_installed()) { +x = torch_ones(c(3, 3)) +x[2,]$fill_(2) +x[3,]$fill_(3) +x +torch_renorm(x, 1, 1, 5) +}
    #> torch_tensor #> 1.0000 1.0000 1.0000 #> 1.6667 1.6667 1.6667 diff --git a/reference/torch_repeat_interleave.html b/reference/torch_repeat_interleave.html index 0e85bf392140d5bdac1af2d486b8c60fb6a0cd37..d89d83de47df943a88a570a7f779080a89faad0c 100644 --- a/reference/torch_repeat_interleave.html +++ b/reference/torch_repeat_interleave.html @@ -38,6 +38,8 @@ + + @@ -55,6 +57,16 @@ + + + + @@ -72,7 +84,7 @@ torch - 0.0.3 + 0.1.0
@@ -195,7 +227,7 @@

    Repeat_interleave

    -
    torch_repeat_interleave(self, repeats, dim = NULL)
    +
    torch_repeat_interleave(self, repeats, dim = NULL)

    Arguments

    @@ -236,16 +268,16 @@ 1 appears n2 times, 2 appears n3 times, etc.

    Examples

    -
    if (torch_is_installed()) { -if (FALSE) { -x = torch_tensor(c(1, 2, 3)) -x$repeat_interleave(2) -y = torch_tensor(matrix(c(1, 2, 3, 4), ncol = 2, byrow=TRUE)) -torch_repeat_interleave(y, 2) -torch_repeat_interleave(y, 3, dim=1) -torch_repeat_interleave(y, torch_tensor(c(1, 2)), dim=1) -} -} +
    if (torch_is_installed()) { +if (FALSE) { +x = torch_tensor(c(1, 2, 3)) +x$repeat_interleave(2) +y = torch_tensor(matrix(c(1, 2, 3, 4), ncol = 2, byrow=TRUE)) +torch_repeat_interleave(y, 2) +torch_repeat_interleave(y, 3, dim=1) +torch_repeat_interleave(y, torch_tensor(c(1, 2)), dim=1) +} +}
@@ -195,7 +227,7 @@

    Reshape

    -
    torch_reshape(self, shape)
    +
    torch_reshape(self, shape)

    Arguments

    @@ -225,13 +257,13 @@ depend on the copying vs. viewing behavior.

    dimensions and the number of elements in input.

    Examples

    -
    if (torch_is_installed()) { +
    if (torch_is_installed()) { -a = torch_arange(0, 4) -torch_reshape(a, list(2, 2)) -b = torch_tensor(matrix(c(0, 1, 2, 3), ncol = 2, byrow=TRUE)) -torch_reshape(b, list(-1)) -} +a = torch_arange(0, 4) +torch_reshape(a, list(2, 2)) +b = torch_tensor(matrix(c(0, 1, 2, 3), ncol = 2, byrow=TRUE)) +torch_reshape(b, list(-1)) +}
    #> torch_tensor #> 0 #> 1 diff --git a/reference/torch_result_type.html b/reference/torch_result_type.html index 261709999741bc447d6d4e3bd00a7157e0744241..f70af9ba997a456615bdcb20c7ee8365dca88acc 100644 --- a/reference/torch_result_type.html +++ b/reference/torch_result_type.html @@ -38,6 +38,8 @@ + + @@ -55,6 +57,16 @@ + + + + @@ -72,7 +84,7 @@ torch - 0.0.3 + 0.1.0
@@ -195,7 +227,7 @@

    Result_type

    -
    torch_result_type(tensor1, tensor2)
    +
    torch_result_type(tensor1, tensor2)

    Arguments

    @@ -220,10 +252,10 @@ operation on the provided input tensors. See type promotion documentation for more information on the type promotion logic.

    Examples

    -
    if (torch_is_installed()) { +
    if (torch_is_installed()) { -torch_result_type(tensor1 = torch_tensor(c(1, 2), dtype=torch_int()), tensor2 = 1) -} +torch_result_type(tensor1 = torch_tensor(c(1, 2), dtype=torch_int()), tensor2 = 1) +}
    #> torch_Float
@@ -195,7 +227,7 @@

    Rfft

    -
    torch_rfft(self, signal_ndim, normalized = FALSE, onesided = TRUE)
    +
    torch_rfft(self, signal_ndim, normalized = FALSE, onesided = TRUE)

    Arguments

@@ -262,47 +294,47 @@ of input, but instead the last dimension will be halved as of size torch_backends.mkl.is_available to check if MKL is installed.

    Examples

    -
    if (torch_is_installed()) { +
    if (torch_is_installed()) { -x = torch_randn(c(5, 5)) -torch_rfft(x, 2) -torch_rfft(x, 2, onesided=FALSE) -} +x = torch_randn(c(5, 5)) +torch_rfft(x, 2) +torch_rfft(x, 2, onesided=FALSE) +}
    #> torch_tensor #> (1,.,.) = -#> -0.4625 0.0000 -#> -5.1624 3.2586 -#> -6.0212 1.9752 -#> -6.0212 -1.9752 -#> -5.1624 -3.2586 +#> -2.6885 0.0000 +#> 2.2970 -3.1304 +#> -3.3626 5.1310 +#> -3.3626 -5.1310 +#> 2.2970 3.1304 #> #> (2,.,.) = -#> 1.6002 -2.9581 -#> 2.1200 0.1343 -#> 3.0293 -7.1277 -#> 3.5287 3.5903 -#> -0.8434 -0.1112 +#> 3.5981 -1.6740 +#> 6.2369 6.0703 +#> -3.2006 4.7044 +#> -1.0957 -2.3358 +#> -5.0663 -1.6792 #> #> (3,.,.) = -#> -3.0107 -2.3239 -#> 0.9071 0.0358 -#> -0.0607 -0.0974 -#> -2.2262 2.3303 -#> 1.6252 1.9184 +#> -1.1132 0.0063 +#> -6.2288 2.1029 +#> 3.1367 4.8644 +#> 1.5096 -1.7360 +#> 3.2987 2.3868 #> #> (4,.,.) = -#> -3.0107 2.3239 -#> 1.6252 -1.9184 -#> -2.2262 -2.3303 -#> -0.0607 0.0974 -#> 0.9071 -0.0358 +#> -1.1132 -0.0063 +#> 3.2987 -2.3868 +#> 1.5096 1.7360 +#> 3.1367 -4.8644 +#> -6.2288 -2.1029 #> #> (5,.,.) = -#> 1.6002 2.9581 -#> -0.8434 0.1112 -#> 3.5287 -3.5903 -#> 3.0293 7.1277 -#> 2.1200 -0.1343 +#> 3.5981 1.6740 +#> -5.0663 1.6792 +#> -1.0957 2.3358 +#> -3.2006 -4.7044 +#> 6.2369 -6.0703 #> [ CPUFloatType{5,5,2} ]
@@ -195,7 +227,7 @@

    Roll

    -
    torch_roll(self, shifts, dims = list())
    +
    torch_roll(self, shifts, dims = list())

    Arguments

    @@ -225,14 +257,14 @@ specified, the tensor will be flattened before rolling and then restored to the original shape.

    Examples

    -
    if (torch_is_installed()) { - -x = torch_tensor(c(1, 2, 3, 4, 5, 6, 7, 8))$view(c(4, 2)) -x -torch_roll(x, 1, 1) -torch_roll(x, -1, 1) -torch_roll(x, shifts=list(2, 1), dims=list(1, 2)) -} +
    if (torch_is_installed()) { + +x = torch_tensor(c(1, 2, 3, 4, 5, 6, 7, 8))$view(c(4, 2)) +x +torch_roll(x, 1, 1) +torch_roll(x, -1, 1) +torch_roll(x, shifts=list(2, 1), dims=list(1, 2)) +}
    #> torch_tensor #> 6 5 #> 8 7 diff --git a/reference/torch_rot90.html b/reference/torch_rot90.html index b71ff01b65e8f291268e302dd701a0e96e4c5fdc..329fb66b57e9cf2ecb6ffacfb06ff83a023abd94 100644 --- a/reference/torch_rot90.html +++ b/reference/torch_rot90.html @@ -38,6 +38,8 @@ + + @@ -55,6 +57,16 @@ + + + + @@ -72,7 +84,7 @@ torch - 0.0.3 + 0.1.0
@@ -195,7 +227,7 @@

    Rot90

    -
    torch_rot90(self, k = 1L, dims = c(0, 1))
    +
    torch_rot90(self, k = 1L, dims = c(0, 1))

    Arguments

    @@ -223,15 +255,15 @@ Rotation direction is from the first towards the second axis if k > 0, and from the second towards the first for k < 0.

    Examples

    -
    if (torch_is_installed()) { - -x = torch_arange(0, 4)$view(c(2, 2)) -x -torch_rot90(x, 1, c(1, 2)) -x = torch_arange(0, 8)$view(c(2, 2, 2)) -x -torch_rot90(x, 1, c(1, 2)) -} +
    if (torch_is_installed()) { + +x = torch_arange(0, 4)$view(c(2, 2)) +x +torch_rot90(x, 1, c(1, 2)) +x = torch_arange(0, 8)$view(c(2, 2, 2)) +x +torch_rot90(x, 1, c(1, 2)) +}
    #> torch_tensor #> (1,.,.) = #> 2 3 diff --git a/reference/torch_round.html b/reference/torch_round.html index 24caf7992d68339bd1797163e45bf81ce07233a8..b5e0db6dcbf44b17d8542ccb6061d9ebf205b1ba 100644 --- a/reference/torch_round.html +++ b/reference/torch_round.html @@ -38,6 +38,8 @@ + + @@ -55,6 +57,16 @@ + + + + @@ -72,7 +84,7 @@ torch - 0.0.3 + 0.1.0
@@ -195,7 +227,7 @@

    Round

    -
    torch_round(self)
    +
    torch_round(self)

    Arguments

    @@ -215,17 +247,17 @@ to the closest integer.

    Examples

    -
    if (torch_is_installed()) { +
    if (torch_is_installed()) { -a = torch_randn(c(4)) -a -torch_round(a) -} +a = torch_randn(c(4)) +a +torch_round(a) +}
    #> torch_tensor -#> 1 -#> 1 +#> -0 +#> 0 +#> 0 #> 0 -#> 1 #> [ CPUFloatType{4} ]
@@ -195,13 +227,13 @@

    Rrelu_

    -
    torch_rrelu_(
    -  self,
    -  lower = 0.125,
    -  upper = 0.333333,
    -  training = FALSE,
    -  generator = NULL
    -)
    +
    torch_rrelu_(
    +  self,
    +  lower = 0.125,
    +  upper = 0.333333,
    +  training = FALSE,
    +  generator = NULL
    +)

    Arguments
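Examples

(No example is rendered on this page; a minimal sketch, assuming only the signature above.)

if (torch_is_installed()) {
# in-place randomized leaky relu: with training = TRUE each negative entry is
# scaled by a slope drawn uniformly from [lower, upper]
x = torch_randn(c(4))
torch_rrelu_(x, lower = 0.125, upper = 0.333333, training = TRUE)
x
}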

    diff --git a/reference/torch_rsqrt.html b/reference/torch_rsqrt.html index c172ff369b8c0bd3823dd83440a70e9fcfd09492..7388252cc89ec8befab74931caa289dfcd9e9d9d 100644 --- a/reference/torch_rsqrt.html +++ b/reference/torch_rsqrt.html @@ -38,6 +38,8 @@ + + @@ -55,6 +57,16 @@ + + + + @@ -72,7 +84,7 @@ torch - 0.0.3 + 0.1.0 @@ -145,6 +157,9 @@
@@ -195,7 +227,7 @@

    Rsqrt

    -
    torch_rsqrt(self)
    +
    torch_rsqrt(self)

    Arguments

    @@ -218,17 +250,17 @@ the elements of input.

$$ \text{out}_{i} = \frac{1}{\sqrt{\text{input}_{i}}} $$

    Examples

    -
    if (torch_is_installed()) { +
    if (torch_is_installed()) { -a = torch_randn(c(4)) -a -torch_rsqrt(a) -} +a = torch_randn(c(4)) +a +torch_rsqrt(a) +}
    #> torch_tensor #> nan -#> 1.0875 -#> 1.7124 -#> 1.1438 +#> nan +#> 2.1289 +#> nan #> [ CPUFloatType{4} ]
@@ -197,7 +229,7 @@ term storage.

    -
    torch_save(obj, path, ...)
    +
    torch_save(obj, path, ...)

    Arguments
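Examples

(No example is rendered on this page; a minimal round-trip sketch — the temporary path is illustrative, and torch_load is assumed as the reading counterpart.)

if (torch_is_installed()) {
x = torch_tensor(c(1, 2, 3))
tmp = tempfile(fileext = ".pt")
torch_save(x, tmp)
torch_load(tmp)
}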

    diff --git a/reference/torch_selu.html b/reference/torch_selu.html index 5abb76651208d96b74fd3dae13da246143e88756..ad94b56c105aa837043cd2ed4f8d4455a5ed2244 100644 --- a/reference/torch_selu.html +++ b/reference/torch_selu.html @@ -38,6 +38,8 @@ + + @@ -55,6 +57,16 @@ + + + + @@ -72,7 +84,7 @@ torch - 0.0.3 + 0.1.0 @@ -145,6 +157,9 @@
@@ -195,7 +227,7 @@

    Selu

    -
    torch_selu(self)
    +
    torch_selu(self)

    Arguments
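Examples

(No example is rendered on this page; a minimal sketch of the element-wise op.)

if (torch_is_installed()) {
a = torch_randn(c(4))
torch_selu(a)
}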

    diff --git a/reference/torch_selu_.html b/reference/torch_selu_.html index fd576b25e401c1de3ff933c6480a5b55c4a83965..f2531b73f00e1b27c54a982a03111281a356ff25 100644 --- a/reference/torch_selu_.html +++ b/reference/torch_selu_.html @@ -38,6 +38,8 @@ + + @@ -55,6 +57,16 @@ + + + + @@ -72,7 +84,7 @@ torch - 0.0.3 + 0.1.0 @@ -145,6 +157,9 @@
@@ -195,7 +227,7 @@

    Selu_

    -
    torch_selu_(self)
    +
    torch_selu_(self)

    Arguments
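Examples

(No example is rendered on this page; a minimal sketch — in-place variant of torch_selu.)

if (torch_is_installed()) {
a = torch_randn(c(4))
torch_selu_(a)
a  # a itself has been modified
}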

    diff --git a/reference/torch_sigmoid.html b/reference/torch_sigmoid.html index b375a2ed2e001aad41db1d66594d0cbb7bbb51e0..e5b9f38462fdfe75d4b1b20a4ba81d77245cc3e2 100644 --- a/reference/torch_sigmoid.html +++ b/reference/torch_sigmoid.html @@ -38,6 +38,8 @@ + + @@ -55,6 +57,16 @@ + + + + @@ -72,7 +84,7 @@ torch - 0.0.3 + 0.1.0 @@ -145,6 +157,9 @@
@@ -195,7 +227,7 @@

    Sigmoid

    -
    torch_sigmoid(self)
    +
    torch_sigmoid(self)

    Arguments

@@ -217,17 +249,17 @@ $$ \text{out}_{i} = \frac{1}{1 + e^{-\text{input}_{i}}} $$

    Examples

    -
    if (torch_is_installed()) { +
    if (torch_is_installed()) { -a = torch_randn(c(4)) -a -torch_sigmoid(a) -} +a = torch_randn(c(4)) +a +torch_sigmoid(a) +}
    #> torch_tensor -#> 0.4169 -#> 0.4884 -#> 0.4421 -#> 0.6942 +#> 0.4851 +#> 0.6093 +#> 0.3732 +#> 0.2108 #> [ CPUFloatType{4} ]
@@ -195,7 +227,7 @@

    Sign

    -
    torch_sign(self)
    +
    torch_sign(self)

    Arguments

@@ -217,12 +249,12 @@ $$ \text{out}_{i} = \text{sgn}(\text{input}_{i}) $$

    Examples

    -
    if (torch_is_installed()) { +
    if (torch_is_installed()) { -a = torch_tensor(c(0.7, -1.2, 0., 2.3)) -a -torch_sign(a) -} +a = torch_tensor(c(0.7, -1.2, 0., 2.3)) +a +torch_sign(a) +}
    #> torch_tensor #> 1 #> -1 diff --git a/reference/torch_sin.html b/reference/torch_sin.html index c90950e40729f699168987a6ec67ccfced38c824..e3e6fdda42015b56040288a67101aae41b934a8b 100644 --- a/reference/torch_sin.html +++ b/reference/torch_sin.html @@ -38,6 +38,8 @@ + + @@ -55,6 +57,16 @@ + + + + @@ -72,7 +84,7 @@ torch - 0.0.3 + 0.1.0
@@ -195,7 +227,7 @@

    Sin

    -
    torch_sin(self)
    +
    torch_sin(self)

    Arguments

@@ -217,17 +249,17 @@ $$ \text{out}_{i} = \sin(\text{input}_{i}) $$

    Examples

    -
    if (torch_is_installed()) { +
    if (torch_is_installed()) { -a = torch_randn(c(4)) -a -torch_sin(a) -} +a = torch_randn(c(4)) +a +torch_sin(a) +}
    #> torch_tensor -#> -0.2805 -#> 0.5482 -#> 0.8525 -#> -0.6915 +#> -0.8386 +#> -0.5387 +#> 0.9056 +#> -0.8318 #> [ CPUFloatType{4} ]
@@ -195,7 +227,7 @@

    Sinh

    -
    torch_sinh(self)
    +
    torch_sinh(self)

    Arguments

@@ -218,17 +250,17 @@ $$ \text{out}_{i} = \sinh(\text{input}_{i}) $$

    Examples

    -
    if (torch_is_installed()) { +
    if (torch_is_installed()) { -a = torch_randn(c(4)) -a -torch_sinh(a) -} +a = torch_randn(c(4)) +a +torch_sinh(a) +}
    #> torch_tensor -#> -0.7905 -#> 1.1799 -#> 1.2017 -#> -2.5987 +#> -1.3742 +#> -1.2565 +#> 1.3291 +#> -0.5771 #> [ CPUFloatType{4} ]
@@ -195,7 +227,7 @@

    Slogdet

    -
    torch_slogdet(self)
    +
    torch_slogdet(self)

    Arguments

@@ -226,14 +258,14 @@ See torch_svd for details.

    Calculates the sign and log absolute value of the determinant(s) of a square matrix or batches of square matrices.

    Examples

    -
    if (torch_is_installed()) { - -A = torch_randn(c(3, 3)) -A -torch_det(A) -torch_logdet(A) -torch_slogdet(A) -} +
    if (torch_is_installed()) { + +A = torch_randn(c(3, 3)) +A +torch_det(A) +torch_logdet(A) +torch_slogdet(A) +}
    #> [[1]] #> torch_tensor #> 1 @@ -241,7 +273,7 @@ See `~torch.svd` for details. #> #> [[2]] #> torch_tensor -#> -0.447307 +#> 1.17265 #> [ CPUFloatType{} ] #>
    diff --git a/reference/torch_solve.html b/reference/torch_solve.html index 2f10717ce07329937ba0f34c619cacf11f71f6df..3bad4099464252643b7fe812259ea6f7f657194a 100644 --- a/reference/torch_solve.html +++ b/reference/torch_solve.html @@ -38,6 +38,8 @@ + + @@ -55,6 +57,16 @@ + + + + @@ -72,7 +84,7 @@ torch - 0.0.3 + 0.1.0 @@ -145,6 +157,9 @@
@@ -195,7 +227,7 @@

    Solve

    -
    torch_solve(self, A)
    +
    torch_solve(self, A)

    Arguments

    @@ -233,30 +265,30 @@ batches of 2D matrices. If the inputs are batches, then returns batched outputs solution, LU.

    Examples

    -
    if (torch_is_installed()) { - -A = torch_tensor(rbind(c(6.80, -2.11, 5.66, 5.97, 8.23), - c(-6.05, -3.30, 5.36, -4.44, 1.08), - c(-0.45, 2.58, -2.70, 0.27, 9.04), - c(8.32, 2.71, 4.35, -7.17, 2.14), - c(-9.67, -5.14, -7.26, 6.08, -6.87)))$t() -B = torch_tensor(rbind(c(4.02, 6.19, -8.22, -7.57, -3.03), - c(-1.56, 4.00, -8.67, 1.75, 2.86), - c(9.81, -4.09, -4.57, -8.61, 8.99)))$t() -out = torch_solve(B, A) -X = out[[1]] -LU = out[[2]] -torch_dist(B, torch_mm(A, X)) +
    if (torch_is_installed()) { + +A = torch_tensor(rbind(c(6.80, -2.11, 5.66, 5.97, 8.23), + c(-6.05, -3.30, 5.36, -4.44, 1.08), + c(-0.45, 2.58, -2.70, 0.27, 9.04), + c(8.32, 2.71, 4.35, -7.17, 2.14), + c(-9.67, -5.14, -7.26, 6.08, -6.87)))$t() +B = torch_tensor(rbind(c(4.02, 6.19, -8.22, -7.57, -3.03), + c(-1.56, 4.00, -8.67, 1.75, 2.86), + c(9.81, -4.09, -4.57, -8.61, 8.99)))$t() +out = torch_solve(B, A) +X = out[[1]] +LU = out[[2]] +torch_dist(B, torch_mm(A, X)) # Batched solver example -A = torch_randn(c(2, 3, 1, 4, 4)) -B = torch_randn(c(2, 3, 1, 4, 6)) -out = torch_solve(B, A) -X = out[[1]] -LU = out[[2]] -torch_dist(B, A$matmul(X)) -} +A = torch_randn(c(2, 3, 1, 4, 4)) +B = torch_randn(c(2, 3, 1, 4, 6)) +out = torch_solve(B, A) +X = out[[1]] +LU = out[[2]] +torch_dist(B, A$matmul(X)) +}
    #> torch_tensor -#> 1.29687e-05 +#> 6.1e-06 #> [ CPUFloatType{} ]
@@ -195,7 +227,7 @@

    Sort

    -
    torch_sort(self, dim = -1L, descending = FALSE)
    +
    torch_sort(self, dim = -1L, descending = FALSE)

    Arguments

@@ -229,26 +261,26 @@ sorted values and indices are the indices of the elements in the original input tensor.

    Examples

    -
    if (torch_is_installed()) { - -x = torch_randn(c(3, 4)) -out = torch_sort(x) -out -out = torch_sort(x, 1) -out -} +
    if (torch_is_installed()) { + +x = torch_randn(c(3, 4)) +out = torch_sort(x) +out +out = torch_sort(x, 1) +out +}
    #> [[1]] #> torch_tensor -#> -0.7961 0.1753 -0.2432 -1.1334 -#> -0.2084 0.5509 0.3876 -0.9865 -#> 0.1948 0.9346 0.5226 -0.1232 +#> -0.3537 -3.3591 -0.5009 -1.4206 +#> 0.2542 -0.4446 -0.4783 -0.1513 +#> 1.0143 1.4504 1.6122 1.3161 #> [ CPUFloatType{3,4} ] #> #> [[2]] #> torch_tensor -#> 0 1 0 2 -#> 2 2 1 0 -#> 1 0 2 1 +#> 1 2 2 0 +#> 2 0 0 1 +#> 0 1 1 2 #> [ CPULongType{3,4} ] #>
    diff --git a/reference/torch_sparse_coo_tensor.html b/reference/torch_sparse_coo_tensor.html index 99aa3f3df153fedca01dba9aab3261ddc6cd2baa..6384205b214a1506b9eb22e19d98fb8d6e22a257 100644 --- a/reference/torch_sparse_coo_tensor.html +++ b/reference/torch_sparse_coo_tensor.html @@ -38,6 +38,8 @@ + + @@ -55,6 +57,16 @@ + + + + @@ -72,7 +84,7 @@ torch - 0.0.3 + 0.1.0 @@ -145,6 +157,9 @@
@@ -195,14 +227,14 @@

    Sparse_coo_tensor

    -
    torch_sparse_coo_tensor(
    -  indices,
    -  values,
    -  size = NULL,
    -  dtype = NULL,
    -  device = NULL,
    -  requires_grad = FALSE
    -)
    +
    torch_sparse_coo_tensor(
    +  indices,
    +  values,
    +  size = NULL,
    +  dtype = NULL,
    +  device = NULL,
    +  requires_grad = FALSE
    +)

    Arguments

@@ -244,25 +276,25 @@ coordinates in the indices, and the value at that index is the sum of all duplicate value entries. torch_sparse_.

    Examples

    -
    if (torch_is_installed()) { +
    if (torch_is_installed()) { -i = torch_tensor(matrix(c(1, 2, 2, 3, 1, 3), ncol = 3, byrow = TRUE), dtype=torch_int64()) -v = torch_tensor(c(3, 4, 5), dtype=torch_float32()) -torch_sparse_coo_tensor(i, v) -torch_sparse_coo_tensor(i, v, c(2, 4)) +i = torch_tensor(matrix(c(1, 2, 2, 3, 1, 3), ncol = 3, byrow = TRUE), dtype=torch_int64()) +v = torch_tensor(c(3, 4, 5), dtype=torch_float32()) +torch_sparse_coo_tensor(i, v) +torch_sparse_coo_tensor(i, v, c(2, 4)) # create empty sparse tensors -S = torch_sparse_coo_tensor( - torch_empty(c(1, 0), dtype = torch_int64()), - torch_tensor(numeric(), dtype = torch_float32()), - c(1) -) -S = torch_sparse_coo_tensor( - torch_empty(c(1, 0), dtype = torch_int64()), - torch_empty(c(0, 2)), - c(1, 2) -) -} +S = torch_sparse_coo_tensor( + torch_empty(c(1, 0), dtype = torch_int64()), + torch_tensor(numeric(), dtype = torch_float32()), + c(1) +) +S = torch_sparse_coo_tensor( + torch_empty(c(1, 0), dtype = torch_int64()), + torch_empty(c(0, 2)), + c(1, 2) +) +}
@@ -195,7 +227,7 @@

    Split

    -
    torch_split(self, split_size, dim = 1L)
    +
    torch_split(self, split_size, dim = 1L)

    Arguments
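Examples

(No example is rendered on this page; a minimal sketch, assuming split_size also accepts a list of per-chunk sizes as in the upstream API.)

if (torch_is_installed()) {
x = torch_tensor(1:10)
torch_split(x, 2)           # five chunks of length 2
torch_split(x, list(4, 6))  # chunks of lengths 4 and 6
}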

    diff --git a/reference/torch_sqrt.html b/reference/torch_sqrt.html index b8b49a160f400ecb994a36a6d940efac48826c9a..bbcd7945a01b222f97e958ac2458db5dca35c96f 100644 --- a/reference/torch_sqrt.html +++ b/reference/torch_sqrt.html @@ -38,6 +38,8 @@ + + @@ -55,6 +57,16 @@ + + + + @@ -72,7 +84,7 @@ torch - 0.0.3 + 0.1.0 @@ -145,6 +157,9 @@
@@ -195,7 +227,7 @@

    Sqrt

    -
    torch_sqrt(self)
    +
    torch_sqrt(self)

    Arguments

@@ -217,17 +249,17 @@ $$ \text{out}_{i} = \sqrt{\text{input}_{i}} $$

    Examples

    -
    if (torch_is_installed()) { +
    if (torch_is_installed()) { -a = torch_randn(c(4)) -a -torch_sqrt(a) -} +a = torch_randn(c(4)) +a +torch_sqrt(a) +}
    #> torch_tensor #> nan -#> 0.2255 -#> 0.5333 -#> 0.3449 +#> 0.7270 +#> 0.5041 +#> nan #> [ CPUFloatType{4} ]
@@ -195,7 +227,7 @@

    Square

    -
    torch_square(self)
    +
    torch_square(self)

    Arguments

    @@ -214,17 +246,17 @@

    Returns a new tensor with the square of the elements of input.

    Examples

    -
    if (torch_is_installed()) { +
    if (torch_is_installed()) { -a = torch_randn(c(4)) -a -torch_square(a) -} +a = torch_randn(c(4)) +a +torch_square(a) +}
    #> torch_tensor -#> 1.5194 -#> 0.2682 -#> 1.6344 -#> 0.8489 +#> 1.6023 +#> 0.1200 +#> 1.5702 +#> 7.8365 #> [ CPUFloatType{4} ]
@@ -195,7 +227,7 @@

    Squeeze

    -
    torch_squeeze(self, dim)
    +
    torch_squeeze(self, dim)

    Arguments

    @@ -229,17 +261,17 @@ dimension. If input is of shape: \((A \times 1 \times B)\), will squeeze the tensor to the shape \((A \times B)\).

    Examples

    -
    if (torch_is_installed()) { - -x = torch_zeros(c(2, 1, 2, 1, 2)) -x -y = torch_squeeze(x) -y -y = torch_squeeze(x, 1) -y -y = torch_squeeze(x, 2) -y -} +
    if (torch_is_installed()) { + +x = torch_zeros(c(2, 1, 2, 1, 2)) +x +y = torch_squeeze(x) +y +y = torch_squeeze(x, 1) +y +y = torch_squeeze(x, 2) +y +}
    #> torch_tensor #> (1,1,.,.) = #> 0 0 diff --git a/reference/torch_stack.html b/reference/torch_stack.html index c29f7a7dee21b779ccb2b4ed7d37ecf1aafec5fe..40db26fada0b59cf617747817ea551c2a7409091 100644 --- a/reference/torch_stack.html +++ b/reference/torch_stack.html @@ -38,6 +38,8 @@ + + @@ -55,6 +57,16 @@ + + + + @@ -72,7 +84,7 @@ torch - 0.0.3 + 0.1.0
@@ -195,7 +227,7 @@

    Stack

    -
    torch_stack(tensors, dim = 1L)
    +
    torch_stack(tensors, dim = 1L)

    Arguments
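Examples

(No example is rendered on this page; a minimal sketch — dims are 1-based, so the default dim = 1 prepends the new dimension.)

if (torch_is_installed()) {
x = torch_tensor(c(1, 2))
y = torch_tensor(c(3, 4))
torch_stack(list(x, y))          # shape (2, 2), new leading dimension
torch_stack(list(x, y), dim = 2) # stacks along the second dimension
}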

    diff --git a/reference/torch_std.html b/reference/torch_std.html index 4bdd78bf1f9b118461bfb9fdfcdd5acaa30c587d..cb66842d032380c7a3488230892a835fe091e324 100644 --- a/reference/torch_std.html +++ b/reference/torch_std.html @@ -38,6 +38,8 @@ + + @@ -55,6 +57,16 @@ + + + + @@ -72,7 +84,7 @@ torch - 0.0.3 + 0.1.0 @@ -145,6 +157,9 @@
@@ -195,7 +227,7 @@

    Std

    -
    torch_std(self, dim, unbiased = TRUE, keepdim = FALSE)
    +
    torch_std(self, dim, unbiased = TRUE, keepdim = FALSE)

    Arguments

    @@ -242,22 +274,22 @@ output tensor having 1 (or len(dim)) fewer dimension(s).

    via the biased estimator. Otherwise, Bessel's correction will be used.

    Examples

    -
    if (torch_is_installed()) { +
    if (torch_is_installed()) { -a = torch_randn(c(1, 3)) -a -torch_std(a) +a = torch_randn(c(1, 3)) +a +torch_std(a) -a = torch_randn(c(4, 4)) -a -torch_std(a, dim=1) -} +a = torch_randn(c(4, 4)) +a +torch_std(a, dim=1) +}
    #> torch_tensor -#> 0.5958 -#> 0.5692 -#> 1.5368 -#> 0.7848 +#> 1.0153 +#> 0.6058 +#> 1.6137 +#> 0.7942 #> [ CPUFloatType{4} ]
@@ -195,7 +227,7 @@

    Std_mean

    -
    torch_std_mean(self, dim, unbiased = TRUE, keepdim = FALSE)
    +
    torch_std_mean(self, dim, unbiased = TRUE, keepdim = FALSE)

    Arguments

    @@ -242,31 +274,31 @@ output tensor having 1 (or len(dim)) fewer dimension(s).

    via the biased estimator. Otherwise, Bessel's correction will be used.

    Examples

    -
    if (torch_is_installed()) { +
    if (torch_is_installed()) { -a = torch_randn(c(1, 3)) -a -torch_std_mean(a) +a = torch_randn(c(1, 3)) +a +torch_std_mean(a) -a = torch_randn(c(4, 4)) -a -torch_std_mean(a, 1) -} +a = torch_randn(c(4, 4)) +a +torch_std_mean(a, 1) +}
    #> [[1]] #> torch_tensor -#> 1.4613 -#> 0.7194 -#> 0.5630 -#> 1.2705 +#> 1.1390 +#> 1.6594 +#> 1.2557 +#> 0.9028 #> [ CPUFloatType{4} ] #> #> [[2]] #> torch_tensor -#> 0.2608 -#> -0.1669 -#> -0.7519 -#> -1.0647 +#> 0.6997 +#> -0.4549 +#> 0.0871 +#> -0.1986 #> [ CPUFloatType{4} ] #>
    diff --git a/reference/torch_stft.html b/reference/torch_stft.html index 1d6ef0f23dc29663385b3a1012e1ee2efca64cc0..fca295cef3b62913c2b2fd87bb657679fc951d66 100644 --- a/reference/torch_stft.html +++ b/reference/torch_stft.html @@ -38,6 +38,8 @@ + + @@ -55,6 +57,16 @@ + + + + @@ -72,7 +84,7 @@ torch - 0.0.3 + 0.1.0 @@ -145,6 +157,9 @@
@@ -195,17 +227,17 @@

    Stft

    -
    torch_stft(
    -  input,
    -  n_fft,
    -  hop_length = NULL,
    -  win_length = NULL,
    -  window = NULL,
    -  center = TRUE,
    -  pad_mode = "reflect",
    -  normalized = FALSE,
    -  onesided = TRUE
    -)
    +
    torch_stft(
    +  input,
    +  n_fft,
    +  hop_length = NULL,
    +  win_length = NULL,
    +  window = NULL,
    +  center = TRUE,
    +  pad_mode = "reflect",
    +  normalized = FALSE,
    +  onesided = TRUE
    +)

    Arguments
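Examples

(No example is rendered on this page; a minimal sketch, assuming only the signature above — the frame sizes are illustrative.)

if (torch_is_installed()) {
# 100-point frames hopped by 50 samples; the last dimension of the result
# holds the (real, imaginary) pair of each frequency bin
x = torch_randn(c(400))
torch_stft(x, n_fft = 100, hop_length = 50)
}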

    diff --git a/reference/torch_sum.html b/reference/torch_sum.html index 6498a6e4f0cc96a8d1c862b1842a44fe97e1b393..0e28c16ba3a3aa1d1d9b91f20f9fb47093f9701d 100644 --- a/reference/torch_sum.html +++ b/reference/torch_sum.html @@ -38,6 +38,8 @@ + + @@ -55,6 +57,16 @@ + + + + @@ -72,7 +84,7 @@ torch - 0.0.3 + 0.1.0 @@ -145,6 +157,9 @@
@@ -195,7 +227,7 @@

    Sum

    -
    torch_sum(self, dim, keepdim = FALSE, dtype = NULL)
    +
    torch_sum(self, dim, keepdim = FALSE, dtype = NULL)

    Arguments

    @@ -238,19 +270,19 @@ Otherwise, dim is squeezed (see output tensor having 1 (or len(dim)) fewer dimension(s).

    Examples

    -
    if (torch_is_installed()) { +
    if (torch_is_installed()) { -a = torch_randn(c(1, 3)) -a -torch_sum(a) +a = torch_randn(c(1, 3)) +a +torch_sum(a) -a = torch_randn(c(4, 4)) -a -torch_sum(a, 1) -b = torch_arange(0, 4 * 5 * 6)$view(c(4, 5, 6)) -torch_sum(b, list(2, 1)) -} +a = torch_randn(c(4, 4)) +a +torch_sum(a, 1) +b = torch_arange(0, 4 * 5 * 6)$view(c(4, 5, 6)) +torch_sum(b, list(2, 1)) +}
    #> torch_tensor #> 435 #> 1335 diff --git a/reference/torch_svd.html b/reference/torch_svd.html index 1914c96943faa8aa3b19cc342f4206e047300f7f..29916f5412a92bd960208a77a112aea497d106b5 100644 --- a/reference/torch_svd.html +++ b/reference/torch_svd.html @@ -38,6 +38,8 @@ + + @@ -55,6 +57,16 @@ + + + + @@ -72,7 +84,7 @@ torch - 0.0.3 + 0.1.0
@@ -195,7 +227,7 @@

    Svd

    -
    torch_svd(self, some = TRUE, compute_uv = TRUE)
    +
    torch_svd(self, some = TRUE, compute_uv = TRUE)

    Arguments

@@ -249,24 +281,24 @@ i.e., if the last two dimensions of input are m and n, some will be ignored here.

    Examples

    -
    if (torch_is_installed()) { - -a = torch_randn(c(5, 3)) -a -out = torch_svd(a) -u = out[[1]] -s = out[[2]] -v = out[[3]] -torch_dist(a, torch_mm(torch_mm(u, torch_diag(s)), v$t())) -a_big = torch_randn(c(7, 5, 3)) -out = torch_svd(a_big) -u = out[[1]] -s = out[[2]] -v = out[[3]] -torch_dist(a_big, torch_matmul(torch_matmul(u, torch_diag_embed(s)), v$transpose(-2, -1))) -} +
    if (torch_is_installed()) { + +a = torch_randn(c(5, 3)) +a +out = torch_svd(a) +u = out[[1]] +s = out[[2]] +v = out[[3]] +torch_dist(a, torch_mm(torch_mm(u, torch_diag(s)), v$t())) +a_big = torch_randn(c(7, 5, 3)) +out = torch_svd(a_big) +u = out[[1]] +s = out[[2]] +v = out[[3]] +torch_dist(a_big, torch_matmul(torch_matmul(u, torch_diag_embed(s)), v$transpose(-2, -1))) +}
    #> torch_tensor -#> 2.94448e-06 +#> 2.16926e-06 #> [ CPUFloatType{} ]
@@ -195,7 +227,7 @@

    Symeig

    -
    torch_symeig(self, eigenvectors = FALSE, upper = TRUE)
    +
    torch_symeig(self, eigenvectors = FALSE, upper = TRUE)

    Arguments

    @@ -242,23 +274,23 @@ only the upper triangular portion is used by default.

    If upper is FALSE, then lower triangular portion is used.

    Examples

    -
    if (torch_is_installed()) { - -a = torch_randn(c(5, 5)) -a = a + a$t() # To make a symmetric -a -o = torch_symeig(a, eigenvectors=TRUE) -e = o[[1]] -v = o[[2]] -e -v -a_big = torch_randn(c(5, 2, 2)) -a_big = a_big + a_big$transpose(-2, -1) # To make a_big symmetric -o = a_big$symeig(eigenvectors=TRUE) -e = o[[1]] -v = o[[2]] -torch_allclose(torch_matmul(v, torch_matmul(e$diag_embed(), v$transpose(-2, -1))), a_big) -} +
    if (torch_is_installed()) { + +a = torch_randn(c(5, 5)) +a = a + a$t() # To make a symmetric +a +o = torch_symeig(a, eigenvectors=TRUE) +e = o[[1]] +v = o[[2]] +e +v +a_big = torch_randn(c(5, 2, 2)) +a_big = a_big + a_big$transpose(-2, -1) # To make a_big symmetric +o = a_big$symeig(eigenvectors=TRUE) +e = o[[1]] +v = o[[2]] +torch_allclose(torch_matmul(v, torch_matmul(e$diag_embed(), v$transpose(-2, -1))), a_big) +}
    #> [1] TRUE
@@ -195,7 +227,7 @@

    T

    -
    torch_t(self)
    +
    torch_t(self)

    Arguments

    @@ -217,22 +249,22 @@ and 1.

is equivalent to transpose(input, 1, 2).

    Examples

    -
    if (torch_is_installed()) { - -x = torch_randn(c(2,3)) -x -torch_t(x) -x = torch_randn(c(3)) -x -torch_t(x) -x = torch_randn(c(2, 3)) -x -torch_t(x) -} +
    if (torch_is_installed()) { + +x = torch_randn(c(2,3)) +x +torch_t(x) +x = torch_randn(c(3)) +x +torch_t(x) +x = torch_randn(c(2, 3)) +x +torch_t(x) +}
    #> torch_tensor -#> 0.9294 -0.8336 -#> 1.1379 0.4580 -#> -2.2674 0.2512 +#> -1.9564 0.0201 +#> -0.0425 0.3108 +#> 0.4885 -0.3318 #> [ CPUFloatType{3,2} ]
@@ -195,7 +227,7 @@

    Take

    -
    torch_take(self, index)
    +
    torch_take(self, index)

    Arguments

    @@ -220,11 +252,11 @@ The input tensor is treated as if it were viewed as a 1-D tensor. The result takes the same shape as the indices.

    Examples

    -
    if (torch_is_installed()) { +
    if (torch_is_installed()) { -src = torch_tensor(matrix(c(4,3,5,6,7,8), ncol = 3, byrow = TRUE)) -torch_take(src, torch_tensor(c(1, 2, 5), dtype = torch_int64())) -} +src = torch_tensor(matrix(c(4,3,5,6,7,8), ncol = 3, byrow = TRUE)) +torch_take(src, torch_tensor(c(1, 2, 5), dtype = torch_int64())) +}
    #> torch_tensor #> 4 #> 3 diff --git a/reference/torch_tan.html b/reference/torch_tan.html index 33954955b11cbb0e6a6b01f23d0b3d534965c891..9863483320c07f88d4437b169e9c083d3a39abbc 100644 --- a/reference/torch_tan.html +++ b/reference/torch_tan.html @@ -38,6 +38,8 @@ + + @@ -55,6 +57,16 @@ + + + + @@ -72,7 +84,7 @@ torch - 0.0.3 + 0.1.0
@@ -195,7 +227,7 @@

    Tan

    -
    torch_tan(self)
    +
    torch_tan(self)

    Arguments

@@ -217,17 +249,17 @@ $$ \text{out}_{i} = \tan(\text{input}_{i}) $$

    Examples

    -
    if (torch_is_installed()) { +
    if (torch_is_installed()) { -a = torch_randn(c(4)) -a -torch_tan(a) -} +a = torch_randn(c(4)) +a +torch_tan(a) +}
    #> torch_tensor -#> -0.2036 -#> -0.6868 -#> 0.0959 -#> 1.0523 +#> -0.8057 +#> 1.1575 +#> -0.8553 +#> -1.2794 #> [ CPUFloatType{4} ]
@@ -195,7 +227,7 @@

    Tanh

    -
    torch_tanh(self)
    +
    torch_tanh(self)

    Arguments

    @@ -218,17 +250,17 @@ of input.

$$ \text{out}_{i} = \tanh(\text{input}_{i}) $$

    Examples

    -
    if (torch_is_installed()) { +
    if (torch_is_installed()) { -a = torch_randn(c(4)) -a -torch_tanh(a) -} +a = torch_randn(c(4)) +a +torch_tanh(a) +}
    #> torch_tensor -#> 0.4501 -#> -0.2993 -#> 0.4767 -#> 0.4199 +#> -0.7525 +#> -0.8047 +#> 0.9685 +#> -0.1683 #> [ CPUFloatType{4} ]
@@ -195,13 +227,13 @@

    Converts R objects to a torch tensor

    -
    torch_tensor(
    -  data,
    -  dtype = NULL,
    -  device = NULL,
    -  requires_grad = FALSE,
    -  pin_memory = FALSE
    -)
    +
    torch_tensor(
    +  data,
    +  dtype = NULL,
    +  device = NULL,
    +  requires_grad = FALSE,
    +  pin_memory = FALSE
    +)

    Arguments

    @@ -230,11 +262,11 @@

    Examples

    -
    if (torch_is_installed()) { -torch_tensor(c(1,2,3,4)) -torch_tensor(c(1,2,3,4), dtype = torch_int()) +
    if (torch_is_installed()) { +torch_tensor(c(1,2,3,4)) +torch_tensor(c(1,2,3,4), dtype = torch_int()) -} +}
    #> torch_tensor #> 1 #> 2 diff --git a/reference/torch_tensordot.html b/reference/torch_tensordot.html index 567ab398d080d088736197cc9feecdd0a3c3c73a..587358df6bcbbeabf93ae3865b7ec1f54ca4c0bc 100644 --- a/reference/torch_tensordot.html +++ b/reference/torch_tensordot.html @@ -38,6 +38,8 @@ + + + + + + @@ -73,7 +85,7 @@ tensordot implements a generalized matrix product." /> torch - 0.0.3 + 0.1.0
@@ -197,7 +229,7 @@ tensordot implements a generalized matrix product.

    -
    torch_tensordot(a, b, dims = 2)
    +
    torch_tensordot(a, b, dims = 2)

    Arguments

    @@ -218,17 +250,17 @@ tensordot implements a generalized matrix product." />

    Examples

    -
    if (torch_is_installed()) { - -a = torch_arange(start = 0, end = 60.)$reshape(c(3, 4, 5)) -b = torch_arange(start = 0, end = 24.)$reshape(c(4, 3, 2)) -torch_tensordot(a, b, dims = list(c(2, 1), c(1, 2))) -if (FALSE) { -a = torch_randn(3, 4, 5, device='cuda') -b = torch_randn(4, 5, 6, device='cuda') -c = torch_tensordot(a, b, dims=2)$cpu() -} -} +
    if (torch_is_installed()) { + +a = torch_arange(start = 0, end = 60.)$reshape(c(3, 4, 5)) +b = torch_arange(start = 0, end = 24.)$reshape(c(4, 3, 2)) +torch_tensordot(a, b, dims = list(c(2, 1), c(1, 2))) +if (FALSE) { +a = torch_randn(3, 4, 5, device='cuda') +b = torch_randn(4, 5, 6, device='cuda') +c = torch_tensordot(a, b, dims=2)$cpu() +} +}
@@ -195,7 +227,7 @@

    Threshold_

    -
    torch_threshold_(self, threshold, value)
    +
    torch_threshold_(self, threshold, value)

    Arguments
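Examples

(No example is rendered on this page; a minimal sketch — in-place: entries not above threshold are replaced by value.)

if (torch_is_installed()) {
x = torch_tensor(c(-1, 0.5, 2))
torch_threshold_(x, threshold = 1, value = 0)
x
}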

    diff --git a/reference/torch_topk.html b/reference/torch_topk.html index 735d131fca4ab081895fc967988d27de51bab107..a0a9ce637e3448e5dc5923f7ce68824b3959b153 100644 --- a/reference/torch_topk.html +++ b/reference/torch_topk.html @@ -38,6 +38,8 @@ + + @@ -55,6 +57,16 @@ + + + + @@ -72,7 +84,7 @@ torch - 0.0.3 + 0.1.0 @@ -145,6 +157,9 @@
@@ -195,7 +227,7 @@

    Topk

    -
    torch_topk(self, k, dim = -1L, largest = TRUE, sorted = TRUE)
    +
    torch_topk(self, k, dim = -1L, largest = TRUE, sorted = TRUE)

    Arguments

    @@ -237,12 +269,12 @@ of the elements in the original input tensor.

    k elements are themselves sorted

    Examples

    -
    if (torch_is_installed()) { +
    if (torch_is_installed()) { -x = torch_arange(1., 6.) -x -torch_topk(x, 3) -} +x = torch_arange(1., 6.) +x +torch_topk(x, 3) +}
    #> [[1]] #> torch_tensor #> 5 diff --git a/reference/torch_trace.html b/reference/torch_trace.html index fdb85b552c25d0687da248c97e60c6860e263430..30f0f65515d48f73ef238436d0470ed7c3bbab33 100644 --- a/reference/torch_trace.html +++ b/reference/torch_trace.html @@ -38,6 +38,8 @@ + + @@ -55,6 +57,16 @@ + + + + @@ -72,7 +84,7 @@ torch - 0.0.3 + 0.1.0
@@ -195,7 +227,7 @@

    Trace

    -
    torch_trace(self)
    +
    torch_trace(self)

    Arguments

    @@ -214,12 +246,12 @@

    Returns the sum of the elements of the diagonal of the input 2-D matrix.

    Examples

    -
    if (torch_is_installed()) { +
    if (torch_is_installed()) { -x = torch_arange(1., 10.)$view(c(3, 3)) -x -torch_trace(x) -} +x = torch_arange(1., 10.)$view(c(3, 3)) +x +torch_trace(x) +}
    #> torch_tensor #> 15 #> [ CPUFloatType{} ]
    diff --git a/reference/torch_transpose.html b/reference/torch_transpose.html index 34b09b098b4a9d97a9c82b617c6d63cafa64e49a..3494d3a4a5fdccabc1468db11939e4fee558dbb5 100644 --- a/reference/torch_transpose.html +++ b/reference/torch_transpose.html @@ -38,6 +38,8 @@ + + @@ -55,6 +57,16 @@ + + + + @@ -72,7 +84,7 @@ torch - 0.0.3 + 0.1.0
@@ -195,7 +227,7 @@

    Transpose

    -
    torch_transpose(self, dim0, dim1)
    +
    torch_transpose(self, dim0, dim1)

    Arguments

    @@ -226,16 +258,16 @@ The given dimensions dim0 and dim1 are swapped.

    of the other.

    Examples

    -
    if (torch_is_installed()) { +
    if (torch_is_installed()) { -x = torch_randn(c(2, 3)) -x -torch_transpose(x, 1, 2) -} +x = torch_randn(c(2, 3)) +x +torch_transpose(x, 1, 2) +}
    #> torch_tensor -#> 0.4633 -0.6867 -#> -1.8115 0.4476 -#> -1.5475 1.2365 +#> -1.6599 -0.1619 +#> -0.2997 -0.2021 +#> 1.1402 0.2918 #> [ CPUFloatType{3,2} ]
@@ -195,7 +227,7 @@

    Trapz

    -
    torch_trapz(y, dx = 1L, x, dim = -1L)
    +
    torch_trapz(y, dx = 1L, x, dim = -1L)

    Arguments

    @@ -232,17 +264,17 @@

    As above, but the sample points are spaced uniformly at a distance of dx.

    Examples

    -
    if (torch_is_installed()) { +
    if (torch_is_installed()) { -y = torch_randn(list(2, 3)) -y -x = torch_tensor(matrix(c(1, 3, 4, 1, 2, 3), ncol = 3, byrow=TRUE)) -torch_trapz(y, x = x) +y = torch_randn(list(2, 3)) +y +x = torch_tensor(matrix(c(1, 3, 4, 1, 2, 3), ncol = 3, byrow=TRUE)) +torch_trapz(y, x = x) -} +}
    #> torch_tensor -#> -1.7562 -#> -0.8343 +#> -3.0516 +#> -1.3811 #> [ CPUFloatType{2} ]
@@ -195,13 +227,13 @@

    Triangular_solve

    -
    torch_triangular_solve(
    -  self,
    -  A,
    -  upper = TRUE,
    -  transpose = FALSE,
    -  unitriangular = FALSE
    -)
    +
    torch_triangular_solve(
    +  self,
    +  A,
    +  upper = TRUE,
    +  transpose = FALSE,
    +  unitriangular = FALSE
    +)

    Arguments

    @@ -242,24 +274,24 @@ batches of 2D matrices. If the inputs are batches, then returns batched outputs X

    Examples

    -
    if (torch_is_installed()) { - -A = torch_randn(c(2, 2))$triu() -A -b = torch_randn(c(2, 3)) -b -torch_triangular_solve(b, A) -} +
    if (torch_is_installed()) { + +A = torch_randn(c(2, 2))$triu() +A +b = torch_randn(c(2, 3)) +b +torch_triangular_solve(b, A) +}
    #> [[1]] #> torch_tensor -#> 5.9921 7.3633 6.5760 -#> 0.6631 3.9283 2.7029 +#> -1.4151 1.2923 0.9190 +#> -0.3280 1.1489 -0.3906 #> [ CPUFloatType{2,3} ] #> #> [[2]] #> torch_tensor -#> 0.1910 0.0582 -#> 0.0000 0.2591 +#> 1.2681 -0.4428 +#> 0.0000 -1.2994 #> [ CPUFloatType{2,2} ] #>
    diff --git a/reference/torch_tril.html b/reference/torch_tril.html index 9fac64c1bacc3359fae381ab6f74915941b045d4..c0a377d18e7b14932e2d62766b36c089af87c6df 100644 --- a/reference/torch_tril.html +++ b/reference/torch_tril.html @@ -38,6 +38,8 @@ + + @@ -55,6 +57,16 @@ + + + + @@ -72,7 +84,7 @@ torch - 0.0.3 + 0.1.0 @@ -145,6 +157,9 @@
@@ -195,7 +227,7 @@

    Tril

    -
    torch_tril(self, diagonal = 0L)
    +
    torch_tril(self, diagonal = 0L)

    Arguments

@@ -228,21 +260,21 @@ the main diagonal. The main diagonal is the set of indices \((i, i)\) for \(i \in [0, \min\{d_{1}, d_{2}\} - 1]\), where \(d_{1}, d_{2}\) are the dimensions of the matrix.

    Examples

    -
    if (torch_is_installed()) { - -a = torch_randn(c(3, 3)) -a -torch_tril(a) -b = torch_randn(c(4, 6)) -b -torch_tril(b, diagonal=1) -torch_tril(b, diagonal=-1) -} +
    if (torch_is_installed()) { + +a = torch_randn(c(3, 3)) +a +torch_tril(a) +b = torch_randn(c(4, 6)) +b +torch_tril(b, diagonal=1) +torch_tril(b, diagonal=-1) +}
    #> torch_tensor #> 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 -#> -0.0705 0.0000 0.0000 0.0000 0.0000 0.0000 -#> 1.4173 0.3856 0.0000 0.0000 0.0000 0.0000 -#> 1.3653 -1.3079 -1.1473 0.0000 0.0000 0.0000 +#> 0.4164 0.0000 0.0000 0.0000 0.0000 0.0000 +#> 0.6462 -0.3278 0.0000 0.0000 0.0000 0.0000 +#> 0.0475 1.0216 -0.9426 0.0000 0.0000 0.0000 #> [ CPUFloatType{4,6} ]
@@ -195,14 +227,14 @@

    Tril_indices

    -
    torch_tril_indices(
    -  row,
    -  col,
    -  offset = 0,
    -  dtype = torch_long(),
    -  device = "cpu",
    -  layout = torch_strided()
    -)
    +
    torch_tril_indices(
    +  row,
    +  col,
    +  offset = 0,
    +  dtype = torch_long(),
    +  device = "cpu",
    +  layout = torch_strided()
    +)

    Arguments

@@ -260,16 +292,16 @@ the main diagonal. The main diagonal is the set of indices \((i, i)\) for \(i \in [0, \min\{d_{1}, d_{2}\} - 1]\), where \(d_{1}, d_{2}\) are the dimensions of the matrix.

    Examples

    -
    if (torch_is_installed()) { -if (FALSE) { -a = torch_tril_indices(3, 3) -a -a = torch_tril_indices(4, 3, -1) -a -a = torch_tril_indices(4, 3, 1) -a -} -} +
    if (torch_is_installed()) { +if (FALSE) { +a = torch_tril_indices(3, 3) +a +a = torch_tril_indices(4, 3, -1) +a +a = torch_tril_indices(4, 3, 1) +a +} +}
@@ -195,7 +227,7 @@

    Triu

    -
    torch_triu(self, diagonal = 0L)
    +
    torch_triu(self, diagonal = 0L)

    Arguments

@@ -228,23 +260,23 @@ the main diagonal. The main diagonal is the set of indices \((i, i)\) for \(i \in [0, \min\{d_{1}, d_{2}\} - 1]\), where \(d_{1}, d_{2}\) are the dimensions of the matrix.

    Examples

    -
    if (torch_is_installed()) { - -a = torch_randn(c(3, 3)) -a -torch_triu(a) -torch_triu(a, diagonal=1) -torch_triu(a, diagonal=-1) -b = torch_randn(c(4, 6)) -b -torch_triu(b, diagonal=1) -torch_triu(b, diagonal=-1) -} +
    if (torch_is_installed()) { + +a = torch_randn(c(3, 3)) +a +torch_triu(a) +torch_triu(a, diagonal=1) +torch_triu(a, diagonal=-1) +b = torch_randn(c(4, 6)) +b +torch_triu(b, diagonal=1) +torch_triu(b, diagonal=-1) +}
    #> torch_tensor -#> 0.8511 0.3136 -0.8565 -0.3131 0.8333 -1.6256 -#> 0.6987 -0.8917 2.5117 1.6975 -0.5125 1.6937 -#> 0.0000 0.6684 1.6408 0.0282 0.3932 -1.6401 -#> 0.0000 0.0000 -0.9555 0.2990 0.3913 -0.5259 +#> -0.0346 0.4857 -0.6240 -0.0739 -1.5083 0.9504 +#> -0.6462 0.9285 0.0715 -0.7684 -0.3846 -1.2153 +#> 0.0000 0.7828 0.0337 -1.4075 -0.0488 -1.0085 +#> 0.0000 0.0000 1.3318 0.4527 0.3162 -0.7587 #> [ CPUFloatType{4,6} ]
@@ -195,14 +227,14 @@

    Triu_indices

    -
    torch_triu_indices(
    -  row,
    -  col,
    -  offset = 0,
    -  dtype = torch_long(),
    -  device = "cpu",
    -  layout = torch_strided()
    -)
    +
    torch_triu_indices(
    +  row,
    +  col,
    +  offset = 0,
    +  dtype = torch_long(),
    +  device = "cpu",
    +  layout = torch_strided()
    +)

    Arguments

@@ -260,16 +292,16 @@ the main diagonal. The main diagonal is the set of indices \((i, i)\) for \(i \in [0, \min\{d_{1}, d_{2}\} - 1]\), where \(d_{1}, d_{2}\) are the dimensions of the matrix.

    Examples

    -
    if (torch_is_installed()) { -if (FALSE) { -a = torch_triu_indices(3, 3) -a -a = torch_triu_indices(4, 3, -1) -a -a = torch_triu_indices(4, 3, 1) -a -} -} +
    if (torch_is_installed()) { +if (FALSE) { +a = torch_triu_indices(3, 3) +a +a = torch_triu_indices(4, 3, -1) +a +a = torch_triu_indices(4, 3, 1) +a +} +}
@@ -195,7 +227,7 @@

    TRUE_divide

    -
    torch_true_divide(self, other)
    +
    torch_true_divide(self, other)

    Arguments

@@ -224,13 +256,13 @@ in which case they are cast to the default (floating) scalar type before the division. $$ \text{out}_{i} = \frac{\text{self}_{i}}{\text{other}_{i}} $$

    Examples

    -
    if (torch_is_installed()) { +
    if (torch_is_installed()) { -dividend = torch_tensor(c(5, 3), dtype=torch_int()) -divisor = torch_tensor(c(3, 2), dtype=torch_int()) -torch_true_divide(dividend, divisor) -torch_true_divide(dividend, 2) -} +dividend = torch_tensor(c(5, 3), dtype=torch_int()) +divisor = torch_tensor(c(3, 2), dtype=torch_int()) +torch_true_divide(dividend, divisor) +torch_true_divide(dividend, 2) +}
    #> torch_tensor #> 2.5000 #> 1.5000 diff --git a/reference/torch_trunc.html b/reference/torch_trunc.html index bf9f628f516910690e5f9ca0d6d7073356a94563..5be1898f68bc0a81c877b6af3338da80fd3e526e 100644 --- a/reference/torch_trunc.html +++ b/reference/torch_trunc.html @@ -38,6 +38,8 @@ + + @@ -55,6 +57,16 @@ + + + + @@ -72,7 +84,7 @@ torch - 0.0.3 + 0.1.0
@@ -195,7 +227,7 @@

    Trunc

    -
    torch_trunc(self)
    +
    torch_trunc(self)

    Arguments

    @@ -215,17 +247,17 @@ the elements of input.

    Examples

    -
    if (torch_is_installed()) { +
    if (torch_is_installed()) { -a = torch_randn(c(4)) -a -torch_trunc(a) -} +a = torch_randn(c(4)) +a +torch_trunc(a) +}
    #> torch_tensor +#> 0 #> -0 -#> -0 -#> -1 -#> 1 +#> 2 +#> 0 #> [ CPUFloatType{4} ]
@@ -195,7 +227,7 @@

    Unbind

    -
    torch_unbind(self, dim = 1L)
    +
    torch_unbind(self, dim = 1L)

    Arguments


Returns a list of all slices along a given dimension, already without it.

    Examples

if (torch_is_installed()) {

torch_unbind(torch_tensor(matrix(1:9, ncol = 3, byrow=TRUE)))
}
#> [[1]]
#> torch_tensor
#>  1
#>  2
#>  3
#> [ CPULongType{3} ]
#>
#> [[2]]
#> torch_tensor
#>  4
#>  5
#>  6
#> [ CPULongType{3} ]
#>
#> [[3]]
#> torch_tensor
#>  7
#>  8
#>  9
#> [ CPULongType{3} ]
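The same call with dim = 2 slices out the columns instead of the rows; a short sketch (assuming torch is installed):

m <- torch_tensor(matrix(1:9, ncol = 3, byrow = TRUE))
torch_unbind(m, dim = 2)   # a list of three length-3 column tensors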

    Unique_consecutive

torch_unique_consecutive(
  self,
  return_inverse = FALSE,
  return_counts = FALSE,
  dim = NULL
)

    Arguments


    Examples

if (torch_is_installed()) {
x = torch_tensor(c(1, 1, 2, 2, 3, 1, 1, 2))
output = torch_unique_consecutive(x)
output
torch_unique_consecutive(x, return_inverse=TRUE)
torch_unique_consecutive(x, return_counts=TRUE)
}
#> [[1]]
#> torch_tensor
#>  1
#>  2
#>  3
#>  1
#>  2
#> [ CPUFloatType{5} ]
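Note that, unlike a full unique, only consecutive repeats are collapsed, so values that reappear later are kept again; a brief sketch (assuming torch is installed):

x <- torch_tensor(c(1, 1, 2, 2, 3, 1, 1, 2))
torch_unique_consecutive(x)[[1]]   # 1 2 3 1 2: the later 1s and 2 survive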

    Unsqueeze

torch_unsqueeze(self, dim)

    Arguments

A dim value within the range [-input.dim() - 1, input.dim() + 1) can be used. Negative dim will correspond to unsqueeze applied at dim = dim + input.dim() + 1.

    Examples

if (torch_is_installed()) {

x = torch_tensor(c(1, 2, 3, 4))
torch_unsqueeze(x, 1)
torch_unsqueeze(x, 2)
}
#> torch_tensor
#>  1
#>  2
#>  3
#>  4
#> [ CPUFloatType{4,1} ]
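A shape-only sketch (assuming torch is installed) makes the effect of the insertion position explicit:

x <- torch_tensor(c(1, 2, 3, 4))
torch_unsqueeze(x, 1)$shape   # 1 4: new leading dimension
torch_unsqueeze(x, 2)$shape   # 4 1: new trailing dimension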

    Var

torch_var(self, dim, unbiased = TRUE, keepdim = FALSE)

    Arguments

Otherwise, dim is squeezed, resulting in the output tensor having 1 (or len(dim)) fewer dimension(s).

If unbiased is FALSE, then the variance will be calculated via the biased estimator. Otherwise, Bessel's correction will be used.

    Examples

if (torch_is_installed()) {

a = torch_randn(c(1, 3))
a
torch_var(a)

a = torch_randn(c(4, 4))
a
torch_var(a, 1)
}
#> torch_tensor
#>  0.2728
#>  1.5850
#>  0.9337
#>  2.5722
#> [ CPUFloatType{4} ]
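To see the unbiased argument numerically, a deterministic sketch (assuming torch is installed) with a sample of size 4 and mean 2.5:

x <- torch_tensor(c(1, 2, 3, 4))
torch_var(x)                     # 1.6667: sum of squared deviations (5) / (n - 1)
torch_var(x, unbiased = FALSE)   # 1.25:   sum of squared deviations (5) / n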

    Var_mean

torch_var_mean(self, dim, unbiased = TRUE, keepdim = FALSE)

    Arguments

Otherwise, dim is squeezed, resulting in the output tensor having 1 (or len(dim)) fewer dimension(s).

If unbiased is FALSE, then the variance will be calculated via the biased estimator. Otherwise, Bessel's correction will be used.

    Examples

if (torch_is_installed()) {

a = torch_randn(c(1, 3))
a
torch_var_mean(a)

a = torch_randn(c(4, 4))
a
torch_var_mean(a, 1)
}
#> [[1]]
#> torch_tensor
#>  1.9048
#>  0.7948
#>  0.6549
#>  0.2514
#> [ CPUFloatType{4} ]
#>
#> [[2]]
#> torch_tensor
#> -0.2047
#> -0.4955
#> -0.5893
#>  0.0395
#> [ CPUFloatType{4} ]
#>
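As the output above suggests, the result is a two-element list; a minimal sketch (assuming torch is installed) of pulling the pieces apart:

res <- torch_var_mean(torch_tensor(c(1, 2, 3, 4)))
res[[1]]   # the variance (unbiased by default)
res[[2]]   # the mean, 2.5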

    Where

torch_where(condition, self, other)

    Arguments

$$
\mbox{out}_i = \begin{cases}
\mbox{self}_i & \mbox{if } \mbox{condition}_i \\
\mbox{other}_i & \mbox{otherwise}
\end{cases}
$$

torch_where(condition) is identical to torch_nonzero(condition, as_tuple=TRUE).

    Examples

if (torch_is_installed()) {

if (FALSE) {
x = torch_randn(c(3, 2))
y = torch_ones(c(3, 2))
x
torch_where(x > 0, x, y)
}

}
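Since the shipped example is wrapped in if (FALSE), here is a deterministic runnable sketch (assuming torch is installed) of the same selection pattern:

x <- torch_tensor(c(-1, 2, -3))
y <- torch_zeros(3)
torch_where(x > 0, x, y)   # 0 2 0: takes x where the condition holds, y elsewhere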

    Zeros

torch_zeros(
  ...,
  names = NULL,
  dtype = NULL,
  layout = torch_strided(),
  device = NULL,
  requires_grad = FALSE
)

    Arguments

Returns a tensor filled with the scalar value 0, with the shape defined by the variable argument size.

    Examples

if (torch_is_installed()) {

torch_zeros(c(2, 3))
torch_zeros(c(5))
}
#> torch_tensor
#>  0
#>  0
#>  0
#>  0
#>  0
#> [ CPUFloatType{5} ]
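The remaining arguments customize the new tensor; a short sketch (assuming torch is installed):

torch_zeros(2, 3, dtype = torch_long())    # integer (long) zeros
torch_zeros(2, 3, requires_grad = TRUE)    # zeros tracked by autograd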

    Zeros_like

torch_zeros_like(
  input,
  dtype = NULL,
  layout = torch_strided(),
  device = NULL,
  requires_grad = FALSE,
  memory_format = torch_preserve_format()
)

    Arguments

the old torch_zeros_like(input, out=output) is equivalent to torch_zeros(input.size(), out=output).

    Examples

if (torch_is_installed()) {

input = torch_empty(c(2, 3))
torch_zeros_like(input)
}
#> torch_tensor
#>  0  0  0
#>  0  0  0
#> [ CPUFloatType{2,3} ]
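Tensor properties follow input unless overridden explicitly; a small sketch (assuming torch is installed):

input <- torch_empty(c(2, 3))
torch_zeros_like(input)$dtype                         # same dtype as input
torch_zeros_like(input, dtype = torch_long())$dtype   # overridden to long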
Enables gradient calculation, if it has been disabled via with_no_grad.

with_enable_grad(code)

    Arguments

Examples

if (torch_is_installed()) {

x <- torch_tensor(1, requires_grad=TRUE)
with_no_grad({
  with_enable_grad({
    y = x * 2
  })
})
y$backward()
x$grad

}
#> torch_tensor
#>  2
#> [ CPUFloatType{1} ]

    Temporarily modify gradient recording.

with_no_grad(code)

    Arguments


    Examples

if (torch_is_installed()) {
x <- torch_tensor(runif(5), requires_grad = TRUE)
with_no_grad({
  x$sub_(torch_tensor(as.numeric(1:5)))
})
x
x$grad

}
#> torch_tensor
#> [ Tensor (undefined) ]
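A common use is skipping graph construction during evaluation; a minimal sketch (assuming torch is installed):

w <- torch_tensor(2, requires_grad = TRUE)
with_no_grad({
  out <- w * 3
})
out$requires_grad   # FALSE: the multiplication was not recorded by autograd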