rithwiks committed on
Commit
89a696f
1 Parent(s): bb07848

uploaded filtered Keck files, made splits, made utils

This view is limited to 50 files because it contains too many changes; see the raw diff for the complete change set.
Files changed (50)
  1. GBI-16-2D.py +3 -526
  2. data/LR.20051204.41155.fits +3 -0
  3. data/LR.20051204.41651.fits +3 -0
  4. data/LR.20051204.43259.fits +3 -0
  5. data/LR.20051204.43899.fits +3 -0
  6. data/LR.20051204.46034.fits +3 -0
  7. data/LR.20051204.47387.fits +3 -0
  8. data/LR.20051204.49021.fits +3 -0
  9. data/LR.20051204.51257.fits +3 -0
  10. data/LR.20051204.53196.fits +3 -0
  11. data/LR.20051204.54066.fits +3 -0
  12. data/LR.20051204.56002.fits +3 -0
  13. data/LR.20051204.57105.fits +3 -0
  14. data/LR.20051204.57873.fits +3 -0
  15. data/LR.20060530.30214.fits +3 -0
  16. data/LR.20060530.32407.fits +3 -0
  17. data/LR.20060530.36483.fits +3 -0
  18. data/LR.20060530.43065.fits +3 -0
  19. data/LR.20060530.45164.fits +3 -0
  20. data/LR.20060530.46025.fits +3 -0
  21. data/LR.20060530.48970.fits +3 -0
  22. data/LR.20060530.50806.fits +3 -0
  23. data/LR.20060530.51656.fits +3 -0
  24. data/LR.20060531.46897.fits +3 -0
  25. data/LR.20060531.49568.fits +3 -0
  26. data/LR.20060531.50684.fits +3 -0
  27. data/LR.20060531.50878.fits +3 -0
  28. data/LR.20060725.29836.fits +3 -0
  29. data/LR.20060725.37294.fits +3 -0
  30. data/LR.20060725.42247.fits +3 -0
  31. data/LR.20060725.44412.fits +3 -0
  32. data/LR.20060725.46740.fits +3 -0
  33. data/LR.20060725.47513.fits +3 -0
  34. data/LR.20060725.49810.fits +3 -0
  35. data/LR.20060726.41842.fits +3 -0
  36. data/LR.20060726.48303.fits +3 -0
  37. data/LR.20060726.49184.fits +3 -0
  38. data/LR.20060921.21065.fits +3 -0
  39. data/LR.20060921.30235.fits +3 -0
  40. data/LR.20060921.30742.fits +3 -0
  41. data/LR.20060921.31853.fits +3 -0
  42. data/LR.20060921.33371.fits +3 -0
  43. data/LR.20060921.43710.fits +3 -0
  44. data/LR.20061121.19974.fits +3 -0
  45. data/LR.20061121.27414.fits +3 -0
  46. data/LR.20061121.49514.fits +3 -0
  47. data/LR.20070416.21338.fits +3 -0
  48. data/LR.20070416.24302.fits +3 -0
  49. data/{LR.20090219.53662.fits → LR.20070416.35505.fits} +1 -1
  50. data/LR.20070416.41356.fits +3 -0
GBI-16-2D.py CHANGED
@@ -13,6 +13,8 @@ from huggingface_hub import hf_hub_download
 import datasets
 from datasets import DownloadManager
 
+from utils import read_lris
+
 
 _DESCRIPTION = (
     """SBI-16-2D is a dataset which is part of the AstroCompress project. """
@@ -167,529 +169,4 @@ class GBI_16_2D(datasets.GeneratorBasedBuilder):
                 else:
                     data = hdul[0].data
                 image_data = data[:, :]
-                yield task_instance_key, {**{"image": image_data}, **item}
-
-
-def make_split_jsonl_files(
-    config_type="tiny", data_dir="./data", outdir="./splits", seed=42
-):
-    """
-    Create jsonl files for the GBI-16-2D dataset.
-
-    config_type: str, default="tiny"
-        The type of split to create. Options are "tiny" and "full".
-    data_dir: str, default="./data"
-        The directory where the FITS files are located.
-    outdir: str, default="./splits"
-        The directory where the jsonl files will be created.
-    seed: int, default=42
-        The seed for the random split.
-    """
-    random.seed(seed)
-    os.makedirs(outdir, exist_ok=True)
-
-    fits_files = glob(os.path.join(data_dir, "*.fits"))
-    random.shuffle(fits_files)
-    if config_type == "tiny":
-        train_files = fits_files[:2]
-        test_files = fits_files[2:3]
-    elif config_type == "full":
-        split_idx = int(0.8 * len(fits_files))
-        train_files = fits_files[:split_idx]
-        test_files = fits_files[split_idx:]
-    else:
-        raise ValueError("Unsupported config_type. Use 'tiny' or 'full'.")
-
-    def create_jsonl(files, split_name):
-        output_file = os.path.join(outdir, f"{config_type}_{split_name}.jsonl")
-        with open(output_file, "w") as out_f:
-            for file in files:
-                print(file, flush=True, end="...")
-                image_id = os.path.basename(file).split(".fits")[0]
-                with fits.open(file, memmap=False) as hdul:
-                    if len(hdul) > 1:
-                        # multiextension ... paste together
-                        data, header = read_lris(file)
-                        dim_1 = data.shape[0]
-                        dim_2 = data.shape[1]
-                        header = fits.header.Header(header)
-                    else:
-                        dim_1 = hdul[0].header.get("NAXIS1", 0)
-                        dim_2 = hdul[0].header.get("NAXIS2", 0)
-                        header = hdul[0].header
-
-                    ras = header.get("RA", "0")
-                    ra = float(
-                        Angle(f"{ras} hours").to_string(unit=u.degree, decimal=True)
-                    )
-                    decs = header.get("DEC", "0")
-                    dec = float(
-                        Angle(f"{decs} degrees").to_string(unit=u.degree, decimal=True)
-                    )
-                    pixscale = header.get("CD1_2", 0.135)
-                    rotation = header.get("ROTPOSN", 0.0)
-                    exposure_time = header.get("TTIME", 0.0)
-                item = {
-                    "image_id": image_id,
-                    "image": file,
-                    "ra": ra,
-                    "dec": dec,
-                    "pixscale": pixscale,
-                    "rotation_angle": rotation,
-                    "dim_1": dim_1,
-                    "dim_2": dim_2,
-                    "exposure_time": exposure_time,
-                }
-                out_f.write(json.dumps(item) + "\n")
-
-    create_jsonl(train_files, "train")
-    create_jsonl(test_files, "test")
-
-
-def read_lris(raw_file, det=None, TRIM=False):
-    """
-    Modified from pypeit.spectrographs.keck_lris.read_lris -- Jon Brown, Josh Bloom
-    cf. https://github.com/KerryPaterson/Imaging_pipelines
-
-    Read a raw LRIS data frame (one or more detectors)
-    Packed in a multi-extension HDU
-    Based on readmhdufits.pro
-
-    Parameters
-    ----------
-    raw_file : str
-        Filename
-    det : int, optional
-        Detector number; Default = both
-    TRIM : bool, optional
-        Trim the image?
-
-    Returns
-    -------
-    array : ndarray
-        Combined image
-    header : FITS header
-    sections : list
-        List of datasec, oscansec, ampsec sections
-    """
-
-    hdu = fits.open(raw_file)
-    head0 = hdu[0].header
-
-    # Get post, pre-pix values
-    precol = head0["PRECOL"]
-    postpix = head0["POSTPIX"]
-    preline = head0["PRELINE"]
-    postline = head0["POSTLINE"]
-
-    # get the detector
-    # this just checks if its the blue one and assumes red if not
-    # note the red fits headers don't even have this keyword???
-    if head0["INSTRUME"] == "LRISBLUE":
-        redchip = False
-    else:
-        redchip = True
-
-    # Setup for datasec, oscansec
-    dsec = []
-    osec = []
-    nxdata_sum = 0
-
-    # get the x and y binning factors...
-    binning = head0["BINNING"]
-    xbin, ybin = [int(ibin) for ibin in binning.split(",")]
-
-    # First read over the header info to determine the size of the output array...
-    n_ext = len(hdu) - 1  # Number of extensions (usually 4)
-    xcol = []
-    xmax = 0
-    ymax = 0
-    xmin = 10000
-    ymin = 10000
-    for i in np.arange(1, n_ext + 1):
-        theader = hdu[i].header
-        detsec = theader["DETSEC"]
-        if detsec != "0":
-            # parse the DETSEC keyword to determine the size of the array.
-            x1, x2, y1, y2 = np.array(load_sections(detsec, fmt_iraf=False)).flatten()
-
-            # find the range of detector space occupied by the data
-            # [xmin:xmax,ymin:ymax]
-            xt = max(x2, x1)
-            xmax = max(xt, xmax)
-            yt = max(y2, y1)
-            ymax = max(yt, ymax)
-
-            # find the min size of the array
-            xt = min(x1, x2)
-            xmin = min(xmin, xt)
-            yt = min(y1, y2)
-            ymin = min(ymin, yt)
-            # Save
-            xcol.append(xt)
-
-    # determine the output array size...
-    nx = xmax - xmin + 1
-    ny = ymax - ymin + 1
-
-    # change size for binning...
-    nx = nx // xbin
-    ny = ny // ybin
-
-    # Update PRECOL and POSTPIX
-    precol = precol // xbin
-    postpix = postpix // xbin
-
-    # Deal with detectors
-    if det in [1, 2]:
-        nx = nx // 2
-        n_ext = n_ext // 2
-        det_idx = np.arange(n_ext, dtype=np.int) + (det - 1) * n_ext
-    elif det is None:
-        det_idx = np.arange(n_ext).astype(int)
-    else:
-        raise ValueError("Bad value for det")
-
-    # change size for pre/postscan...
-    if not TRIM:
-        nx += n_ext * (precol + postpix)
-        ny += preline + postline
-
-    # allocate output array...
-    array = np.zeros((nx, ny), dtype="uint16")
-    gain_array = np.zeros((nx, ny), dtype="uint16")
-    order = np.argsort(np.array(xcol))
-
-    # insert extensions into master image...
-    for kk, i in enumerate(order[det_idx]):
-
-        # grab complete extension...
-        data, gaindata, predata, postdata, x1, y1 = lris_read_amp(
-            hdu, i + 1, redchip=redchip
-        )
-
-        # insert components into output array...
-        if not TRIM:
-            # insert predata...
-            buf = predata.shape
-            nxpre = buf[0]
-            xs = kk * precol
-            xe = xs + nxpre
-
-            array[xs:xe, :] = predata
-            gain_array[xs:xe, :] = predata
-
-            # insert data...
-            buf = data.shape
-            nxdata = buf[0]
-            nydata = buf[1]
-
-            # JB: have to track the number of xpixels
-            xs = n_ext * precol + nxdata_sum
-            xe = xs + nxdata
-
-            # now log how many pixels that was
-            nxdata_sum += nxdata
-
-            # Data section
-            # section = '[{:d}:{:d},{:d}:{:d}]'.format(preline,nydata-postline, xs, xe)  # Eliminate lines
-            section = "[{:d}:{:d},{:d}:{:d}]".format(
-                preline, nydata, xs, xe
-            )  # DONT eliminate lines
-
-            dsec.append(section)
-            array[xs:xe, :] = data  # Include postlines
-            gain_array[xs:xe, :] = gaindata  # Include postlines
-
-            # ; insert postdata...
-            buf = postdata.shape
-            nxpost = buf[0]
-            xs = nx - n_ext * postpix + kk * postpix
-            xe = xs + nxpost
-            section = "[:,{:d}:{:d}]".format(xs, xe)
-            osec.append(section)
-
-            array[xs:xe, :] = postdata
-            gain_array[xs:xe, :] = postdata
-
-        else:
-            buf = data.shape
-            nxdata = buf[0]
-            nydata = buf[1]
-
-            xs = (x1 - xmin) // xbin
-            xe = xs + nxdata
-            ys = (y1 - ymin) // ybin
-            ye = ys + nydata - postline
-
-            yin1 = preline
-            yin2 = nydata - postline
-
-            array[xs:xe, ys:ye] = data[:, yin1:yin2]
-            gain_array[xs:xe, ys:ye] = gaindata[:, yin1:yin2]
-
-    # make sure BZERO is a valid integer for IRAF
-    obzero = head0["BZERO"]
-    head0["O_BZERO"] = obzero
-    head0["BZERO"] = 32768 - obzero
-
-    # Return, transposing array back to goofy Python indexing
-    return array.T, head0
-
-
-def lris_read_amp(inp, ext, redchip=False, applygain=True):
-    """
-    Modified from pypeit.spectrographs.keck_lris.lris_read_amp -- Jon Brown, Josh Bloom
-    cf. https://github.com/KerryPaterson/Imaging_pipelines
-    Read one amplifier of an LRIS multi-extension FITS image
-
-    Parameters
-    ----------
-    inp: tuple
-        (str,int) filename, extension
-        (hdu,int) FITS hdu, extension
-
-    Returns
-    -------
-    data
-    predata
-    postdata
-    x1
-    y1
-
-    ;------------------------------------------------------------------------
-    function lris_read_amp, filename, ext, $
-        linebias=linebias, nobias=nobias, $
-        predata=predata, postdata=postdata, header=header, $
-        x1=x1, x2=x2, y1=y1, y2=y2, GAINDATA=gaindata
-    ;------------------------------------------------------------------------
-    ; Read one amp from LRIS mHDU image
-    ;------------------------------------------------------------------------
-    """
-    # Parse input
-    if isinstance(inp, str):
-        hdu = fits.open(inp)
-    else:
-        hdu = inp
-
-    # Get the pre and post pix values
-    # for LRIS red POSTLINE = 20, POSTPIX = 80, PRELINE = 0, PRECOL = 12
-    head0 = hdu[0].header
-    precol = head0["precol"]
-    postpix = head0["postpix"]
-
-    # Deal with binning
-    binning = head0["BINNING"]
-    xbin, ybin = [int(ibin) for ibin in binning.split(",")]
-    precol = precol // xbin
-    postpix = postpix // xbin
-
-    # get entire extension...
-    temp = hdu[ext].data.transpose()  # Silly Python nrow,ncol formatting
-    tsize = temp.shape
-    nxt = tsize[0]
-
-    # parse the DETSEC keyword to determine the size of the array.
-    header = hdu[ext].header
-    detsec = header["DETSEC"]
-    x1, x2, y1, y2 = np.array(load_sections(detsec, fmt_iraf=False)).flatten()
-
-    # parse the DATASEC keyword to determine the size of the science region (unbinned)
-    datasec = header["DATASEC"]
-    xdata1, xdata2, ydata1, ydata2 = np.array(
-        load_sections(datasec, fmt_iraf=False)
-    ).flatten()
-
-    # grab the components...
-    predata = temp[0:precol, :]
-    # datasec appears to have the x value for the keywords that are zero
-    # based. This is only true in the image header extensions
-    # not true in the main header. They also appear inconsistent between
-    # LRISr and LRISb!
-    # data = temp[xdata1-1:xdata2-1,*]
-    # data = temp[xdata1:xdata2+1, :]
-
-    # JB: LRIS-R is windowed differently, so the default pypeit checks fail
-    # xshape is calculated from datasec.
-    # For blue, its 1024,
-    # For red, the chip dimensions are different AND the observations are windowed
-    # In windowed mode each amplifier has differently sized data sections
-    if not redchip:
-        xshape = 1024 // xbin  # blue
-    else:
-        xshape = xdata2 - xdata1 + 1 // xbin  # red
-
-    # do some sanity checks
-    if (xdata1 - 1) != precol:
-        # msgs.error("Something wrong in LRIS datasec or precol")
-        errStr = "Something wrong in LRIS datasec or precol"
-        print(errStr)
-
-    if (xshape + precol + postpix) != temp.shape[0]:
-        # msgs.error("Wrong size for in LRIS detector somewhere. Funny binning?")
-        errStr = "Wrong size for in LRIS detector somewhere. Funny binning?"
-        print(errStr)
-
-    data = temp[precol : precol + xshape, :]
-    postdata = temp[nxt - postpix : nxt, :]
-
-    # flip in X as needed...
-    if x1 > x2:
-        xt = x2
-        x2 = x1
-        x1 = xt
-        data = np.flipud(data)  # reverse(temporary(data),1)
-
-    # flip in Y as needed...
-    if y1 > y2:
-        yt = y2
-        y2 = y1
-        y1 = yt
-        data = np.fliplr(data)
-        predata = np.fliplr(predata)
-        postdata = np.fliplr(postdata)
-
-    # dummy gain data since we're keeping as uint16
-    gaindata = 0.0 * data + 1.0
-
-    return data, gaindata, predata, postdata, x1, y1
-
-
-def load_sections(string, fmt_iraf=True):
-    """
-    Modified from pypit.core.parse.load_sections -- Jon Brown, Josh Bloom
-    cf. https://github.com/KerryPaterson/Imaging_pipelines
-    From the input string, return the coordinate sections
-
-    Parameters
-    ----------
-    string : str
-        character string of the form [x1:x2,y1:y2]
-        x1 = left pixel
-        x2 = right pixel
-        y1 = bottom pixel
-        y2 = top pixel
-    fmt_iraf : bool
-        Is the variable string in IRAF format (True) or
-        python format (False)
-
-    Returns
-    -------
-    sections : list (or None)
-        the detector sections
-    """
-    xyrng = string.strip("[]()").split(",")
-    if xyrng[0] == ":":
-        xyarrx = [0, 0]
-    else:
-        xyarrx = xyrng[0].split(":")
-        # If a lower/upper limit on the array slicing is not given (e.g. [:100] has no lower index specified),
-        # set the lower/upper limit to be the first/last index.
-        if len(xyarrx[0]) == 0:
-            xyarrx[0] = 0
-        if len(xyarrx[1]) == 0:
-            xyarrx[1] = -1
-    if xyrng[1] == ":":
-        xyarry = [0, 0]
-    else:
-        xyarry = xyrng[1].split(":")
-        # If a lower/upper limit on the array slicing is not given (e.g. [5:] has no upper index specified),
-        # set the lower/upper limit to be the first/last index.
-        if len(xyarry[0]) == 0:
-            xyarry[0] = 0
-        if len(xyarry[1]) == 0:
-            xyarry[1] = -1
-    if fmt_iraf:
-        xmin = max(0, int(xyarry[0]) - 1)
-        xmax = int(xyarry[1])
-        ymin = max(0, int(xyarrx[0]) - 1)
-        ymax = int(xyarrx[1])
-    else:
-        xmin = max(0, int(xyarrx[0]))
-        xmax = int(xyarrx[1])
-        ymin = max(0, int(xyarry[0]))
-        ymax = int(xyarry[1])
-    return [[xmin, xmax], [ymin, ymax]]
-
-
-def sec2slice(
-    subarray, one_indexed=False, include_end=False, require_dim=None, transpose=False
-):
-    """
-    Modified from pypit.core.parse.sec2slice -- Jon Brown
-
-    Convert a string representation of an array subsection (slice) into
-    a list of slice objects.
-
-    Args:
-        subarray (str):
-            The string to convert. Should have the form of normal slice
-            operation, 'start:stop:step'. The parser ignores whether or
-            not the string has the brackets '[]', but the string must
-            contain the appropriate ':' and ',' characters.
-        one_indexed (:obj:`bool`, optional):
-            The string should be interpreted as 1-indexed. Default
-            is to assume python indexing.
-        include_end (:obj:`bool`, optional):
-            **If** the end is defined, adjust the slice such that
-            the last element is included. Default is to exclude the
-            last element as with normal python slicing.
-        require_dim (:obj:`int`, optional):
-            Test if the string indicates the slice along the proper
-            number of dimensions.
-        transpose (:obj:`bool`, optional):
-            Transpose the order of the returned slices. The
-            following are equivalent::
-
-                tslices = parse_sec2slice('[:10,10:]')[::-1]
-                tslices = parse_sec2slice('[:10,10:]', transpose=True)
-
-    Returns:
-        tuple: A tuple of slice objects, one per dimension of the
-        prospective array.
-
-    Raises:
-        TypeError:
-            Raised if the input `subarray` is not a string.
-        ValueError:
-            Raised if the string does not match the required
-            dimensionality or if the string does not look like a
-            slice.
-    """
-    # Check it's a string
-    if not isinstance(subarray, (str, bytes)):
-        raise TypeError("Can only parse string-based subarray sections.")
-    # Remove brackets if they're included
-    sections = subarray.strip("[]").split(",")
-    # Check the dimensionality
-    ndim = len(sections)
-    if require_dim is not None and ndim != require_dim:
-        raise ValueError(
-            "Number of slices ({0}) in {1} does not match ".format(ndim, subarray)
-            + "required dimensions ({0}).".format(require_dim)
-        )
-    # Convert the slice of each dimension from a string to a slice
-    # object
-    slices = []
-    for s in sections:
-        # Must be able to find the colon
-        if ":" not in s:
-            raise ValueError("Unrecognized slice string: {0}".format(s))
-        # Initial conversion
-        _s = [None if x == "" else int(x) for x in s.split(":")]
-        if len(_s) > 3:
-            raise ValueError(
-                "String as too many sections. Must have format 'start:stop:step'."
-            )
-        if len(_s) < 3:
-            # Include step
-            _s += [None]
-        if one_indexed:
-            # Decrement to convert from 1- to 0-indexing
-            _s = [None if x is None else x - 1 for x in _s]
-        if include_end and _s[1] is not None:
-            # Increment to include last
-            _s[1] += 1
-        # Append the new slice
-        slices += [slice(*_s)]
-    return tuple(slices[::-1] if transpose else slices)
+                yield task_instance_key, {**{"image": image_data}, **item}
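
Note: the helpers removed above appear to have moved into a utils module alongside the loading script, given the new `from utils import read_lris` line and the "made utils" commit message. A minimal sketch of how the split manifests might be regenerated from a local checkout, assuming utils.py also re-exports make_split_jsonl_files with the signature shown in the removed code:

# Sketch only: assumes utils.py in this repo exposes make_split_jsonl_files
# (hypothetical re-export of the function removed from GBI-16-2D.py above).
from utils import make_split_jsonl_files

# Rebuild the jsonl split manifests from the FITS files under ./data.
make_split_jsonl_files(config_type="tiny", data_dir="./data", outdir="./splits", seed=42)
make_split_jsonl_files(config_type="full", data_dir="./data", outdir="./splits", seed=42)

The "tiny" config writes a 2-file train / 1-file test split for quick checks, while "full" applies the 80/20 shuffle-and-split shown in the removed code.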
data/LR.20051204.41155.fits ADDED

Git LFS Details

  • SHA256: fe00e8378d4bf7cca584786055e27d30ead80c1b321ad8a6d6985336bb686106
  • Pointer size: 132 Bytes
  • Size of remote file: 9.22 MB
data/LR.20051204.41651.fits ADDED

Git LFS Details

  • SHA256: 9cfce7b9a1c6cca60430bbe3fad5893c00365a6adb57a18f55f4f729d2dcb1d7
  • Pointer size: 132 Bytes
  • Size of remote file: 9.22 MB
data/LR.20051204.43259.fits ADDED

Git LFS Details

  • SHA256: da78a1dff4378597231e970c8d28f1e475a9db5973a17d2509e9d086c027550d
  • Pointer size: 132 Bytes
  • Size of remote file: 9.22 MB
data/LR.20051204.43899.fits ADDED

Git LFS Details

  • SHA256: c27d72267e163e63d3237c02eda7184e1b2fd2d1b067b34fd3c94f28d4aaa90b
  • Pointer size: 132 Bytes
  • Size of remote file: 9.22 MB
data/LR.20051204.46034.fits ADDED

Git LFS Details

  • SHA256: e527c33015da135b5f3afbe229ad6d1d85571ee58c9171c5215d2dda396cfc1b
  • Pointer size: 132 Bytes
  • Size of remote file: 9.22 MB
data/LR.20051204.47387.fits ADDED

Git LFS Details

  • SHA256: 63f21e048e7e2bb1dd19fe8bfcfb02d25d76789e8be492def99308292fa0485b
  • Pointer size: 132 Bytes
  • Size of remote file: 9.22 MB
data/LR.20051204.49021.fits ADDED

Git LFS Details

  • SHA256: b9a93f54d6a83919054435b60df83c2c8b96df90b326bfece1b4336ff23ca213
  • Pointer size: 132 Bytes
  • Size of remote file: 9.22 MB
data/LR.20051204.51257.fits ADDED

Git LFS Details

  • SHA256: 7cfbf9dba5d0bca909a787f2df20b7a8dc347f59ab6e064dc8328d5e6948f148
  • Pointer size: 132 Bytes
  • Size of remote file: 9.22 MB
data/LR.20051204.53196.fits ADDED

Git LFS Details

  • SHA256: 53dce47635f7bb7b196ddf871f91f4032f085c3a6a8414d70303b23d0ca975c7
  • Pointer size: 132 Bytes
  • Size of remote file: 9.22 MB
data/LR.20051204.54066.fits ADDED

Git LFS Details

  • SHA256: 683668370aaa0e02853c76dca8b62955ccabff8ad87c9284ec66ea5aab7132e1
  • Pointer size: 132 Bytes
  • Size of remote file: 9.22 MB
data/LR.20051204.56002.fits ADDED

Git LFS Details

  • SHA256: bbc073bf04a2f17fcd4445870e158a3b07cf4af2545dff026c4c7a863691647f
  • Pointer size: 132 Bytes
  • Size of remote file: 9.22 MB
data/LR.20051204.57105.fits ADDED

Git LFS Details

  • SHA256: 9b53b761b369a31d8709e08ac1f84e46766a534c7c7630744789c5e2e4b39957
  • Pointer size: 132 Bytes
  • Size of remote file: 9.22 MB
data/LR.20051204.57873.fits ADDED

Git LFS Details

  • SHA256: c742eedef558a1116559b3ce03f38204c1290642cc4e628602653b56a2be79ed
  • Pointer size: 132 Bytes
  • Size of remote file: 9.22 MB
data/LR.20060530.30214.fits ADDED

Git LFS Details

  • SHA256: 44095fdc620d680f946bcdba82dcbb8fe481613f6733ebbf1a8484ab3c68deb4
  • Pointer size: 132 Bytes
  • Size of remote file: 4.51 MB
data/LR.20060530.32407.fits ADDED

Git LFS Details

  • SHA256: ade686223bfbef2593c97f2b9b3718092410f457a3f2de3ced67e89147ac6d32
  • Pointer size: 132 Bytes
  • Size of remote file: 4.51 MB
data/LR.20060530.36483.fits ADDED

Git LFS Details

  • SHA256: adb9fcd69c9ec8f56cb8781dce12aa738c94147c9abd64b3c21af4b785e3a366
  • Pointer size: 132 Bytes
  • Size of remote file: 4.51 MB
data/LR.20060530.43065.fits ADDED

Git LFS Details

  • SHA256: 3e1f1ab2c6918061003e74e73331f2737a63a86eccc2088eb94e7052e6c87bd4
  • Pointer size: 132 Bytes
  • Size of remote file: 4.51 MB
data/LR.20060530.45164.fits ADDED

Git LFS Details

  • SHA256: 4e6db4219455aec0acd352899edca36b29e3d25755bacaddaa7627fc96166fe5
  • Pointer size: 132 Bytes
  • Size of remote file: 9.22 MB
data/LR.20060530.46025.fits ADDED

Git LFS Details

  • SHA256: 30e50f08e0605a34563c291eebd288011996766d7832e41786f8ad909657036e
  • Pointer size: 132 Bytes
  • Size of remote file: 9.22 MB
data/LR.20060530.48970.fits ADDED

Git LFS Details

  • SHA256: 1d6ed4558d4953fa2ccc5a73ffe65428d34d7672378f18a78c7c3e89ee69cc2a
  • Pointer size: 132 Bytes
  • Size of remote file: 9.22 MB
data/LR.20060530.50806.fits ADDED

Git LFS Details

  • SHA256: e4dc06c1dcec5b63159979522e60a5560193195bc16c6220e6606f81911a47be
  • Pointer size: 132 Bytes
  • Size of remote file: 9.22 MB
data/LR.20060530.51656.fits ADDED

Git LFS Details

  • SHA256: 0b181aea68a9dd3f327a9ae58700ff041d8237ec85e75742b21aaa1f91e5756e
  • Pointer size: 132 Bytes
  • Size of remote file: 9.22 MB
data/LR.20060531.46897.fits ADDED

Git LFS Details

  • SHA256: aaea37b8e6e86b8690f36e436120346f10788963cb1d28acc94fba812ea4b977
  • Pointer size: 132 Bytes
  • Size of remote file: 9.22 MB
data/LR.20060531.49568.fits ADDED

Git LFS Details

  • SHA256: c6be80f71887d346d3fde2138c4060d6f7a9eaa38095fad3caa2f328291441aa
  • Pointer size: 132 Bytes
  • Size of remote file: 9.22 MB
data/LR.20060531.50684.fits ADDED

Git LFS Details

  • SHA256: 7d32964636fe7de49ff101afacc79500fbd24b8be7fce2046dccdd3c9ddf399e
  • Pointer size: 132 Bytes
  • Size of remote file: 9.22 MB
data/LR.20060531.50878.fits ADDED

Git LFS Details

  • SHA256: 36f36de8e345b062e4a8820b76f33376b6e6f4a74b08d497b0f29f5b32e5a874
  • Pointer size: 132 Bytes
  • Size of remote file: 9.22 MB
data/LR.20060725.29836.fits ADDED

Git LFS Details

  • SHA256: b4895be870d488a1f405dc1fb5431b1f55377d1e47b1f070c01316f514bdb3ad
  • Pointer size: 132 Bytes
  • Size of remote file: 9.22 MB
data/LR.20060725.37294.fits ADDED

Git LFS Details

  • SHA256: 23d2de9b60b87b338f3d5ba77b3dae2c7e0297f2224f95397f0d8d91b60d2b5f
  • Pointer size: 132 Bytes
  • Size of remote file: 9.22 MB
data/LR.20060725.42247.fits ADDED

Git LFS Details

  • SHA256: 2293d89e4c7919c697ce192b5b687b3ad3b33712f37cee791fbb3a1ae9dcedfb
  • Pointer size: 132 Bytes
  • Size of remote file: 9.22 MB
data/LR.20060725.44412.fits ADDED

Git LFS Details

  • SHA256: 35280192a453bcd698317fcea647338645a9af12f223b32b8176cc70a08d0a26
  • Pointer size: 132 Bytes
  • Size of remote file: 9.22 MB
data/LR.20060725.46740.fits ADDED

Git LFS Details

  • SHA256: 72bbbca7084cacd527b0499a6b6f82fe5a7638d58e9c10b1c189b5495bd391c7
  • Pointer size: 132 Bytes
  • Size of remote file: 9.22 MB
data/LR.20060725.47513.fits ADDED

Git LFS Details

  • SHA256: 16e28ed6fb1e0c89595ac9b833caed0338c5eb757cbfa9cd24905d3471e38100
  • Pointer size: 132 Bytes
  • Size of remote file: 9.22 MB
data/LR.20060725.49810.fits ADDED

Git LFS Details

  • SHA256: 37a9f6421a7a722f2042eac082f3abc5231445de1ddd67cd0cd4b521b73247ae
  • Pointer size: 132 Bytes
  • Size of remote file: 9.22 MB
data/LR.20060726.41842.fits ADDED

Git LFS Details

  • SHA256: 213cbc350f617eec22355b3e4bf031a93b005c917b551a167c0f2ff74ab6a295
  • Pointer size: 132 Bytes
  • Size of remote file: 9.22 MB
data/LR.20060726.48303.fits ADDED

Git LFS Details

  • SHA256: a653c7b5ae9e5a64048d951cca2cf7dc25e01a343c731a3a795c202ea278927b
  • Pointer size: 132 Bytes
  • Size of remote file: 9.22 MB
data/LR.20060726.49184.fits ADDED

Git LFS Details

  • SHA256: 8b14ff9de18df25d1e4768b808ee0cda0cf2ab4ab072a6d69a91e0fb18d4eaf4
  • Pointer size: 132 Bytes
  • Size of remote file: 9.22 MB
data/LR.20060921.21065.fits ADDED

Git LFS Details

  • SHA256: e028ca0d981c2d49622456de1421677492251867b089b741d35ddf7c98505448
  • Pointer size: 132 Bytes
  • Size of remote file: 9.22 MB
data/LR.20060921.30235.fits ADDED

Git LFS Details

  • SHA256: 9ee998e53047b9cf00ad647597bc8b0c4f799f83d734c1f342bd67f500f5cb78
  • Pointer size: 132 Bytes
  • Size of remote file: 9.22 MB
data/LR.20060921.30742.fits ADDED

Git LFS Details

  • SHA256: fa838f536e38ca91d48c9951501ecb8d73f05690310e76a03aa029accb7e1901
  • Pointer size: 132 Bytes
  • Size of remote file: 9.22 MB
data/LR.20060921.31853.fits ADDED

Git LFS Details

  • SHA256: 799784836ca6f268f2b8a37f0142915ae6f448fa4838eb87b3ed3923e2b865cb
  • Pointer size: 132 Bytes
  • Size of remote file: 9.22 MB
data/LR.20060921.33371.fits ADDED

Git LFS Details

  • SHA256: de6ec4462cf6b417bac234c87e1ed1ffff7db4dabc782b7c49e0c780329249f3
  • Pointer size: 132 Bytes
  • Size of remote file: 9.22 MB
data/LR.20060921.43710.fits ADDED

Git LFS Details

  • SHA256: c3b99fae2a65f3938c49ade06b92202498292ee8b6f618173103aaad8e208c8d
  • Pointer size: 132 Bytes
  • Size of remote file: 9.22 MB
data/LR.20061121.19974.fits ADDED

Git LFS Details

  • SHA256: e0ca209ad86d6650e260b8ec180f71d24e642178f7266e8e1eac8196fc5ae42b
  • Pointer size: 132 Bytes
  • Size of remote file: 9.22 MB
data/LR.20061121.27414.fits ADDED

Git LFS Details

  • SHA256: 8d6b9d942b6711456b4f84d0a4a5ea25ed2028332a8d90569f0b393e37598306
  • Pointer size: 132 Bytes
  • Size of remote file: 9.22 MB
data/LR.20061121.49514.fits ADDED

Git LFS Details

  • SHA256: 492ae602f871e92b65e1c1398f8e2e2c155bcea95c7fb6e58ed81475c0c28fbd
  • Pointer size: 132 Bytes
  • Size of remote file: 9.22 MB
data/LR.20070416.21338.fits ADDED

Git LFS Details

  • SHA256: 14b39b713966912317fd6ed52c4b31b5deb8773d3ae896beb5ff35ffe834cd08
  • Pointer size: 132 Bytes
  • Size of remote file: 2.32 MB
data/LR.20070416.24302.fits ADDED

Git LFS Details

  • SHA256: f06ea08e3cdbcd9c52c98230853376ddb01ce8f4846cd65c5b8f090de6a62cc3
  • Pointer size: 132 Bytes
  • Size of remote file: 2.32 MB
data/{LR.20090219.53662.fits → LR.20070416.35505.fits} RENAMED
File without changes
data/LR.20070416.41356.fits ADDED

Git LFS Details

  • SHA256: bfb4022b2b423413a9196b5156caca6fb679e9142fed0ad12da1418f3f1445ff
  • Pointer size: 132 Bytes
  • Size of remote file: 9.23 MB
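
For reference, any of the LFS-backed FITS files added in this commit can be pulled and inspected locally along these lines. This is a sketch: the repo id "AstroCompress/GBI-16-2D" is an assumption (substitute the actual dataset repo id), and it assumes astropy and huggingface_hub are installed, as the loading script already imports both.

# Sketch: fetch one newly added FITS file and inspect its HDU structure.
from astropy.io import fits
from huggingface_hub import hf_hub_download

path = hf_hub_download(
    repo_id="AstroCompress/GBI-16-2D",  # assumed repo id
    filename="data/LR.20051204.41155.fits",
    repo_type="dataset",
)
with fits.open(path, memmap=False) as hdul:
    # Keck LRIS frames are typically multi-extension; single-HDU files read directly.
    print(len(hdul), hdul[0].header.get("INSTRUME"))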