@article{karimi2015blending,
  author      = {Karimi, Hooman and Gholami, Ali},
  title       = {Blending and Deblending in Seismic Data Acquisition and Processing},
  journal     = {Journal of the Earth and Space Physics},
  volume      = {41},
  number      = {2},
  pages       = {177--191},
  year        = {2015},
  publisher   = {Institute of Geophysics, University of Tehran},
  issn        = {2538-371X},
  eissn       = {2538-3906},
  doi         = {10.22059/jesphys.2015.52805},
  abstract    = {In current seismic data acquisition techniques, sources are fired with large time intervals in order to avoid interference between the responses of successively fired sources as measured by the receivers. This leads to a time-consuming and expensive survey. Theoretically, the waiting time between two successively fired sources would have to be infinite, since the wavefield never vanishes completely; in practice it varies from a few seconds (s) up to 30 s, meaning that the source responses are assumed negligible after the waiting time. As an example, within a time interval of 200 s, 40 source locations can be fired with a 5 s waiting time, or 20 source locations with a 10 s waiting time. Since decision making at the business level is usually based on minimizing acquisition costs, the source domain is usually poorly sampled to limit the survey duration, causing spatial aliasing (Mahdad, 2011). On the other hand, modifying the waiting times brings flexibility to the source sampling and the survey time. The concept of simultaneous or blended acquisition addresses these issues either by reducing the waiting time between firing sources, leading to reduced acquisition costs, or by increasing the number of sources within the same survey time, leading to higher data quality; a combination of the two approaches yields both benefits. The price paid for achieving higher data quality at lower acquisition cost is dealing with the interfered data, called blended data, acquired in blended acquisition. Before further processing and imaging algorithms can be applied, the blended data must first be broken down into their original components (single-source responses) by a processing step called deblending, which attempts to retrieve the data as if they had been acquired in a conventional, unblended way. In this paper, we introduce the concept of simultaneous acquisition and examine three methods of deblending: 1) The least-squares method (pseudo-deblending), which perfectly predicts the blended data but whose solution suffers from interference noise related to the interfering sources in the observations, the so-called blending noise (crosstalk noise). This noise has different characteristics in different domains of the data; for example, in the common-midpoint (CMP) domain it is incoherent and spike-like and can thus be tackled by a denoising algorithm. 2) Noise attenuation by a multidirectional vector-median filter (MD-VMF), a generalization of the well-known conventional median filter from a scalar implementation to a vector form: a vector-median filter is applied along many trial directions and the median vector is then selected. 3) Regularization of the deblending operator matrix. Deblending is by itself an underdetermined and thus ill-posed problem, meaning that there are infinitely many solutions; constraints are therefore necessary to solve it. A possible choice is spatially band-limiting constraints, which are useful when the sources are densely sampled. It has been shown that under such constraints the deblending operator matrix can be regularized to form a well-behaved direct deblending operator. Finally, by examining the deblended synthetic and field data, we conclude that regularization of the deblending operator matrix is the most reliable approach, owing to its accuracy in attenuating the noise while preserving the signal, as well as the speed of the algorithm.},
  keywords    = {Simultaneous or blended acquisition, Vector median filter, Deblending, Unblended data},
  title_fa    = {Blending and Deblending in Seismic Data Acquisition and Processing},
  abstract_fa = {In conventional acquisition, the sources are fired with large time intervals between them in order to avoid interference between the responses of different sources recorded by the receivers, which increases the duration and cost of the survey. The concept of simultaneous or blended acquisition is therefore introduced to save time and cost. In this method, two or more sources are fired simultaneously (with short time delays), but the wavefields produced by these sources interfere with one another. Hence, before any of the standard processing steps, the blended data must be separated into individual records, an operation called deblending. In this paper, besides introducing blended acquisition, three deblending methods are examined: 1. The least-squares solution (pseudo-deblending), in which no regularization is applied and the only criterion of correctness is prediction of the blended wavefield; its drawback is that the recovered data are contaminated by blending noise. 2. A multidirectional vector-median filter used to attenuate the blending noise left by the least-squares solution; as a nonlinear filter, this method cannot avoid attenuating coherent signals. 3. Regularization of the deblending operator matrix under the assumption that the seismic data are spatially band-limited for adjacent, densely sampled sources. The results of applying these three methods to synthetic data show that deblending through regularization of the operator matrix is more reliable than the other two methods because of its accuracy in attenuating the noise and preserving the signal.},
  keywords_fa = {Simultaneous or blended acquisition, Unblended data, Vector median filter, Deblending},
  url         = {https://jesphys.ut.ac.ir/article_52805.html},
  eprint      = {https://jesphys.ut.ac.ir/article_52805_d311e034aa1caad62a5e0f640b247e6e.pdf}
}
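Note: the first and third deblending methods named in the abstract lend themselves to a compact numerical illustration. The following Python sketch is not taken from the paper; the data cube, the frequency-domain blending matrix built from random firing-time delays, and the damping parameter eps are all illustrative assumptions. It shows forward blending, pseudo-deblending (applying the adjoint of the blending matrix, which reproduces the blended record but leaks crosstalk between sources), and a damped least-squares deblending in the spirit of the regularized direct operator that the paper favours.

# Minimal sketch of blending, pseudo-deblending, and damped least-squares
# deblending; sizes, delays, and eps are made up for demonstration only.
import numpy as np

rng = np.random.default_rng(0)
n_src, n_rcv, n_t = 8, 16, 256        # sources, receivers, time samples (toy sizes)
dt = 0.004                            # assumed sample interval in seconds

# Unblended data cube: one record per source (random stand-in for wavefields).
unblended = rng.standard_normal((n_src, n_rcv, n_t))
delays = rng.integers(0, 64, size=n_src)          # firing delay in samples per source
freqs = np.fft.rfftfreq(n_t, d=dt)

def blending_matrix(delays, freqs, dt):
    # exp(-2*pi*i*f*tau) applies each source's firing-time delay per frequency.
    return np.exp(-2j * np.pi * np.outer(delays * dt, freqs))   # (n_src, n_freq)

G = blending_matrix(delays, freqs, dt)

# Forward blending: sum the delayed source responses into one blended record.
U = np.fft.rfft(unblended, axis=-1)               # (n_src, n_rcv, n_freq)
blended = np.einsum('sf,srf->rf', G, U)           # (n_rcv, n_freq)

# Pseudo-deblending: apply the adjoint Gamma^H. It predicts the blended data
# perfectly but spreads crosstalk from interfering sources into every record.
pseudo = np.einsum('sf,rf->srf', G.conj(), blended)

# Damped least-squares deblending: (Gamma^H Gamma + eps I)^-1 Gamma^H d.
# Using the identity (A^H A + eps I)^-1 A^H = A^H (A A^H + eps I)^-1, and since
# Gamma Gamma^H is a scalar per frequency here, this reduces to a simple scaling.
eps = 1e-2
scale = (np.abs(G) ** 2).sum(axis=0) + eps        # Gamma Gamma^H + eps, per frequency
regularized = pseudo / scale[None, None, :]

pseudo_t = np.fft.irfft(pseudo, n=n_t, axis=-1)
regularized_t = np.fft.irfft(regularized, n=n_t, axis=-1)
print(pseudo_t.shape, regularized_t.shape)        # (n_src, n_rcv, n_t) each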
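A similarly simplified reading of the multidirectional vector-median filter (method 2, "a vector-median filter is applied in many trial directions and then the median vector is selected") is sketched below; the window size, trial dips, and integer-sample dip alignment are illustrative assumptions rather than the paper's implementation.

# Minimal sketch of a multidirectional vector-median filter (MD-VMF),
# assuming a 2-D gather and integer trial dips in samples per trace.
import numpy as np

def vector_median(vectors):
    # Classical vector median: the member of `vectors` (n_vec, n_t) that
    # minimizes the summed L2 distance to all the others.
    d = np.linalg.norm(vectors[:, None, :] - vectors[None, :, :], axis=-1)
    return vectors[np.argmin(d.sum(axis=1))]

def md_vmf(gather, half_window=2, trial_dips=(-2, -1, 0, 1, 2)):
    # For each trace, align neighbouring traces along each trial dip, take a
    # vector median per dip, then keep the vector median of those candidates.
    n_x, n_t = gather.shape
    out = np.empty_like(gather)
    for ix in range(n_x):
        candidates = []
        for dip in trial_dips:
            window = []
            for k in range(-half_window, half_window + 1):
                jx = int(np.clip(ix + k, 0, n_x - 1))
                # Shift the neighbouring trace so events with this dip align.
                window.append(np.roll(gather[jx], -k * dip))
            candidates.append(vector_median(np.asarray(window)))
        out[ix] = vector_median(np.asarray(candidates))
    return out

# Toy usage: a linear event plus sparse spike-like noise (a stand-in for the
# incoherent crosstalk left by pseudo-deblending).
rng = np.random.default_rng(1)
gather = np.zeros((30, 200))
for ix in range(30):
    gather[ix, 50 + ix] = 1.0                      # linear event, dip of 1 sample/trace
gather += 0.5 * (rng.random(gather.shape) < 0.02)  # sparse spikes
print(md_vmf(gather).shape)                        # (30, 200)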